Compare commits

9 commits: v0.34.0.1 ... experiment

| Author | SHA1 | Date |
|---|---|---|
| | 230474b620 | |
| | cf09912280 | |
| | cd317a1910 | |
| | 11a168442d | |
| | e8d99369bc | |
| | 921469383e | |
| | ccbf6bb222 | |
| | c68d510564 | |
| | ce1b393682 | |
@@ -4,8 +4,8 @@ jobs:
machine: true
steps:
- checkout
- run: docker build -f docker/Dockerfile --label gitsha1=${CIRCLE_SHA1} -t matrixdotorg/synapse:${CIRCLE_TAG} .
- run: docker build -f docker/Dockerfile --label gitsha1=${CIRCLE_SHA1} -t matrixdotorg/synapse:${CIRCLE_TAG}-py3 --build-arg PYTHON_VERSION=3.6 .
- run: docker build -f docker/Dockerfile -t matrixdotorg/synapse:${CIRCLE_TAG} .
- run: docker build -f docker/Dockerfile -t matrixdotorg/synapse:${CIRCLE_TAG}-py3 --build-arg PYTHON_VERSION=3.6 .
- run: docker login --username $DOCKER_HUB_USERNAME --password $DOCKER_HUB_PASSWORD
- run: docker push matrixdotorg/synapse:${CIRCLE_TAG}
- run: docker push matrixdotorg/synapse:${CIRCLE_TAG}-py3
@@ -13,9 +13,13 @@ jobs:
machine: true
steps:
- checkout
- run: docker build -f docker/Dockerfile --label gitsha1=${CIRCLE_SHA1} -t matrixdotorg/synapse:${CIRCLE_SHA1} .
- run: docker build -f docker/Dockerfile --label gitsha1=${CIRCLE_SHA1} -t matrixdotorg/synapse:${CIRCLE_SHA1}-py3 --build-arg PYTHON_VERSION=3.6 .
- run: docker build -f docker/Dockerfile -t matrixdotorg/synapse:${CIRCLE_SHA1} .
- run: docker build -f docker/Dockerfile -t matrixdotorg/synapse:${CIRCLE_SHA1}-py3 --build-arg PYTHON_VERSION=3.6 .
- run: docker login --username $DOCKER_HUB_USERNAME --password $DOCKER_HUB_PASSWORD
- run: docker tag matrixdotorg/synapse:${CIRCLE_SHA1} matrixdotorg/synapse:latest
- run: docker tag matrixdotorg/synapse:${CIRCLE_SHA1}-py3 matrixdotorg/synapse:latest-py3
- run: docker push matrixdotorg/synapse:${CIRCLE_SHA1}
- run: docker push matrixdotorg/synapse:${CIRCLE_SHA1}-py3
- run: docker push matrixdotorg/synapse:latest
- run: docker push matrixdotorg/synapse:latest-py3
sytestpy2:
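The hunks above add a `--label gitsha1=${CIRCLE_SHA1}` to each `docker build`, so every published image records the commit it was built from. A minimal sketch of how that label could be read back from a built image (the tag used here is only an example):

    docker inspect --format '{{ index .Config.Labels "gitsha1" }}' matrixdotorg/synapse:latest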
@@ -20,7 +20,7 @@ else
fi

# Show what we are before
git --no-pager show -s
git show -s

# Set up username so it can do a merge
git config --global user.email bot@matrix.org
@@ -31,4 +31,4 @@ git fetch -u origin $GITBASE
git merge --no-edit origin/$GITBASE

# Show what we are after.
git --no-pager show -s
git show -s
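These hunks swap `git show -s` for `git --no-pager show -s` (matching the "Disable pager when running git-show in CI" entry in CHANGES.md below), so the CI job cannot stall waiting on an interactive pager. An alternative sketch, assuming a CI shell where setting an environment variable is acceptable:

    # alternative to passing --no-pager on every call
    export GIT_PAGER=cat
    git show -s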
@@ -5,5 +5,3 @@ demo/etc
tox.ini
.git/*
.tox/*
debian/matrix-synapse/
debian/matrix-synapse-*/
@@ -1,9 +0,0 @@
# EditorConfig https://EditorConfig.org

# top-most EditorConfig file
root = true

# 4 space indentation
[*.py]
indent_style = space
indent_size = 4
8  .gitignore  vendored
@@ -18,7 +18,7 @@ homeserver*.db
homeserver*.log
homeserver*.log.*
homeserver*.pid
/homeserver*.yaml
homeserver*.yaml

*.signing.key
*.tls.crt
@@ -26,8 +26,6 @@ homeserver*.pid
*.tls.key

.coverage
.coverage.*
!.coverage.rc
htmlcov

demo/*/*.db
@@ -59,7 +57,3 @@ env/

.vscode/
.ropeproject/

*.deb

/debs
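Ignore rules such as the `.coverage.*` / `!.coverage.rc` pair above can interact in non-obvious ways; `git check-ignore -v` reports which rule wins for a given path. A quick sketch (the file names are only examples):

    git check-ignore -v .coverage.123 .coverage.rc homeserver.db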
10  .travis.yml
@@ -36,24 +36,24 @@ matrix:
env: TOX_ENV="pep8,check_isort"

- python: 2.7
env: TOX_ENV=py27,codecov TRIAL_FLAGS="-j 2"
env: TOX_ENV=py27 TRIAL_FLAGS="-j 2"

- python: 2.7
env: TOX_ENV=py27-old TRIAL_FLAGS="-j 2"

- python: 2.7
env: TOX_ENV=py27-postgres,codecov TRIAL_FLAGS="-j 4"
env: TOX_ENV=py27-postgres TRIAL_FLAGS="-j 4"
services:
- postgresql

- python: 3.5
env: TOX_ENV=py35,codecov TRIAL_FLAGS="-j 2"
env: TOX_ENV=py35 TRIAL_FLAGS="-j 2"

- python: 3.6
env: TOX_ENV=py36,codecov TRIAL_FLAGS="-j 2"
env: TOX_ENV=py36 TRIAL_FLAGS="-j 2"

- python: 3.6
env: TOX_ENV=py36-postgres,codecov TRIAL_FLAGS="-j 4"
env: TOX_ENV=py36-postgres TRIAL_FLAGS="-j 4"
services:
- postgresql
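The Travis change above appends a `codecov` factor to several `TOX_ENV` lists (see the "Added automated coverage reporting to CI" entry in CHANGES.md below). Assuming the build step simply hands that variable to tox (the `script:` section is not shown here), one matrix entry would correspond locally to roughly:

    # hypothetical local run of one matrix entry; how TRIAL_FLAGS is consumed depends on tox.ini
    TRIAL_FLAGS="-j 2" tox -e py36,codecov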
78  CHANGES.md
@@ -1,79 +1,3 @@
Synapse 0.34.0.1 (2019-01-11)
=============================

This release fixes CVE-2019-5885 and is recommended for all users of Synapse 0.34.0 and below.

Bugfixes
--------

- Fix problem reading macaroon_secret_key from config ([\#4373](https://github.com/matrix-org/synapse/issues/4373))


Synapse 0.34.0 (2018-12-20)
===========================

Synapse 0.34.0 is the first release to fully support Python 3. Synapse will now
run on Python versions 3.5 or 3.6 (as well as 2.7). Support for Python 3.7
remains experimental.

We recommend upgrading to Python 3, but make sure to read the [upgrade
notes](UPGRADE.rst#upgrading-to-v0340) when doing so.

Features
--------

- Add 'sandbox' to CSP for media reprository ([\#4284](https://github.com/matrix-org/synapse/issues/4284))
- Make the new landing page prettier. ([\#4294](https://github.com/matrix-org/synapse/issues/4294))
- Fix deleting E2E room keys when using old SQLite versions. ([\#4295](https://github.com/matrix-org/synapse/issues/4295))
- Add a welcome page for the client API port. Credit to @krombel! ([\#4289](https://github.com/matrix-org/synapse/issues/4289))
- Remove Matrix console from the default distribution ([\#4290](https://github.com/matrix-org/synapse/issues/4290))
- Add option to track MAU stats (but not limit people) ([\#3830](https://github.com/matrix-org/synapse/issues/3830))
- Add an option to enable recording IPs for appservice users ([\#3831](https://github.com/matrix-org/synapse/issues/3831))
- Rename login type `m.login.cas` to `m.login.sso` ([\#4220](https://github.com/matrix-org/synapse/issues/4220))
- Add an option to disable search for homeservers that may not be interested in it. ([\#4230](https://github.com/matrix-org/synapse/issues/4230))


Bugfixes
--------

- Pushrules can now again be made with non-ASCII rule IDs. ([\#4165](https://github.com/matrix-org/synapse/issues/4165))
- The media repository now no longer fails to decode UTF-8 filenames when downloading remote media. ([\#4176](https://github.com/matrix-org/synapse/issues/4176))
- URL previews now correctly decode non-UTF-8 text if the header contains a `<meta http-equiv="Content-Type"` header. ([\#4183](https://github.com/matrix-org/synapse/issues/4183))
- Fix an issue where public consent URLs had two slashes. ([\#4192](https://github.com/matrix-org/synapse/issues/4192))
- Fallback auth now accepts the session parameter on Python 3. ([\#4197](https://github.com/matrix-org/synapse/issues/4197))
- Remove riot.im from the list of trusted Identity Servers in the default configuration ([\#4207](https://github.com/matrix-org/synapse/issues/4207))
- fix start up failure when mau_limit_reserved_threepids set and db is postgres ([\#4211](https://github.com/matrix-org/synapse/issues/4211))
- Fix auto join failures for servers that require user consent ([\#4223](https://github.com/matrix-org/synapse/issues/4223))
- Fix exception caused by non-ascii event IDs ([\#4241](https://github.com/matrix-org/synapse/issues/4241))
- Pushers can now be unsubscribed from on Python 3. ([\#4250](https://github.com/matrix-org/synapse/issues/4250))
- Fix UnicodeDecodeError when postgres is configured to give non-English errors ([\#4253](https://github.com/matrix-org/synapse/issues/4253))


Internal Changes
----------------

- Debian packages utilising a virtualenv with bundled dependencies can now be built. ([\#4212](https://github.com/matrix-org/synapse/issues/4212))
- Disable pager when running git-show in CI ([\#4291](https://github.com/matrix-org/synapse/issues/4291))
- A coveragerc file has been added. ([\#4180](https://github.com/matrix-org/synapse/issues/4180))
- Add a GitHub pull request template and add multiple issue templates ([\#4182](https://github.com/matrix-org/synapse/issues/4182))
- Update README to reflect the fact that [\#1491](https://github.com/matrix-org/synapse/issues/1491) is fixed ([\#4188](https://github.com/matrix-org/synapse/issues/4188))
- Run the AS senders as background processes to fix warnings ([\#4189](https://github.com/matrix-org/synapse/issues/4189))
- Add some diagnostics to the tests to detect logcontext problems ([\#4190](https://github.com/matrix-org/synapse/issues/4190))
- Add missing `jpeg` package prerequisite for OpenBSD in README. ([\#4193](https://github.com/matrix-org/synapse/issues/4193))
- Add a note saying you need to manually reclaim disk space after using the Purge History API ([\#4200](https://github.com/matrix-org/synapse/issues/4200))
- More logcontext checking in unittests ([\#4205](https://github.com/matrix-org/synapse/issues/4205))
- Ignore `__pycache__` directories in the database schema folder ([\#4214](https://github.com/matrix-org/synapse/issues/4214))
- Add note to UPGRADE.rst about removing riot.im from list of trusted identity servers ([\#4224](https://github.com/matrix-org/synapse/issues/4224))
- Added automated coverage reporting to CI. ([\#4225](https://github.com/matrix-org/synapse/issues/4225))
- Garbage-collect after each unit test to fix logcontext leaks ([\#4227](https://github.com/matrix-org/synapse/issues/4227))
- add more detail to logging regarding "More than one row matched" error ([\#4234](https://github.com/matrix-org/synapse/issues/4234))
- Drop sent_transactions table ([\#4244](https://github.com/matrix-org/synapse/issues/4244))
- Add a basic .editorconfig ([\#4257](https://github.com/matrix-org/synapse/issues/4257))
- Update README.rst and UPGRADE.rst for Python 3. ([\#4260](https://github.com/matrix-org/synapse/issues/4260))
- Remove obsolete `verbose` and `log_file` settings from `homeserver.yaml` for Docker image. ([\#4261](https://github.com/matrix-org/synapse/issues/4261))


Synapse 0.33.9 (2018-11-19)
===========================

@@ -147,7 +71,7 @@ Synapse 0.33.8rc2 (2018-10-31)
Bugfixes
--------

- Searches that request profile info now no longer fail with a 500. Fixes
- Searches that request profile info now no longer fail with a 500. Fixes
a regression in 0.33.8rc1. ([\#4122](https://github.com/matrix-org/synapse/issues/4122))
@@ -26,7 +26,6 @@ recursive-include synapse/static *.js
exclude Dockerfile
exclude .dockerignore
exclude test_postgresql.sh
exclude .editorconfig

include pyproject.toml
recursive-include changelog.d *
@@ -36,7 +35,6 @@ prune demo/etc
prune docker
prune .circleci
prune .coveragerc
prune debian

exclude jenkins*
recursive-exclude jenkins *.sh
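These MANIFEST.in hunks control what lands in the source distribution. A quick sketch for confirming that files such as `.editorconfig` and `.coveragerc` really stay out of the built tarball (the archive name in the path is illustrative):

    python setup.py sdist
    tar -tzf dist/matrix-synapse-*.tar.gz | grep -E '\.editorconfig|\.coveragerc' || echo "not shipped"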
80  README.rst
@@ -86,7 +86,7 @@ Synapse is the reference Python/Twisted Matrix homeserver implementation.
System requirements:

- POSIX-compliant system (tested on Linux & OS X)
- Python 3.5, 3.6, or 2.7
- Python 2.7
- At least 1GB of free RAM if you want to join large public rooms like #matrix:matrix.org

Installing from source
@@ -101,13 +101,13 @@ header files for Python C extensions.

Installing prerequisites on Ubuntu or Debian::

sudo apt-get install build-essential python3-dev libffi-dev \
sudo apt-get install build-essential python2.7-dev libffi-dev \
python-pip python-setuptools sqlite3 \
libssl-dev python-virtualenv libjpeg-dev libxslt1-dev

Installing prerequisites on ArchLinux::

sudo pacman -S base-devel python python-pip \
sudo pacman -S base-devel python2 python-pip \
python-setuptools python-virtualenv sqlite3

Installing prerequisites on CentOS 7 or Fedora 25::
@@ -126,9 +126,12 @@ Installing prerequisites on Mac OS X::

Installing prerequisites on Raspbian::

sudo apt-get install build-essential python3-dev libffi-dev \
sudo apt-get install build-essential python2.7-dev libffi-dev \
python-pip python-setuptools sqlite3 \
libssl-dev python-virtualenv libjpeg-dev
sudo pip install --upgrade pip
sudo pip install --upgrade ndg-httpsclient
sudo pip install --upgrade virtualenv

Installing prerequisites on openSUSE::

@@ -143,21 +146,20 @@ Installing prerequisites on OpenBSD::

To install the Synapse homeserver run::

mkdir -p ~/synapse
virtualenv -p python3 ~/synapse/env
source ~/synapse/env/bin/activate
virtualenv -p python2.7 ~/.synapse
source ~/.synapse/bin/activate
pip install --upgrade pip
pip install --upgrade setuptools
pip install matrix-synapse

This installs Synapse, along with the libraries it uses, into a virtual
environment under ``~/synapse/env``. Feel free to pick a different directory
environment under ``~/.synapse``. Feel free to pick a different directory
if you prefer.

This Synapse installation can then be later upgraded by using pip again with the
update flag::

source ~/synapse/env/bin/activate
source ~/.synapse/bin/activate
pip install -U matrix-synapse

In case of problems, please see the _`Troubleshooting` section below.
@@ -238,7 +240,7 @@ commandline script.

To get started, it is easiest to use the command line to register new users::

$ source ~/synapse/env/bin/activate
$ source ~/.synapse/bin/activate
$ synctl start # if not already running
$ register_new_matrix_user -c homeserver.yaml https://localhost:8448
New user localpart: erikj
@@ -264,12 +266,13 @@ Running Synapse
===============

To actually run your new homeserver, pick a working directory for Synapse to
run (e.g. ``~/synapse``), and::
run (e.g. ``~/.synapse``), and::

cd ~/synapse
source env/bin/activate
cd ~/.synapse
source ./bin/activate
synctl start


Connecting to Synapse from a client
===================================

@@ -289,6 +292,10 @@ go back in your web client and proceed further.
If all goes well you should at least be able to log in, create a room, and
start sending messages.

(The homeserver runs a web client by default at https://localhost:8448/, though
as of the time of writing it is somewhat outdated and not really recommended -
https://github.com/matrix-org/synapse/issues/1527).

.. _`client-user-reg`:

Registering a new user from a client
@@ -326,7 +333,7 @@ content served to web browsers a matrix API from being able to attack webapps ho
on the same domain. This is particularly true of sharing a matrix webclient and
server on the same domain.

See https://github.com/vector-im/riot-web/issues/1977 and
See https://github.com/vector-im/vector-web/issues/1977 and
https://developer.github.com/changes/2014-04-25-user-content-security for more details.


@@ -368,19 +375,40 @@ ArchLinux

The quickest way to get up and running with ArchLinux is probably with the community package
https://www.archlinux.org/packages/community/any/matrix-synapse/, which should pull in most of
the necessary dependencies.
the necessary dependencies. If the default web client is to be served (enabled by default in
the generated config),
https://www.archlinux.org/packages/community/any/python2-matrix-angular-sdk/ will also need to
be installed.

Alternatively, to install using pip a few changes may be needed as ArchLinux
defaults to python 3, but synapse currently assumes python 2.7 by default:

pip may be outdated (6.0.7-1 and needs to be upgraded to 6.0.8-1 )::

sudo pip install --upgrade pip
sudo pip2.7 install --upgrade pip

You also may need to explicitly specify python 2.7 again during the install
request::

pip2.7 install https://github.com/matrix-org/synapse/tarball/master

If you encounter an error with lib bcrypt causing an Wrong ELF Class:
ELFCLASS32 (x64 Systems), you may need to reinstall py-bcrypt to correctly
compile it under the right architecture. (This should not be needed if
installing under virtualenv)::

sudo pip uninstall py-bcrypt
sudo pip install py-bcrypt
sudo pip2.7 uninstall py-bcrypt
sudo pip2.7 install py-bcrypt

During setup of Synapse you need to call python2.7 directly again::

cd ~/.synapse
python2.7 -m synapse.app.homeserver \
--server-name machine.my.domain.name \
--config-path homeserver.yaml \
--generate-config

...substituting your host and domain name as appropriate.

FreeBSD
-------
@@ -447,7 +475,7 @@ You can fix this by manually upgrading pip and virtualenv::

sudo pip install --upgrade virtualenv

You can next rerun ``virtualenv -p python3 synapse`` to update the virtual env.
You can next rerun ``virtualenv -p python2.7 synapse`` to update the virtual env.

Installing may fail during installing virtualenv with ``InsecurePlatformWarning: A true SSLContext object is not available. This prevents urllib3 from configuring SSL appropriately and may cause certain SSL connections to fail. For more information, see https://urllib3.readthedocs.org/en/latest/security.html#insecureplatformwarning.``
You can fix this by manually installing ndg-httpsclient::
@@ -496,6 +524,16 @@ log lines and looking for any 'Processed request' lines which take more than
a few seconds to execute. Please let us know at #matrix-dev:matrix.org if
you see this failure mode so we can help debug it, however.

ArchLinux
~~~~~~~~~

If running `$ synctl start` fails with 'returned non-zero exit status 1',
you will need to explicitly call Python2.7 - either running as::

python2.7 -m synapse.app.homeserver --daemonize -c homeserver.yaml

...or by editing synctl with the correct python executable.


Upgrading an existing Synapse
=============================
@@ -693,7 +731,7 @@ port:

* Until v0.33.3, Synapse did not support SNI on the federation port
(`bug #1491 <https://github.com/matrix-org/synapse/issues/1491>`_). This bug
is now fixed, but means that federating with older servers can be unreliable
is now fixed, but means that federating with older servers can be unreliable
when using name-based virtual hosting.

Furthermore, a number of the normal reasons for using a reverse-proxy do not
@@ -790,7 +828,7 @@ Password reset
==============

If a user has registered an email address to their account using an identity
server, they can request a password-reset token via clients such as Riot.
server, they can request a password-reset token via clients such as Vector.

A manual password reset can be done via direct database access as follows.
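The README hunk above ends at "A manual password reset can be done via direct database access as follows." The steps themselves fall outside this hunk, but a hedged sketch of the usual shape of such a reset, using the `hash_password` tool whose man page appears later in this diff (the table and column names are an assumption about the Synapse schema, not something shown here):

    # generate a bcrypt hash for the new password
    hash_password -p "new password"
    # then, in the homeserver database (table/column names are an assumption):
    # UPDATE users SET password_hash = '<hash printed above>' WHERE name = '@alice:example.com';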
80  UPGRADE.rst
@@ -48,86 +48,6 @@ returned by the Client-Server API:
# configured on port 443.
curl -kv https://<host.name>/_matrix/client/versions 2>&1 | grep "Server:"

Upgrading to v0.34.0
====================

1. This release is the first to fully support Python 3. Synapse will now run on
Python versions 3.5, or 3.6 (as well as 2.7). We recommend switching to
Python 3, as it has been shown to give performance improvements.

For users who have installed Synapse into a virtualenv, we recommend doing
this by creating a new virtualenv. For example::

virtualenv -p python3 ~/synapse/env3
source ~/synapse/env3/bin/activate
pip install matrix-synapse

You can then start synapse as normal, having activated the new virtualenv::

cd ~/synapse
source env3/bin/activate
synctl start

Users who have installed from distribution packages should see the relevant
package documentation. See below for notes on Debian packages.

* When upgrading to Python 3, you **must** make sure that your log files are
configured as UTF-8, by adding ``encoding: utf8`` to the
``RotatingFileHandler`` configuration (if you have one) in your
``<server>.log.config`` file. For example, if your ``log.config`` file
contains::

handlers:
file:
class: logging.handlers.RotatingFileHandler
formatter: precise
filename: homeserver.log
maxBytes: 104857600
backupCount: 10
filters: [context]
console:
class: logging.StreamHandler
formatter: precise
filters: [context]

Then you should update this to be::

handlers:
file:
class: logging.handlers.RotatingFileHandler
formatter: precise
filename: homeserver.log
maxBytes: 104857600
backupCount: 10
filters: [context]
encoding: utf8
console:
class: logging.StreamHandler
formatter: precise
filters: [context]

There is no need to revert this change if downgrading to Python 2.

We are also making available Debian packages which will run Synapse on
Python 3. You can switch to these packages with ``apt-get install
matrix-synapse-py3``, however, please read `debian/NEWS
<https://github.com/matrix-org/synapse/blob/release-v0.34.0/debian/NEWS>`_
before doing so. The existing ``matrix-synapse`` packages will continue to
use Python 2 for the time being.

2. This release removes the ``riot.im`` from the default list of trusted
identity servers.

If ``riot.im`` is in your homeserver's list of
``trusted_third_party_id_servers``, you should remove it. It was added in
case a hypothetical future identity server was put there. If you don't
remove it, users may be unable to deactivate their accounts.

3. This release no longer installs the (unmaintained) Matrix Console web client
as part of the default installation. It is possible to re-enable it by
installing it separately and setting the ``web_client_location`` config
option, but please consider switching to another client.

Upgrading to v0.33.7
====================
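A short sketch of sanity checks after creating the new Python 3 virtualenv described in the upgrade notes above (paths follow the example in those notes):

    source ~/synapse/env3/bin/activate
    python --version        # expect 3.5.x or 3.6.x
    pip show matrix-synapse # confirm the package landed in this environment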
1  changelog.d/3830.feature  Normal file
@@ -0,0 +1 @@
Add option to track MAU stats (but not limit people)
1  changelog.d/4176.bugfix  Normal file
@@ -0,0 +1 @@
The media repository now no longer fails to decode UTF-8 filenames when downloading remote media.
1  changelog.d/4180.misc  Normal file
@@ -0,0 +1 @@
A coveragerc file, as well as the py36-coverage tox target, have been added.
1  changelog.d/4182.misc  Normal file
@@ -0,0 +1 @@
Add a GitHub pull request template and add multiple issue templates
1  changelog.d/4183.bugfix  Normal file
@@ -0,0 +1 @@
URL previews now correctly decode non-UTF-8 text if the header contains a `<meta http-equiv="Content-Type"` header.
1  changelog.d/4188.misc  Normal file
@@ -0,0 +1 @@
Update README to reflect the fact that #1491 is fixed
1  changelog.d/4192.bugfix  Normal file
@@ -0,0 +1 @@
Fix an issue where public consent URLs had two slashes.
1  changelog.d/4193.misc  Normal file
@@ -0,0 +1 @@
Add missing `jpeg` package prerequisite for OpenBSD in README.
1  changelog.d/4197.bugfix  Normal file
@@ -0,0 +1 @@
Fallback auth now accepts the session parameter on Python 3.
1  changelog.d/4200.misc  Normal file
@@ -0,0 +1 @@
Add a note saying you need to manually reclaim disk space after using the Purge History API
1  changelog.d/4204.misc  Normal file
@@ -0,0 +1 @@
Fix logcontext leaks in EmailPusher and in tests
1  changelog.d/4207.bugfix  Normal file
@@ -0,0 +1 @@
Remove riot.im from the list of trusted Identity Servers in the default configuration
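Each of the new files above is a one-line changelog fragment named `<issue number>.<type>`, with `feature`, `bugfix`, or `misc` as the types seen here. A minimal sketch of adding one for a hypothetical change (the number 9999 is made up):

    echo "One-sentence description of the change." > changelog.d/9999.misc
    git add changelog.d/9999.misc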
7  debian/.gitignore  vendored
@@ -1,7 +0,0 @@
/matrix-synapse-py3.*.debhelper
/matrix-synapse-py3.debhelper.log
/matrix-synapse-py3.substvars
/matrix-synapse-*/
/files
/debhelper-build-stamp
/.debhelper
32  debian/NEWS  vendored
@@ -1,32 +0,0 @@
matrix-synapse-py3 (0.34.0) stable; urgency=medium

matrix-synapse-py3 is intended as a drop-in replacement for the existing
matrix-synapse package. When the package is installed, matrix-synapse will be
automatically uninstalled. The replacement should be relatively seamless,
however, please note the following important differences to matrix-synapse:

* Most importantly, the matrix-synapse service now runs under Python 3 rather
than Python 2.7.

* Synapse is installed into its own virtualenv (in /opt/venvs/matrix-synapse)
instead of using the system python libraries. (This may mean that you can
remove a number of old dependencies with `apt autoremove`).

* If you have previously manually installed any custom python extensions
(such as matrix-synapse-rest-auth) into the system python directories, you
will need to reinstall them in the new virtualenv. Please consult the
documentation of the relevant extensions for further details.

matrix-synapse-py3 will take over responsibility for the existing
configuration files, including the matrix-synapse systemd service.

Beware, however, that `apt purge matrix-synapse` will *disable* the
matrix-synapse service (so that it will not be started on reboot), even
though that service is no longer being provided by the matrix-synapse
package. It can be re-enabled with `systemctl enable matrix-synapse`.

The matrix.org team will continue to provide Python 2 `matrix-synapse`
packages for the next couple of releases, to allow time for system
administrators to test the new packages.

 -- Richard van der Hoff <richard@matrix.org> Wed, 19 Dec 2018 14:00:00 +0000
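As the NEWS entry above notes, purging the old package can leave the matrix-synapse unit disabled. A minimal sketch of re-enabling and starting it afterwards, using the command the entry names:

    sudo systemctl enable matrix-synapse
    sudo systemctl start matrix-synapse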
48  debian/build_virtualenv  vendored
@@ -1,48 +0,0 @@
#!/bin/bash
#
# runs dh_virtualenv to build the virtualenv in the build directory,
# and then runs the trial tests against the installed synapse.

set -e

export DH_VIRTUALENV_INSTALL_ROOT=/opt/venvs
SNAKE=/usr/bin/python3

# try to set the CFLAGS so any compiled C extensions are compiled with the most
# generic as possible x64 instructions, so that compiling it on a new Intel chip
# doesn't enable features not available on older ones or AMD.
#
# TODO: add similar things for non-amd64, or figure out a more generic way to
# do this.

case `dpkg-architecture -q DEB_HOST_ARCH` in
amd64)
export CFLAGS=-march=x86-64
;;
esac

# Use --builtin-venv to use the better `venv` module from CPython 3.4+ rather
# than the 2/3 compatible `virtualenv`.

dh_virtualenv \
--install-suffix "matrix-synapse" \
--builtin-venv \
--setuptools \
--python "$SNAKE" \
--upgrade-pip \
--preinstall="lxml" \
--preinstall="mock" \
--extra-pip-arg="--no-cache-dir" \
--extra-pip-arg="--compile"

# we copy the tests to a temporary directory so that we can put them on the
# PYTHONPATH without putting the uninstalled synapse on the pythonpath.
tmpdir=`mktemp -d`
trap "rm -r $tmpdir" EXIT

cp -r tests "$tmpdir"
cd debian/matrix-synapse-py3

PYTHONPATH="$tmpdir" \
./opt/venvs/matrix-synapse/bin/python \
-B -m twisted.trial --reporter=text -j2 tests
641  debian/changelog  vendored
@@ -1,641 +0,0 @@
|
||||
matrix-synapse-py3 (0.34.0) stable; urgency=medium
|
||||
|
||||
* New synapse release 0.34.0.
|
||||
* Synapse is now installed into a Python 3 virtual environment with
|
||||
up-to-date dependencies.
|
||||
* The matrix-synapse service will now be restarted when the package is
|
||||
upgraded.
|
||||
(Fixes https://github.com/matrix-org/package-synapse-debian/issues/18)
|
||||
|
||||
-- Synapse packaging team <packages@matrix.org> Wed, 19 Dec 2018 14:00:00 +0000
|
||||
|
||||
matrix-synapse (0.33.9-1matrix1) stretch; urgency=medium
|
||||
|
||||
[ Erik Johnston ]
|
||||
* Remove dependency on python-pydenticon
|
||||
|
||||
[ Richard van der Hoff ]
|
||||
* New upstream version 0.33.9
|
||||
* Refresh patches for 0.33.9
|
||||
|
||||
-- Richard van der Hoff <richard@matrix.org> Tue, 20 Nov 2018 10:26:05 +0000
|
||||
|
||||
matrix-synapse (0.33.8-1) stretch; urgency=medium
|
||||
|
||||
* New upstream version 0.33.8
|
||||
|
||||
-- Erik Johnston <erik@matrix.org> Thu, 01 Nov 2018 14:33:26 +0000
|
||||
|
||||
matrix-synapse (0.33.7-1matrix1) stretch; urgency=medium
|
||||
|
||||
* New upstream version 0.33.7
|
||||
|
||||
-- Richard van der Hoff <richard@matrix.org> Thu, 18 Oct 2018 16:18:26 +0100
|
||||
|
||||
matrix-synapse (0.33.6-1matrix1) stretch; urgency=medium
|
||||
|
||||
* Imported Upstream version 0.33.6
|
||||
* Remove redundant explicit dep on python-bcrypt
|
||||
* Run the tests during build
|
||||
* Add dependency on python-attr 16.0
|
||||
* Refresh patches for 0.33.6
|
||||
|
||||
-- Richard van der Hoff <richard@matrix.org> Thu, 04 Oct 2018 14:40:29 +0100
|
||||
|
||||
matrix-synapse (0.33.5.1-1matrix1) stretch; urgency=medium
|
||||
|
||||
* Imported Upstream version 0.33.5.1
|
||||
|
||||
-- Richard van der Hoff <richard@matrix.org> Mon, 24 Sep 2018 18:20:51 +0100
|
||||
|
||||
matrix-synapse (0.33.5-1matrix1) stretch; urgency=medium
|
||||
|
||||
* Imported Upstream version 0.33.5
|
||||
|
||||
-- Richard van der Hoff <richard@matrix.org> Mon, 24 Sep 2018 16:06:23 +0100
|
||||
|
||||
matrix-synapse (0.33.4-1mx1) stretch; urgency=medium
|
||||
|
||||
* Imported Upstream version 0.33.4
|
||||
* Avoid telling people to install packages with pip
|
||||
(fixes https://github.com/matrix-org/synapse/issues/3743)
|
||||
|
||||
-- Richard van der Hoff <richard@matrix.org> Fri, 07 Sep 2018 14:06:17 +0100
|
||||
|
||||
matrix-synapse (0.33.3.1-1mx1) stretch; urgency=critical
|
||||
|
||||
[ Richard van der Hoff ]
|
||||
* Imported Upstream version 0.33.3.1
|
||||
|
||||
-- Richard van der Hoff <richard@matrix.org> Thu, 06 Sep 2018 11:20:37 +0100
|
||||
|
||||
matrix-synapse (0.33.3-2) stretch; urgency=medium
|
||||
|
||||
* We now require python-twisted 17.1.0 or later
|
||||
* Add recommendations for python-psycopg2 and python-lxml
|
||||
|
||||
-- Richard van der Hoff <richard@matrix.org> Thu, 23 Aug 2018 19:04:08 +0100
|
||||
|
||||
matrix-synapse (0.33.3-1) jessie; urgency=medium
|
||||
|
||||
* New upstream version 0.33.3
|
||||
|
||||
-- Richard van der Hoff <richard@matrix.org> Wed, 22 Aug 2018 14:50:30 +0100
|
||||
|
||||
matrix-synapse (0.33.2-1) jessie; urgency=medium
|
||||
|
||||
* New upstream version 0.33.2
|
||||
|
||||
-- Richard van der Hoff <richard@matrix.org> Thu, 09 Aug 2018 15:40:42 +0100
|
||||
|
||||
matrix-synapse (0.33.1-1) jessie; urgency=medium
|
||||
|
||||
* New upstream version 0.33.1
|
||||
|
||||
-- Erik Johnston <erik@matrix.org> Thu, 02 Aug 2018 15:52:19 +0100
|
||||
|
||||
matrix-synapse (0.33.0-1) jessie; urgency=medium
|
||||
|
||||
* New upstream version 0.33.0
|
||||
|
||||
-- Richard van der Hoff <richard@matrix.org> Thu, 19 Jul 2018 13:38:41 +0100
|
||||
|
||||
matrix-synapse (0.32.1-1) jessie; urgency=medium
|
||||
|
||||
* New upstream version 0.32.1
|
||||
|
||||
-- Richard van der Hoff <richard@matrix.org> Fri, 06 Jul 2018 17:16:29 +0100
|
||||
|
||||
matrix-synapse (0.32.0-1) jessie; urgency=medium
|
||||
|
||||
* New upstream version 0.32.0
|
||||
|
||||
-- Erik Johnston <erik@matrix.org> Fri, 06 Jul 2018 15:34:06 +0100
|
||||
|
||||
matrix-synapse (0.31.2-1) jessie; urgency=high
|
||||
|
||||
* New upstream version 0.31.2
|
||||
|
||||
-- Richard van der Hoff <richard@matrix.org> Thu, 14 Jun 2018 16:49:07 +0100
|
||||
|
||||
matrix-synapse (0.31.1-1) jessie; urgency=medium
|
||||
|
||||
* New upstream version 0.31.1
|
||||
* Require python-prometheus-client >= 0.0.14
|
||||
|
||||
-- Richard van der Hoff <richard@matrix.org> Fri, 08 Jun 2018 16:11:55 +0100
|
||||
|
||||
matrix-synapse (0.31.0-1) jessie; urgency=medium
|
||||
|
||||
* New upstream version 0.31.0
|
||||
|
||||
-- Richard van der Hoff <richard@matrix.org> Wed, 06 Jun 2018 17:23:10 +0100
|
||||
|
||||
matrix-synapse (0.30.0-1) jessie; urgency=medium
|
||||
|
||||
[ Michael Kaye ]
|
||||
* update homeserver.yaml to be somewhat more modern.
|
||||
|
||||
[ Erik Johnston ]
|
||||
* New upstream version 0.30.0
|
||||
|
||||
-- Erik Johnston <erik@matrix.org> Thu, 24 May 2018 16:43:16 +0100
|
||||
|
||||
matrix-synapse (0.29.0-1) jessie; urgency=medium
|
||||
|
||||
* New upstream version 0.29.0
|
||||
|
||||
-- Erik Johnston <erik@matrix.org> Wed, 16 May 2018 17:43:06 +0100
|
||||
|
||||
matrix-synapse (0.28.1-1) jessie; urgency=medium
|
||||
|
||||
* New upstream version 0.28.1
|
||||
|
||||
-- Erik Johnston <erik@matrix.org> Tue, 01 May 2018 19:21:39 +0100
|
||||
|
||||
matrix-synapse (0.28.0-1) jessie; urgency=medium
|
||||
|
||||
* New upstream 0.28.0
|
||||
|
||||
-- Erik Johnston <erik@matrix.org> Fri, 27 Apr 2018 13:15:49 +0100
|
||||
|
||||
matrix-synapse (0.27.4-1) jessie; urgency=medium
|
||||
|
||||
* Bump canonicaljson version
|
||||
* New upstream 0.27.4
|
||||
|
||||
-- Erik Johnston <erik@matrix.org> Fri, 13 Apr 2018 13:37:47 +0100
|
||||
|
||||
matrix-synapse (0.27.3-1) jessie; urgency=medium
|
||||
|
||||
* Report stats should default to off
|
||||
* Refresh patches
|
||||
* New upstream 0.27.3
|
||||
|
||||
-- Erik Johnston <erik@matrix.org> Wed, 11 Apr 2018 11:43:47 +0100
|
||||
|
||||
matrix-synapse (0.27.2-1) jessie; urgency=medium
|
||||
|
||||
* New upstream version 0.27.2
|
||||
|
||||
-- Erik Johnston <erik@matrix.org> Mon, 26 Mar 2018 16:41:57 +0100
|
||||
|
||||
matrix-synapse (0.27.1-1) jessie; urgency=medium
|
||||
|
||||
* New upstream version 0.27.1
|
||||
|
||||
-- Erik Johnston <erik@matrix.org> Mon, 26 Mar 2018 16:22:03 +0100
|
||||
|
||||
matrix-synapse (0.27.0-2) jessie; urgency=medium
|
||||
|
||||
* Fix bcrypt dependency
|
||||
|
||||
-- Erik Johnston <erik@matrix.org> Mon, 26 Mar 2018 16:00:26 +0100
|
||||
|
||||
matrix-synapse (0.27.0-1) jessie; urgency=medium
|
||||
|
||||
* New upstream version 0.27.0
|
||||
|
||||
-- Erik Johnston <erik@matrix.org> Mon, 26 Mar 2018 15:07:52 +0100
|
||||
|
||||
matrix-synapse (0.26.1-1) jessie; urgency=medium
|
||||
|
||||
* Ignore RC
|
||||
* New upstream version 0.26.1
|
||||
|
||||
-- Erik Johnston <erik@matrix.org> Fri, 16 Mar 2018 00:40:08 +0000
|
||||
|
||||
matrix-synapse (0.26.0-1) jessie; urgency=medium
|
||||
|
||||
[ Richard van der Hoff ]
|
||||
* Remove `level` for `file` log handler
|
||||
|
||||
[ Erik Johnston ]
|
||||
|
||||
-- Erik Johnston <erik@matrix.org> Fri, 05 Jan 2018 11:21:26 +0000
|
||||
|
||||
matrix-synapse (0.25.1-1) jessie; urgency=medium
|
||||
|
||||
* New upstream version 0.25.1
|
||||
|
||||
-- Erik Johnston <erik@matrix.org> Mon, 20 Nov 2017 10:05:37 +0000
|
||||
|
||||
matrix-synapse (0.25.0-1) jessie; urgency=medium
|
||||
|
||||
* New upstream version 0.25.0
|
||||
|
||||
-- Erik Johnston <erik@matrix.org> Wed, 15 Nov 2017 11:36:32 +0000
|
||||
|
||||
matrix-synapse (0.24.1-1) jessie; urgency=medium
|
||||
|
||||
* New upstream version 0.24.1
|
||||
|
||||
-- Erik Johnston <erik@matrix.org> Tue, 24 Oct 2017 15:05:03 +0100
|
||||
|
||||
matrix-synapse (0.24.0-1) jessie; urgency=medium
|
||||
|
||||
* New upstream version 0.24.0
|
||||
|
||||
-- Erik Johnston <erik@matrix.org> Mon, 23 Oct 2017 14:11:46 +0100
|
||||
|
||||
matrix-synapse (0.23.1-1) xenial; urgency=medium
|
||||
|
||||
* Imported upstream version 0.23.1
|
||||
|
||||
-- Erik Johnston <erikj@matrix.org> Thu, 05 Oct 2017 15:28:25 +0100
|
||||
|
||||
matrix-synapse (0.23.0-1) jessie; urgency=medium
|
||||
|
||||
* Fix patch after refactor
|
||||
* Add patch to remove requirement on affinity package
|
||||
* refresh webclient patch
|
||||
|
||||
-- Erik Johnston <erikj@matrix.org> Mon, 02 Oct 2017 15:34:57 +0100
|
||||
|
||||
matrix-synapse (0.22.1-1) jessie; urgency=medium
|
||||
|
||||
* Imported Upstream version 0.22.1
|
||||
|
||||
-- Erik Johnston <erikj@matrix.org> Thu, 06 Jul 2017 18:14:13 +0100
|
||||
|
||||
matrix-synapse (0.22.0-1) jessie; urgency=medium
|
||||
|
||||
* Imported upstream version 0.22.0
|
||||
|
||||
-- Erik Johnston <erikj@matrix.org> Thu, 06 Jul 2017 10:47:45 +0100
|
||||
|
||||
matrix-synapse (0.21.1-1) jessie; urgency=medium
|
||||
|
||||
* Imported upstream version 0.21.1
|
||||
|
||||
-- Erik Johnston <erikj@matrix.org> Thu, 15 Jun 2017 13:31:13 +0100
|
||||
|
||||
matrix-synapse (0.21.0-1) jessie; urgency=medium
|
||||
|
||||
* Imported upstream version 0.21.0
|
||||
* Update patches
|
||||
|
||||
-- Erik Johnston <erikj@matrix.org> Thu, 18 May 2017 14:16:54 +0100
|
||||
|
||||
matrix-synapse (0.20.0-2) jessie; urgency=medium
|
||||
|
||||
* Depend on python-jsonschema
|
||||
|
||||
-- Erik Johnston <erikj@matrix.org> Wed, 12 Apr 2017 10:41:46 +0100
|
||||
|
||||
matrix-synapse (0.20.0-1) jessie; urgency=medium
|
||||
|
||||
* Imported upstream version 0.20.0
|
||||
|
||||
-- Erik Johnston <erikj@matrix.org> Tue, 11 Apr 2017 12:58:26 +0100
|
||||
|
||||
matrix-synapse (0.19.3-1) jessie; urgency=medium
|
||||
|
||||
* Imported upstream version 0.19.3
|
||||
|
||||
-- Erik Johnston <erikj@matrix.org> Tue, 21 Mar 2017 13:45:41 +0000
|
||||
|
||||
matrix-synapse (0.19.2-1) jessie; urgency=medium
|
||||
|
||||
[ Sunil Mohan Adapa ]
|
||||
* Bump standards version to 3.9.8
|
||||
* Add debian/copyright file
|
||||
* Don't ignore errors in debian/config
|
||||
* Reformat depenedencies in debian/control
|
||||
* Internationalize strings in template file
|
||||
* Update package description
|
||||
* Add lsb-base as dependency
|
||||
* Update questions for debconf style
|
||||
* Add man pages for all binaries
|
||||
|
||||
[ Erik Johnston ]
|
||||
* Imported upstream version 0.19.2
|
||||
|
||||
-- Erik Johnston <erikj@matrix.org> Tue, 21 Feb 2017 13:55:00 +0000
|
||||
|
||||
matrix-synapse (0.19.1-1) jessie; urgency=medium
|
||||
|
||||
* Imported upstream version 0.19.1
|
||||
|
||||
-- Erik Johnston <erikj@matrix.org> Thu, 09 Feb 2017 11:53:27 +0000
|
||||
|
||||
matrix-synapse (0.19.0-1) jessie; urgency=medium
|
||||
|
||||
This build requires python-twisted 0.19.0, which may need to be installed
|
||||
from backports.
|
||||
|
||||
[ Bryce Chidester ]
|
||||
* Add EnvironmentFile to the systemd service
|
||||
* Create matrix-synapse.default
|
||||
|
||||
[ Erik Johnston ]
|
||||
* Imported upstream version 0.19.0
|
||||
|
||||
-- Erik Johnston <erikj@matrix.org> Sat, 04 Feb 2017 09:58:29 +0000
|
||||
|
||||
matrix-synapse (0.18.7-1) trusty; urgency=medium
|
||||
|
||||
* Imported Upstream version 0.18.4
|
||||
|
||||
-- Erik Johnston <erikj@matrix.org> Mon, 09 Jan 2017 15:10:21 +0000
|
||||
|
||||
matrix-synapse (0.18.5-1) trusty; urgency=medium
|
||||
|
||||
* Imported Upstream version 0.18.5
|
||||
|
||||
-- Erik Johnston <erikj@matrix.org> Fri, 16 Dec 2016 10:51:59 +0000
|
||||
|
||||
matrix-synapse (0.18.4-1) trusty; urgency=medium
|
||||
|
||||
* Imported Upstream version 0.18.4
|
||||
|
||||
-- Erik Johnston <erikj@matrix.org> Tue, 22 Nov 2016 10:33:41 +0000
|
||||
|
||||
matrix-synapse (0.18.3-1) trusty; urgency=medium
|
||||
|
||||
* Imported Upstream version 0.18.3
|
||||
* Remove upstreamed ldap3 patch
|
||||
|
||||
-- Erik Johnston <erikj@matrix.org> Tue, 08 Nov 2016 15:01:49 +0000
|
||||
|
||||
matrix-synapse (0.18.2-2) trusty; urgency=high
|
||||
|
||||
* Patch ldap3 support to workaround differences in python-ldap3 0.9,
|
||||
bug allowed unauthorized logins if ldap3 0.9 was used.
|
||||
|
||||
-- Erik Johnston <erikj@matrix.org> Tue, 08 Nov 2016 13:48:09 +0000
|
||||
|
||||
matrix-synapse (0.18.2-1) trusty; urgency=medium
|
||||
|
||||
* Imported Upstream version 0.18.2
|
||||
|
||||
-- Erik Johnston <erikj@matrix.org> Tue, 01 Nov 2016 13:30:45 +0000
|
||||
|
||||
matrix-synapse (0.18.1-1) trusty; urgency=medium
|
||||
|
||||
* Imported Upstream version 0.18.1
|
||||
|
||||
-- Erik Johnston <erikj@matrix.org> Wed, 05 Oct 2016 14:52:53 +0100
|
||||
|
||||
matrix-synapse (0.18.0-1) trusty; urgency=medium
|
||||
|
||||
* Imported Upstream version 0.18.0
|
||||
|
||||
-- Erik Johnston <erikj@matrix.org> Mon, 19 Sep 2016 17:38:48 +0100
|
||||
|
||||
matrix-synapse (0.17.3-1) trusty; urgency=medium
|
||||
|
||||
* Imported Upstream version 0.17.3
|
||||
|
||||
-- Erik Johnston <erikj@matrix.org> Fri, 09 Sep 2016 11:18:18 +0100
|
||||
|
||||
matrix-synapse (0.17.2-1) trusty; urgency=medium
|
||||
|
||||
* Imported Upstream version 0.17.2
|
||||
|
||||
-- Erik Johnston <erikj@matrix.org> Thu, 08 Sep 2016 15:37:14 +0100
|
||||
|
||||
matrix-synapse (0.17.1-1) trusty; urgency=medium
|
||||
|
||||
* Imported Upstream version 0.17.1
|
||||
|
||||
-- Erik Johnston <erikj@matrix.org> Wed, 24 Aug 2016 15:11:29 +0100
|
||||
|
||||
matrix-synapse (0.17.0-1) trusty; urgency=medium
|
||||
|
||||
* Imported Upstream version 0.17.0
|
||||
|
||||
-- Erik Johnston <erikj@matrix.org> Mon, 08 Aug 2016 13:56:15 +0100
|
||||
|
||||
matrix-synapse (0.16.1-r1-1) trusty; urgency=medium
|
||||
|
||||
* Imported Upstream version 0.16.1-r1
|
||||
|
||||
-- Erik Johnston <erikj@matrix.org> Fri, 08 Jul 2016 16:47:35 +0100
|
||||
|
||||
matrix-synapse (0.16.1-2) trusty; urgency=critical
|
||||
|
||||
* Apply security patch
|
||||
|
||||
-- Erik Johnston <erikj@matrix.org> Fri, 08 Jul 2016 11:05:27 +0100
|
||||
|
||||
matrix-synapse (0.16.1-1) trusty; urgency=medium
|
||||
|
||||
* New upstream release
|
||||
|
||||
-- Erik Johnston <erikj@matrix.org> Tue, 21 Jun 2016 14:56:48 +0100
|
||||
|
||||
matrix-synapse (0.16.0-3) trusty; urgency=medium
|
||||
|
||||
* Don't require strict nacl==0.3.0 requirement
|
||||
|
||||
-- Erik Johnston <erikj@matrix.org> Mon, 20 Jun 2016 13:24:22 +0100
|
||||
|
||||
matrix-synapse (0.16.0-2) trusty; urgency=medium
|
||||
|
||||
* Also change the permissions of /etc/matrix-synapse
|
||||
* Add apt webclient instructions
|
||||
* Fix up patches
|
||||
* Update default homeserver.yaml
|
||||
* Add patch
|
||||
|
||||
-- Erik Johnston <erikj@matrix.org> Fri, 10 Jun 2016 14:06:20 +0100
|
||||
|
||||
matrix-synapse (0.16.0-1) trusty; urgency=medium
|
||||
|
||||
[ David A Roberts ]
|
||||
* systemd
|
||||
|
||||
[ Erik Johnston ]
|
||||
* Fixup postinst and matrix-synapse.service
|
||||
* Handle email optional deps
|
||||
* New upstream release
|
||||
|
||||
-- Erik Johnston <erikj@matrix.org> Thu, 09 Jun 2016 16:17:01 +0100
|
||||
|
||||
matrix-synapse (0.14.0-1) trusty; urgency=medium
|
||||
|
||||
* Remove saml2 module requirements
|
||||
|
||||
-- Erik Johnston <erikj@matrix.org> Wed, 30 Mar 2016 14:31:17 +0100
|
||||
|
||||
matrix-synapse (0.13.3-1) trusty; urgency=medium
|
||||
|
||||
* New upstream release
|
||||
|
||||
-- Erik Johnston <erikj@matrix.org> Thu, 11 Feb 2016 16:35:39 +0000
|
||||
|
||||
matrix-synapse (0.13.2-1) trusty; urgency=medium
|
||||
|
||||
* New upstream release
|
||||
|
||||
-- Erik Johnston <erikj@matrix.org> Thu, 11 Feb 2016 11:01:16 +0000
|
||||
|
||||
matrix-synapse (0.13.0-1) trusty; urgency=medium
|
||||
|
||||
* New upstream release
|
||||
|
||||
-- Erik Johnston <erikj@matrix.org> Wed, 10 Feb 2016 16:34:39 +0000
|
||||
|
||||
matrix-synapse (0.12.0-2) trusty; urgency=medium
|
||||
|
||||
* Don't default `registerion_shared_secret` config option
|
||||
|
||||
-- Erik Johnston <erikj@matrix.org> Wed, 06 Jan 2016 16:34:02 +0000
|
||||
|
||||
matrix-synapse (0.12.0-1) stable; urgency=medium
|
||||
|
||||
* Imported Upstream version 0.12.0
|
||||
|
||||
-- Mark Haines <mark@matrix.org> Mon, 04 Jan 2016 15:38:33 +0000
|
||||
|
||||
matrix-synapse (0.11.1-1) unstable; urgency=medium
|
||||
|
||||
* Imported Upstream version 0.11.1
|
||||
|
||||
-- Erik Johnston <erikj@matrix.org> Fri, 20 Nov 2015 17:56:52 +0000
|
||||
|
||||
matrix-synapse (0.11.0-r2-1) stable; urgency=medium
|
||||
|
||||
* Imported Upstream version 0.11.0-r2
|
||||
* Add gbp.conf
|
||||
|
||||
-- Erik Johnston <erikj@matrix.org> Thu, 19 Nov 2015 13:52:36 +0000
|
||||
|
||||
matrix-synapse (0.11.0-1) wheezy; urgency=medium
|
||||
|
||||
* Fix dependencies.
|
||||
|
||||
-- Erik Johnston <erikj@matrix.org> Tue, 17 Nov 2015 16:28:06 +0000
|
||||
|
||||
matrix-synapse (0.11.0-0) wheezy; urgency=medium
|
||||
|
||||
* New upstream release
|
||||
|
||||
-- Erik Johnston <erikj@matrix.org> Tue, 17 Nov 2015 16:03:01 +0000
|
||||
|
||||
matrix-synapse (0.10.0-2) wheezy; urgency=medium
|
||||
|
||||
* Rebuild for wheezy.
|
||||
|
||||
-- Erik Johnston <erikj@matrix.org> Fri, 04 Sep 2015 14:21:03 +0100
|
||||
|
||||
matrix-synapse (0.10.0-1) trusty; urgency=medium
|
||||
|
||||
* New upstream release
|
||||
|
||||
-- Erik Johnston <erikj@matrix.org> Thu, 03 Sep 2015 10:08:34 +0100
|
||||
|
||||
matrix-synapse (0.10.0~rc6-3) trusty; urgency=medium
|
||||
|
||||
* Create log directory.
|
||||
|
||||
-- Erik Johnston <erikj@matrix.org> Wed, 02 Sep 2015 17:49:07 +0100
|
||||
|
||||
matrix-synapse (0.10.0~rc6-2) trusty; urgency=medium
|
||||
|
||||
* Add patch to work around upstream bug in config directory handling.
|
||||
|
||||
-- Erik Johnston <erikj@matrix.org> Wed, 02 Sep 2015 17:42:42 +0100
|
||||
|
||||
matrix-synapse (0.10.0~rc6-1) trusty; urgency=medium
|
||||
|
||||
* New upstream release
|
||||
|
||||
-- Erik Johnston <erikj@matrix.org> Wed, 02 Sep 2015 17:21:21 +0100
|
||||
|
||||
matrix-synapse (0.10.0~rc5-3) trusty; urgency=medium
|
||||
|
||||
* Update init script to work.
|
||||
|
||||
-- Erik Johnston <erikj@matrix.org> Fri, 28 Aug 2015 10:51:56 +0100
|
||||
|
||||
matrix-synapse (0.10.0~rc5-2) trusty; urgency=medium
|
||||
|
||||
* Fix where python files are installed.
|
||||
|
||||
-- Erik Johnston <erikj@matrix.org> Thu, 27 Aug 2015 11:55:39 +0100
|
||||
|
||||
matrix-synapse (0.10.0~rc5-1) trusty; urgency=medium
|
||||
|
||||
* New upstream release
|
||||
|
||||
-- Erik Johnston <erikj@matrix.org> Thu, 27 Aug 2015 11:26:54 +0100
|
||||
|
||||
matrix-synapse (0.10.0~rc4-1) trusty; urgency=medium
|
||||
|
||||
* New upstream version.
|
||||
|
||||
-- Erik Johnston <erikj@matrix.org> Thu, 27 Aug 2015 10:29:31 +0100
|
||||
|
||||
matrix-synapse (0.10.0~rc3-7) trusty; urgency=medium
|
||||
|
||||
* Add debian/watch
|
||||
|
||||
-- Erik Johnston <erikj@matrix.org> Wed, 26 Aug 2015 17:57:08 +0100
|
||||
|
||||
matrix-synapse (0.10.0~rc3-6) trusty; urgency=medium
|
||||
|
||||
* Deps.
|
||||
|
||||
-- Erik Johnston <erikj@matrix.org> Wed, 26 Aug 2015 17:07:13 +0100
|
||||
|
||||
matrix-synapse (0.10.0~rc3-5) trusty; urgency=medium
|
||||
|
||||
* Deps.
|
||||
|
||||
-- Erik Johnston <erikj@matrix.org> Wed, 26 Aug 2015 16:18:02 +0100
|
||||
|
||||
matrix-synapse (0.10.0~rc3-4) trusty; urgency=medium
|
||||
|
||||
* More deps.
|
||||
|
||||
-- Erik Johnston <erikj@matrix.org> Wed, 26 Aug 2015 14:09:27 +0100
|
||||
|
||||
matrix-synapse (0.10.0~rc3-3) trusty; urgency=medium
|
||||
|
||||
* Update deps.
|
||||
|
||||
-- Erik Johnston <erikj@matrix.org> Wed, 26 Aug 2015 13:49:20 +0100
|
||||
|
||||
matrix-synapse (0.10.0~rc3-2) trusty; urgency=medium
|
||||
|
||||
* Add more deps.
|
||||
|
||||
-- Erik Johnston <erikj@matrix.org> Wed, 26 Aug 2015 13:25:45 +0100
|
||||
|
||||
matrix-synapse (0.10.0~rc3-1) trusty; urgency=medium
|
||||
|
||||
* New upstream release
|
||||
|
||||
-- Erik Johnston <erikj@matrix.org> Tue, 25 Aug 2015 17:52:33 +0100
|
||||
|
||||
matrix-synapse (0.9.3-1~trusty1) trusty; urgency=medium
|
||||
|
||||
* Rebuild for trusty.
|
||||
|
||||
-- Erik Johnston <erikj@matrix.org> Thu, 20 Aug 2015 15:05:43 +0100
|
||||
|
||||
matrix-synapse (0.9.3-1) wheezy; urgency=medium
|
||||
|
||||
* New upstream release
|
||||
* Create a user, "matrix-synapse", to run as
|
||||
* Log to /var/log/matrix-synapse/ directory
|
||||
* Override the way synapse looks for the angular SDK (syweb) so it finds the
|
||||
packaged one
|
||||
|
||||
-- Paul "LeoNerd" Evans <paul@matrix.org> Fri, 07 Aug 2015 15:32:12 +0100
|
||||
|
||||
matrix-synapse (0.9.2-2) wheezy; urgency=medium
|
||||
|
||||
* Supply a default config file
|
||||
* Create directory in /var/lib
|
||||
* Use debconf to ask the user for the server name at installation time
|
||||
|
||||
-- Paul "LeoNerd" Evans <paul@matrix.org> Thu, 06 Aug 2015 15:28:00 +0100
|
||||
|
||||
matrix-synapse (0.9.2-1) wheezy; urgency=low
|
||||
|
||||
* source package automatically created by stdeb 0.8.2
|
||||
|
||||
-- Paul "LeoNerd" Evans <paul@matrix.org> Fri, 12 Jun 2015 14:32:03 +0100
|
||||
1  debian/compat  vendored
@@ -1 +0,0 @@
9
9  debian/config  vendored
@@ -1,9 +0,0 @@
#!/bin/sh

set -e

. /usr/share/debconf/confmodule

db_input high matrix-synapse/server-name || true
db_input high matrix-synapse/report-stats || true
db_go
37  debian/control  vendored
@@ -1,37 +0,0 @@
|
||||
Source: matrix-synapse-py3
|
||||
Section: contrib/python
|
||||
Priority: extra
|
||||
Maintainer: Synapse Packaging team <packages@matrix.org>
|
||||
Build-Depends:
|
||||
debhelper (>= 9),
|
||||
dh-systemd,
|
||||
dh-virtualenv (>= 1.0),
|
||||
lsb-release,
|
||||
python3-dev,
|
||||
python3,
|
||||
python3-setuptools,
|
||||
python3-pip,
|
||||
python3-venv,
|
||||
tar,
|
||||
Standards-Version: 3.9.5
|
||||
Homepage: https://github.com/matrix-org/synapse
|
||||
|
||||
Package: matrix-synapse-py3
|
||||
Architecture: amd64
|
||||
Conflicts: matrix-synapse
|
||||
Pre-Depends: dpkg (>= 1.16.1)
|
||||
Depends:
|
||||
adduser,
|
||||
debconf,
|
||||
python3-distutils|libpython3-stdlib (<< 3.6),
|
||||
python3,
|
||||
${misc:Depends},
|
||||
# some of our scripts use perl, but none of them are important,
|
||||
# so we put perl:Depends in Suggests rather than Depends.
|
||||
Suggests:
|
||||
sqlite3,
|
||||
${perl:Depends},
|
||||
Description: Open federated Instant Messaging and VoIP server
|
||||
Matrix is an ambitious new ecosystem for open federated Instant
|
||||
Messaging and VoIP. Synapse is a reference Matrix server
|
||||
implementation.
|
||||
118  debian/copyright  vendored
@@ -1,118 +0,0 @@
|
||||
Format: http://www.debian.org/doc/packaging-manuals/copyright-format/1.0/
|
||||
Upstream-Name: synapse
|
||||
Source: https://github.com/matrix-org/synapse
|
||||
|
||||
Files: *
|
||||
Copyright: 2014-2017, OpenMarket Ltd, 2017-2018 New Vector Ltd
|
||||
License: Apache-2.0
|
||||
|
||||
Files: synapse/config/saml2.py
|
||||
Copyright: 2015, Ericsson
|
||||
License: Apache-2.0
|
||||
|
||||
Files: synapse/config/jwt.py
|
||||
Copyright: 2015, Niklas Riekenbrauck
|
||||
License: Apache-2.0
|
||||
|
||||
Files: synapse/config/workers.py
|
||||
Copyright: 2016, matrix.org
|
||||
License: Apache-2.0
|
||||
|
||||
Files: synapse/config/repository.py
|
||||
Copyright: 2014-2015, matrix.org
|
||||
License: Apache-2.0

Files: contrib/jitsimeetbridge/unjingle/strophe/base64.js
Copyright: Public Domain (Tyler Akins http://rumkin.com)
License: public-domain
This code was written by Tyler Akins and has been placed in the
public domain. It would be nice if you left this header intact.
Base64 code from Tyler Akins -- http://rumkin.com

Files: contrib/jitsimeetbridge/unjingle/strophe/md5.js
Copyright: 1999-2002, Paul Johnston & Contributors
License: BSD-3-clause

Files: contrib/jitsimeetbridge/unjingle/strophe/strophe.js
Copyright: 2006-2008, OGG, LLC
License: Expat

Files: contrib/jitsimeetbridge/unjingle/strophe/XMLHttpRequest.js
Copyright: 2010 passive.ly LLC
License: Expat

Files: contrib/jitsimeetbridge/unjingle/*.js
Copyright: 2014 Jitsi
License: Apache-2.0

Files: debian/*
Copyright: 2016-2017, Erik Johnston <erik@matrix.org>
 2017, Rahul De <rahulde@swecha.net>
 2017, Sunil Mohan Adapa <sunil@medhas.org>
License: Apache-2.0

License: Apache-2.0
 Licensed under the Apache License, Version 2.0 (the "License");
 you may not use this file except in compliance with the License.
 You may obtain a copy of the License at
 .
 http://www.apache.org/licenses/LICENSE-2.0
 .
 Unless required by applicable law or agreed to in writing, software
 distributed under the License is distributed on an "AS IS" BASIS,
 WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 See the License for the specific language governing permissions and
 limitations under the License.
 .
 On Debian systems, the full text of the Apache License version
 2.0 can be found in the file
 `/usr/share/common-licenses/Apache-2.0'.

License: BSD-3-clause
 Redistribution and use in source and binary forms, with or without
 modification, are permitted provided that the following conditions
 are met:
 .
 Redistributions of source code must retain the above copyright
 notice, this list of conditions and the following
 disclaimer. Redistributions in binary form must reproduce the above
 copyright notice, this list of conditions and the following
 disclaimer in the documentation and/or other materials provided with
 the distribution.
 .
 Neither the name of the author nor the names of its contributors may
 be used to endorse or promote products derived from this software
 without specific prior written permission.
 .
 THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

License: Expat
 Permission is hereby granted, free of charge, to any person obtaining
 a copy of this software and associated documentation files (the
 "Software"), to deal in the Software without restriction, including
 without limitation the rights to use, copy, modify, merge, publish,
 distribute, sublicense, and/or sell copies of the Software, and to
 permit persons to whom the Software is furnished to do so, subject to
 the following conditions:
 .
 The above copyright notice and this permission notice shall be
 included in all copies or substantial portions of the Software.
 .
 THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
 EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
 NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
 BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
 ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
 CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 SOFTWARE.
3 debian/dirs vendored
@@ -1,3 +0,0 @@
etc/matrix-synapse
var/lib/matrix-synapse
var/log/matrix-synapse
90 debian/hash_password.1 vendored
@@ -1,90 +0,0 @@
.\" generated with Ronn/v0.7.3
.\" http://github.com/rtomayko/ronn/tree/0.7.3
.
.TH "HASH_PASSWORD" "1" "February 2017" "" ""
.
.SH "NAME"
\fBhash_password\fR \- Calculate the hash of a new password, so that passwords can be reset
.
.SH "SYNOPSIS"
\fBhash_password\fR [\fB\-p\fR|\fB\-\-password\fR [password]] [\fB\-c\fR|\fB\-\-config\fR \fIfile\fR]
.
.SH "DESCRIPTION"
\fBhash_password\fR calculates the hash of a supplied password using bcrypt\.
.
.P
\fBhash_password\fR takes a password as an parameter either on the command line or the \fBSTDIN\fR if not supplied\.
.
.P
It accepts an YAML file which can be used to specify parameters like the number of rounds for bcrypt and password_config section having the pepper value used for the hashing\. By default \fBbcrypt_rounds\fR is set to \fB10\fR\.
.
.P
The hashed password is written on the \fBSTDOUT\fR\.
.
.SH "FILES"
A sample YAML file accepted by \fBhash_password\fR is described below:
.
.P
bcrypt_rounds: 17 password_config: pepper: "random hashing pepper"
.
.SH "OPTIONS"
.
.TP
\fB\-p\fR, \fB\-\-password\fR
Read the password form the command line if [password] is supplied\. If not, prompt the user and read the password form the \fBSTDIN\fR\. It is not recommended to type the password on the command line directly\. Use the STDIN instead\.
.
.TP
\fB\-c\fR, \fB\-\-config\fR
Read the supplied YAML \fIfile\fR containing the options \fBbcrypt_rounds\fR and the \fBpassword_config\fR section containing the \fBpepper\fR value\.
.
.SH "EXAMPLES"
Hash from the command line:
.
.IP "" 4
.
.nf

$ hash_password \-p "p@ssw0rd"
$2b$12$VJNqWQYfsWTEwcELfoSi4Oa8eA17movHqqi8\.X8fWFpum7SxZ9MFe
.
.fi
.
.IP "" 0
.
.P
Hash from the STDIN:
.
.IP "" 4
.
.nf

$ hash_password
Password:
Confirm password:
$2b$12$AszlvfmJl2esnyhmn8m/kuR2tdXgROWtWxnX\.rcuAbM8ErLoUhybG
.
.fi
.
.IP "" 0
.
.P
Using a config file:
.
.IP "" 4
.
.nf

$ hash_password \-c config\.yml
Password:
Confirm password:
$2b$12$CwI\.wBNr\.w3kmiUlV3T5s\.GT2wH7uebDCovDrCOh18dFedlANK99O
.
.fi
.
.IP "" 0
.
.SH "COPYRIGHT"
This man page was written by Rahul De <\fIrahulde@swecha\.net\fR> for Debian GNU/Linux distribution\.
.
.SH "SEE ALSO"
synctl(1), synapse_port_db(1), register_new_matrix_user(1)
69 debian/hash_password.ronn vendored
@@ -1,69 +0,0 @@
hash_password(1) -- Calculate the hash of a new password, so that passwords can be reset
========================================================================================

## SYNOPSIS

`hash_password` [`-p`|`--password` [password]] [`-c`|`--config` <file>]

## DESCRIPTION

**hash_password** calculates the hash of a supplied password using bcrypt.

`hash_password` takes a password as an parameter either on the command line
or the `STDIN` if not supplied.

It accepts an YAML file which can be used to specify parameters like the
number of rounds for bcrypt and password_config section having the pepper
value used for the hashing. By default `bcrypt_rounds` is set to **10**.

The hashed password is written on the `STDOUT`.

## FILES

A sample YAML file accepted by `hash_password` is described below:

    bcrypt_rounds: 17
    password_config:
      pepper: "random hashing pepper"

## OPTIONS

  * `-p`, `--password`:
    Read the password form the command line if [password] is supplied.
    If not, prompt the user and read the password form the `STDIN`.
    It is not recommended to type the password on the command line
    directly. Use the STDIN instead.

  * `-c`, `--config`:
    Read the supplied YAML <file> containing the options `bcrypt_rounds`
    and the `password_config` section containing the `pepper` value.

## EXAMPLES

Hash from the command line:

    $ hash_password -p "p@ssw0rd"
    $2b$12$VJNqWQYfsWTEwcELfoSi4Oa8eA17movHqqi8.X8fWFpum7SxZ9MFe

Hash from the STDIN:

    $ hash_password
    Password:
    Confirm password:
    $2b$12$AszlvfmJl2esnyhmn8m/kuR2tdXgROWtWxnX.rcuAbM8ErLoUhybG

Using a config file:

    $ hash_password -c config.yml
    Password:
    Confirm password:
    $2b$12$CwI.wBNr.w3kmiUlV3T5s.GT2wH7uebDCovDrCOh18dFedlANK99O

## COPYRIGHT

This man page was written by Rahul De <<rahulde@swecha.net>>
for Debian GNU/Linux distribution.

## SEE ALSO

synctl(1), synapse_port_db(1), register_new_matrix_user(1)
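As a rough illustration of the FILES and OPTIONS sections above, the config-driven, prompt-based flow might look like the sketch below. The file name, round count and pepper string are placeholders invented for the example, and the printed hash is illustrative only.

    # Hypothetical config file; keys match those described in FILES above.
    $ cat > hash_config.yml <<'EOF'
    bcrypt_rounds: 14
    password_config:
      pepper: "example pepper value"
    EOF

    # Prompted interactively, as recommended instead of passing -p on the command line.
    $ hash_password -c hash_config.yml
    Password:
    Confirm password:
    $2b$14$...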
617 debian/homeserver.yaml vendored
@@ -1,617 +0,0 @@
# vim:ft=yaml
# PEM encoded X509 certificate for TLS.
# You can replace the self-signed certificate that synapse
# autogenerates on launch with your own SSL certificate + key pair
# if you like. Any required intermediary certificates can be
# appended after the primary certificate in hierarchical order.
tls_certificate_path: "/etc/matrix-synapse/homeserver.tls.crt"

# PEM encoded private key for TLS
tls_private_key_path: "/etc/matrix-synapse/homeserver.tls.key"

# PEM dh parameters for ephemeral keys
tls_dh_params_path: "/etc/matrix-synapse/homeserver.tls.dh"

# Don't bind to the https port
no_tls: False

# List of allowed TLS fingerprints for this server to publish along
# with the signing keys for this server. Other matrix servers that
# make HTTPS requests to this server will check that the TLS
# certificates returned by this server match one of the fingerprints.
#
# Synapse automatically adds the fingerprint of its own certificate
# to the list. So if federation traffic is handled directly by synapse
# then no modification to the list is required.
#
# If synapse is run behind a load balancer that handles the TLS then it
# will be necessary to add the fingerprints of the certificates used by
# the loadbalancers to this list if they are different to the one
# synapse is using.
#
# Homeservers are permitted to cache the list of TLS fingerprints
# returned in the key responses up to the "valid_until_ts" returned in
# key. It may be necessary to publish the fingerprints of a new
# certificate and wait until the "valid_until_ts" of the previous key
# responses have passed before deploying it.
#
# You can calculate a fingerprint from a given TLS listener via:
# openssl s_client -connect $host:$port < /dev/null 2> /dev/null |
# openssl x509 -outform DER | openssl sha256 -binary | base64 | tr -d '='
# or by checking matrix.org/federationtester/api/report?server_name=$host
#
tls_fingerprints: []
# tls_fingerprints: [{"sha256": "<base64_encoded_sha256_fingerprint>"}]


## Server ##

# When running as a daemon, the file to store the pid in
pid_file: "/var/run/matrix-synapse.pid"

# CPU affinity mask. Setting this restricts the CPUs on which the
# process will be scheduled. It is represented as a bitmask, with the
# lowest order bit corresponding to the first logical CPU and the
# highest order bit corresponding to the last logical CPU. Not all CPUs
# may exist on a given system but a mask may specify more CPUs than are
# present.
#
# For example:
# 0x00000001 is processor #0,
# 0x00000003 is processors #0 and #1,
# 0xFFFFFFFF is all processors (#0 through #31).
#
# Pinning a Python process to a single CPU is desirable, because Python
# is inherently single-threaded due to the GIL, and can suffer a
# 30-40% slowdown due to cache blow-out and thread context switching
# if the scheduler happens to schedule the underlying threads across
# different cores. See
# https://www.mirantis.com/blog/improve-performance-python-programs-restricting-single-cpu/.
#
# cpu_affinity: 0xFFFFFFFF

# The path to the web client which will be served at /_matrix/client/
# if 'webclient' is configured under the 'listeners' configuration.
#
# web_client_location: "/path/to/web/root"

# The public-facing base URL for the client API (not including _matrix/...)
# public_baseurl: https://example.com:8448/

# Set the soft limit on the number of file descriptors synapse can use
# Zero is used to indicate synapse should set the soft limit to the
# hard limit.
soft_file_limit: 0

# The GC threshold parameters to pass to `gc.set_threshold`, if defined
# gc_thresholds: [700, 10, 10]

# Set the limit on the returned events in the timeline in the get
# and sync operations. The default value is -1, means no upper limit.
# filter_timeline_limit: 5000

# Whether room invites to users on this server should be blocked
# (except those sent by local server admins). The default is False.
# block_non_admin_invites: True

# Restrict federation to the following whitelist of domains.
# N.B. we recommend also firewalling your federation listener to limit
# inbound federation traffic as early as possible, rather than relying
# purely on this application-layer restriction. If not specified, the
# default is to whitelist everything.
#
# federation_domain_whitelist:
# - lon.example.com
# - nyc.example.com
# - syd.example.com

# List of ports that Synapse should listen on, their purpose and their
# configuration.
listeners:
  # Main HTTPS listener
  # For when matrix traffic is sent directly to synapse.
  -
    # The port to listen for HTTPS requests on.
    port: 8448

    # Local addresses to listen on.
    # On Linux and Mac OS, `::` will listen on all IPv4 and IPv6
    # addresses by default. For most other OSes, this will only listen
    # on IPv6.
    bind_addresses:
      - '::'
      - '0.0.0.0'

    # This is a 'http' listener, allows us to specify 'resources'.
    type: http

    tls: true

    # Use the X-Forwarded-For (XFF) header as the client IP and not the
    # actual client IP.
    x_forwarded: false

    # List of HTTP resources to serve on this listener.
    resources:
      -
        # List of resources to host on this listener.
        names:
          - client     # The client-server APIs, both v1 and v2
          - webclient  # The bundled webclient.

        # Should synapse compress HTTP responses to clients that support it?
        # This should be disabled if running synapse behind a load balancer
        # that can do automatic compression.
        compress: true

      - names: [federation]  # Federation APIs
        compress: false

    # optional list of additional endpoints which can be loaded via
    # dynamic modules
    # additional_resources:
    #   "/_matrix/my/custom/endpoint":
    #     module: my_module.CustomRequestHandler
    #     config: {}

  # Unsecure HTTP listener,
  # For when matrix traffic passes through loadbalancer that unwraps TLS.
  - port: 8008
    tls: false
    bind_addresses: ['::', '0.0.0.0']
    type: http

    x_forwarded: false

    resources:
      - names: [client, webclient]
        compress: true
      - names: [federation]
        compress: false

  # Turn on the twisted ssh manhole service on localhost on the given
  # port.
  # - port: 9000
  #   bind_addresses: ['::1', '127.0.0.1']
  #   type: manhole


# Database configuration
database:
  # The database engine name
  name: "sqlite3"
  # Arguments to pass to the engine
  args:
    # Path to the database
    database: "/var/lib/matrix-synapse/homeserver.db"

# Number of events to cache in memory.
event_cache_size: "10K"


# A yaml python logging config file
log_config: "/etc/matrix-synapse/log.yaml"


## Ratelimiting ##

# Number of messages a client can send per second
rc_messages_per_second: 0.2

# Number of message a client can send before being throttled
rc_message_burst_count: 10.0

# The federation window size in milliseconds
federation_rc_window_size: 1000

# The number of federation requests from a single server in a window
# before the server will delay processing the request.
federation_rc_sleep_limit: 10

# The duration in milliseconds to delay processing events from
# remote servers by if they go over the sleep limit.
federation_rc_sleep_delay: 500

# The maximum number of concurrent federation requests allowed
# from a single server
federation_rc_reject_limit: 50

# The number of federation requests to concurrently process from a
# single server
federation_rc_concurrent: 3


# Directory where uploaded images and attachments are stored.
media_store_path: "/var/lib/matrix-synapse/media"

# Media storage providers allow media to be stored in different
# locations.
# media_storage_providers:
# - module: file_system
#   # Whether to write new local files.
#   store_local: false
#   # Whether to write new remote media
#   store_remote: false
#   # Whether to block upload requests waiting for write to this
#   # provider to complete
#   store_synchronous: false
#   config:
#     directory: /mnt/some/other/directory

# Directory where in-progress uploads are stored.
uploads_path: "/var/lib/matrix-synapse/uploads"

# The largest allowed upload size in bytes
max_upload_size: "10M"

# Maximum number of pixels that will be thumbnailed
max_image_pixels: "32M"

# Whether to generate new thumbnails on the fly to precisely match
# the resolution requested by the client. If true then whenever
# a new resolution is requested by the client the server will
# generate a new thumbnail. If false the server will pick a thumbnail
# from a precalculated list.
dynamic_thumbnails: false

# List of thumbnail to precalculate when an image is uploaded.
thumbnail_sizes:
- width: 32
  height: 32
  method: crop
- width: 96
  height: 96
  method: crop
- width: 320
  height: 240
  method: scale
- width: 640
  height: 480
  method: scale
- width: 800
  height: 600
  method: scale

# Is the preview URL API enabled? If enabled, you *must* specify
# an explicit url_preview_ip_range_blacklist of IPs that the spider is
# denied from accessing.
url_preview_enabled: False

# List of IP address CIDR ranges that the URL preview spider is denied
# from accessing. There are no defaults: you must explicitly
# specify a list for URL previewing to work. You should specify any
# internal services in your network that you do not want synapse to try
# to connect to, otherwise anyone in any Matrix room could cause your
# synapse to issue arbitrary GET requests to your internal services,
# causing serious security issues.
#
# url_preview_ip_range_blacklist:
# - '127.0.0.0/8'
# - '10.0.0.0/8'
# - '172.16.0.0/12'
# - '192.168.0.0/16'
# - '100.64.0.0/10'
# - '169.254.0.0/16'
#
# List of IP address CIDR ranges that the URL preview spider is allowed
# to access even if they are specified in url_preview_ip_range_blacklist.
# This is useful for specifying exceptions to wide-ranging blacklisted
# target IP ranges - e.g. for enabling URL previews for a specific private
# website only visible in your network.
#
# url_preview_ip_range_whitelist:
# - '192.168.1.1'

# Optional list of URL matches that the URL preview spider is
# denied from accessing. You should use url_preview_ip_range_blacklist
# in preference to this, otherwise someone could define a public DNS
# entry that points to a private IP address and circumvent the blacklist.
# This is more useful if you know there is an entire shape of URL that
# you know that will never want synapse to try to spider.
#
# Each list entry is a dictionary of url component attributes as returned
# by urlparse.urlsplit as applied to the absolute form of the URL. See
# https://docs.python.org/2/library/urlparse.html#urlparse.urlsplit
# The values of the dictionary are treated as an filename match pattern
# applied to that component of URLs, unless they start with a ^ in which
# case they are treated as a regular expression match. If all the
# specified component matches for a given list item succeed, the URL is
# blacklisted.
#
# url_preview_url_blacklist:
# # blacklist any URL with a username in its URI
# - username: '*'
#
# # blacklist all *.google.com URLs
# - netloc: 'google.com'
# - netloc: '*.google.com'
#
# # blacklist all plain HTTP URLs
# - scheme: 'http'
#
# # blacklist http(s)://www.acme.com/foo
# - netloc: 'www.acme.com'
#   path: '/foo'
#
# # blacklist any URL with a literal IPv4 address
# - netloc: '^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$'

# The largest allowed URL preview spidering size in bytes
max_spider_size: "10M"


## Captcha ##
# See docs/CAPTCHA_SETUP for full details of configuring this.

# This Home Server's ReCAPTCHA public key.
recaptcha_public_key: "YOUR_PUBLIC_KEY"

# This Home Server's ReCAPTCHA private key.
recaptcha_private_key: "YOUR_PRIVATE_KEY"

# Enables ReCaptcha checks when registering, preventing signup
# unless a captcha is answered. Requires a valid ReCaptcha
# public/private key.
enable_registration_captcha: False

# A secret key used to bypass the captcha test entirely.
#captcha_bypass_secret: "YOUR_SECRET_HERE"

# The API endpoint to use for verifying m.login.recaptcha responses.
recaptcha_siteverify_api: "https://www.google.com/recaptcha/api/siteverify"


## Turn ##

# The public URIs of the TURN server to give to clients
turn_uris: []

# The shared secret used to compute passwords for the TURN server
turn_shared_secret: "YOUR_SHARED_SECRET"

# The Username and password if the TURN server needs them and
# does not use a token
#turn_username: "TURNSERVER_USERNAME"
#turn_password: "TURNSERVER_PASSWORD"

# How long generated TURN credentials last
turn_user_lifetime: "1h"

# Whether guests should be allowed to use the TURN server.
# This defaults to True, otherwise VoIP will be unreliable for guests.
# However, it does introduce a slight security risk as it allows users to
# connect to arbitrary endpoints without having first signed up for a
# valid account (e.g. by passing a CAPTCHA).
turn_allow_guests: False


## Registration ##

# Enable registration for new users.
enable_registration: False

# The user must provide all of the below types of 3PID when registering.
#
# registrations_require_3pid:
# - email
# - msisdn

# Mandate that users are only allowed to associate certain formats of
# 3PIDs with accounts on this server.
#
# allowed_local_3pids:
# - medium: email
#   pattern: ".*@matrix\.org"
# - medium: email
#   pattern: ".*@vector\.im"
# - medium: msisdn
#   pattern: "\+44"

# If set, allows registration by anyone who also has the shared
# secret, even if registration is otherwise disabled.
# registration_shared_secret: <PRIVATE STRING>

# Set the number of bcrypt rounds used to generate password hash.
# Larger numbers increase the work factor needed to generate the hash.
# The default number is 12 (which equates to 2^12 rounds).
# N.B. that increasing this will exponentially increase the time required
# to register or login - e.g. 24 => 2^24 rounds which will take >20 mins.
bcrypt_rounds: 12

# Allows users to register as guests without a password/email/etc, and
# participate in rooms hosted on this server which have been made
# accessible to anonymous users.
allow_guest_access: False

# The list of identity servers trusted to verify third party
# identifiers by this server.
trusted_third_party_id_servers:
  - matrix.org
  - vector.im
  - riot.im

# Users who register on this homeserver will automatically be joined
# to these rooms
#auto_join_rooms:
#   - "#example:example.com"


## Metrics ###

# Enable collection and rendering of performance metrics
enable_metrics: False

## API Configuration ##

# A list of event types that will be included in the room_invite_state
room_invite_state_types:
  - "m.room.join_rules"
  - "m.room.canonical_alias"
  - "m.room.avatar"
  - "m.room.name"


# A list of application service config file to use
app_service_config_files: []


# macaroon_secret_key: <PRIVATE STRING>

# Used to enable access token expiration.
expire_access_token: False

## Signing Keys ##

# Path to the signing key to sign messages with
signing_key_path: "/etc/matrix-synapse/homeserver.signing.key"

# The keys that the server used to sign messages with but won't use
# to sign new messages. E.g. it has lost its private key
old_signing_keys: {}
#  "ed25519:auto":
#    # Base64 encoded public key
#    key: "The public part of your old signing key."
#    # Millisecond POSIX timestamp when the key expired.
#    expired_ts: 123456789123

# How long key response published by this server is valid for.
# Used to set the valid_until_ts in /key/v2 APIs.
# Determines how quickly servers will query to check which keys
# are still valid.
key_refresh_interval: "1d" # 1 Day.

# The trusted servers to download signing keys from.
perspectives:
  servers:
    "matrix.org":
      verify_keys:
        "ed25519:auto":
          key: "Noi6WqcDj0QmPxCNQqgezwTlBKrfqehY1u2FyWP9uYw"


# Enable SAML2 for registration and login. Uses pysaml2
# config_path: Path to the sp_conf.py configuration file
# idp_redirect_url: Identity provider URL which will redirect
# the user back to /login/saml2 with proper info.
# See pysaml2 docs for format of config.
#saml2_config:
#   enabled: true
#   config_path: "/home/erikj/git/synapse/sp_conf.py"
#   idp_redirect_url: "http://test/idp"


# Enable CAS for registration and login.
#cas_config:
#   enabled: true
#   server_url: "https://cas-server.com"
#   service_url: "https://homeserver.domain.com:8448"
#   #required_attributes:
#   #    name: value


# The JWT needs to contain a globally unique "sub" (subject) claim.
#
# jwt_config:
#   enabled: true
#   secret: "a secret"
#   algorithm: "HS256"


# Enable password for login.
password_config:
  enabled: true
  # Uncomment and change to a secret random string for extra security.
  # DO NOT CHANGE THIS AFTER INITIAL SETUP!
  #pepper: ""


# Enable sending emails for notification events
# Defining a custom URL for Riot is only needed if email notifications
# should contain links to a self-hosted installation of Riot; when set
# the "app_name" setting is ignored.
#
# If your SMTP server requires authentication, the optional smtp_user &
# smtp_pass variables should be used
#
#email:
#   enable_notifs: false
#   smtp_host: "localhost"
#   smtp_port: 25
#   smtp_user: "exampleusername"
#   smtp_pass: "examplepassword"
#   require_transport_security: False
#   notif_from: "Your Friendly %(app)s Home Server <noreply@example.com>"
#   app_name: Matrix
#   template_dir: res/templates
#   notif_template_html: notif_mail.html
#   notif_template_text: notif_mail.txt
#   notif_for_new_users: True
#   riot_base_url: "http://localhost/riot"


# password_providers:
#   - module: "ldap_auth_provider.LdapAuthProvider"
#     config:
#       enabled: true
#       uri: "ldap://ldap.example.com:389"
#       start_tls: true
#       base: "ou=users,dc=example,dc=com"
#       attributes:
#         uid: "cn"
#         mail: "email"
#         name: "givenName"
#       #bind_dn:
#       #bind_password:
#       #filter: "(objectClass=posixAccount)"


# Clients requesting push notifications can either have the body of
# the message sent in the notification poke along with other details
# like the sender, or just the event ID and room ID (`event_id_only`).
# If clients choose the former, this option controls whether the
# notification request includes the content of the event (other details
# like the sender are still included). For `event_id_only` push, it
# has no effect.

# For modern android devices the notification content will still appear
# because it is loaded by the app. iPhone, however will send a
# notification saying only that a message arrived and who it came from.
#
#push:
#   include_content: true


# spam_checker:
#   module: "my_custom_project.SuperSpamChecker"
#   config:
#     example_option: 'things'


# Whether to allow non server admins to create groups on this server
enable_group_creation: false

# If enabled, non server admins can only create groups with local parts
# starting with this prefix
# group_creation_prefix: "unofficial/"


# User Directory configuration
#
# 'search_all_users' defines whether to search all users visible to your HS
# when searching the user directory, rather than limiting to users visible
# in public rooms. Defaults to false. If you set it True, you'll have to run
# UPDATE user_directory_stream_pos SET stream_id = NULL;
# on your database to tell it to rebuild the user_directory search indexes.
#
#user_directory:
#   search_all_users: false
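For instance, the fingerprint calculation quoted in the tls_fingerprints comments above can be wrapped into a small shell sketch. The host and port values are placeholders; the openssl pipeline itself is the one given in the config comments.

    #!/bin/bash
    # Compute the base64 SHA-256 fingerprint of the certificate served on a TLS
    # listener, in the form expected by the tls_fingerprints option.
    host=example.com   # placeholder
    port=8448          # placeholder
    fp=$(openssl s_client -connect "$host:$port" < /dev/null 2> /dev/null |
        openssl x509 -outform DER | openssl sha256 -binary | base64 | tr -d '=')
    echo "tls_fingerprints: [{\"sha256\": \"$fp\"}]"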
2 debian/install vendored
@@ -1,2 +0,0 @@
debian/homeserver.yaml etc/matrix-synapse
debian/log.yaml etc/matrix-synapse
36 debian/log.yaml vendored
@@ -1,36 +0,0 @@

version: 1

formatters:
  precise:
    format: '%(asctime)s - %(name)s - %(lineno)d - %(levelname)s - %(request)s- %(message)s'

filters:
  context:
    (): synapse.util.logcontext.LoggingContextFilter
    request: ""

handlers:
  file:
    class: logging.handlers.RotatingFileHandler
    formatter: precise
    filename: /var/log/matrix-synapse/homeserver.log
    maxBytes: 104857600
    backupCount: 10
    filters: [context]
    encoding: utf8
  console:
    class: logging.StreamHandler
    formatter: precise
    level: WARN

loggers:
  synapse:
    level: INFO

  synapse.storage.SQL:
    level: INFO

root:
  level: INFO
  handlers: [file, console]
4 debian/manpages vendored
@@ -1,4 +0,0 @@
debian/hash_password.1
debian/register_new_matrix_user.1
debian/synapse_port_db.1
debian/synctl.1
4 debian/matrix-synapse-py3.links vendored
@@ -1,4 +0,0 @@
opt/venvs/matrix-synapse/bin/hash_password usr/bin/hash_password
opt/venvs/matrix-synapse/bin/register_new_matrix_user usr/bin/register_new_matrix_user
opt/venvs/matrix-synapse/bin/synapse_port_db usr/bin/synapse_port_db
opt/venvs/matrix-synapse/bin/synctl usr/bin/synctl
39 debian/matrix-synapse-py3.postinst vendored
@@ -1,39 +0,0 @@
#!/bin/sh -e

. /usr/share/debconf/confmodule

CONFIGFILE_SERVERNAME="/etc/matrix-synapse/conf.d/server_name.yaml"
CONFIGFILE_REPORTSTATS="/etc/matrix-synapse/conf.d/report_stats.yaml"
USER="matrix-synapse"

case "$1" in
  configure|reconfigure)
    # Set server name in config file
    mkdir -p "/etc/matrix-synapse/conf.d/"
    db_get matrix-synapse/server-name

    if [ "$RET" ]; then
        echo "server_name: $RET" > $CONFIGFILE_SERVERNAME
    fi

    db_get matrix-synapse/report-stats
    if [ "$RET" ]; then
        echo "report_stats: $RET" > $CONFIGFILE_REPORTSTATS
    fi

    if ! getent passwd $USER >/dev/null; then
        adduser --quiet --system --no-create-home --home /var/lib/matrix-synapse $USER
    fi

    for DIR in /var/lib/matrix-synapse /var/log/matrix-synapse /etc/matrix-synapse; do
        if ! dpkg-statoverride --list --quiet $DIR >/dev/null; then
            dpkg-statoverride --force --quiet --update --add $USER nogroup 0755 $DIR
        fi
    done

    ;;
esac

#DEBHELPER#

exit 0
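The db_get calls above read the matrix-synapse/server-name and matrix-synapse/report-stats debconf answers (the templates appear further down in this diff). A minimal sketch of preseeding those answers for a non-interactive install follows; the owner field and the example values are assumptions, not taken from the package.

    # Hypothetical preseed; values are placeholders.
    echo "matrix-synapse matrix-synapse/server-name string example.com" | debconf-set-selections
    echo "matrix-synapse matrix-synapse/report-stats boolean false" | debconf-set-selections
    apt-get install -y matrix-synapse-py3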
31 debian/matrix-synapse-py3.preinst vendored
@@ -1,31 +0,0 @@
#!/bin/sh -e

# Attempt to undo some of the braindamage caused by
# https://github.com/matrix-org/package-synapse-debian/issues/18.
#
# Due to reasons [1], the old python2 matrix-synapse package will not stop the
# service when the package is uninstalled. Our maintainer scripts will do the
# right thing in terms of ensuring the service is enabled and unmasked, but
# then do a `systemctl start matrix-synapse`, which of course does nothing -
# leaving the old (py2) service running.
#
# There should normally be no reason for the service to be running during our
# preinst, so we assume that if it *is* running, it's due to that situation,
# and stop it.
#
# [1] dh_systemd_start doesn't do anything because it sees that there is an
#     init.d script with the same name, so leaves it to dh_installinit.
#
#     dh_installinit doesn't do anything because somebody gave it a --no-start
#     for unknown reasons.

if [ -x /bin/systemctl ]; then
    if /bin/systemctl --quiet is-active -- matrix-synapse; then
        echo >&2 "stopping existing matrix-synapse service"
        /bin/systemctl stop matrix-synapse || true
    fi
fi

#DEBHELPER#

exit 0
9 debian/matrix-synapse-py3.triggers vendored
@@ -1,9 +0,0 @@
# Register interest in Python interpreter changes and
# don't make the Python package dependent on the virtualenv package
# processing (noawait)
interest-noawait /usr/bin/python3.5
interest-noawait /usr/bin/python3.6
interest-noawait /usr/bin/python3.7

# Also provide a symbolic trigger for all dh-virtualenv packages
interest dh-virtualenv-interpreter-update
2 debian/matrix-synapse.default vendored
@@ -1,2 +0,0 @@
# Specify environment variables used when running Synapse
# SYNAPSE_CACHE_FACTOR=1 (default)
15 debian/matrix-synapse.service vendored
@@ -1,15 +0,0 @@
[Unit]
Description=Synapse Matrix homeserver

[Service]
Type=simple
User=matrix-synapse
WorkingDirectory=/var/lib/matrix-synapse
EnvironmentFile=/etc/default/matrix-synapse
ExecStartPre=/opt/venvs/matrix-synapse/bin/python -m synapse.app.homeserver --config-path=/etc/matrix-synapse/homeserver.yaml --config-path=/etc/matrix-synapse/conf.d/ --generate-keys
ExecStart=/opt/venvs/matrix-synapse/bin/python -m synapse.app.homeserver --config-path=/etc/matrix-synapse/homeserver.yaml --config-path=/etc/matrix-synapse/conf.d/
Restart=always
RestartSec=3

[Install]
WantedBy=multi-user.target
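A sketch of driving this unit with plain systemd tooling, combined with the SYNAPSE_CACHE_FACTOR knob from debian/matrix-synapse.default above (the cache factor value here is chosen arbitrarily for illustration):

    # Enable and start the packaged service.
    sudo systemctl enable matrix-synapse
    sudo systemctl start matrix-synapse

    # Raise the cache factor via the EnvironmentFile the unit reads, then restart.
    echo 'SYNAPSE_CACHE_FACTOR=2.0' | sudo tee -a /etc/default/matrix-synapse
    sudo systemctl restart matrix-synapse
    sudo systemctl status matrix-synapse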
1 debian/po/POTFILES.in vendored
@@ -1 +0,0 @@
[type: gettext/rfc822deb] templates
56 debian/po/templates.pot vendored
@@ -1,56 +0,0 @@
# SOME DESCRIPTIVE TITLE.
# Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER
# This file is distributed under the same license as the matrix-synapse package.
# FIRST AUTHOR <EMAIL@ADDRESS>, YEAR.
#
#, fuzzy
msgid ""
msgstr ""
"Project-Id-Version: matrix-synapse\n"
"Report-Msgid-Bugs-To: matrix-synapse@packages.debian.org\n"
"POT-Creation-Date: 2017-02-21 07:51+0000\n"
"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
"Language-Team: LANGUAGE <LL@li.org>\n"
"Language: \n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=CHARSET\n"
"Content-Transfer-Encoding: 8bit\n"

#. Type: string
#. Description
#: ../templates:1001
msgid "Name of the server:"
msgstr ""

#. Type: string
#. Description
#: ../templates:1001
msgid ""
"The name that this homeserver will appear as, to clients and other servers "
"via federation. This name should match the SRV record published in DNS."
msgstr ""

#. Type: boolean
#. Description
#: ../templates:2001
msgid "Report anonymous statistics?"
msgstr ""

#. Type: boolean
#. Description
#: ../templates:2001
msgid ""
"Developers of Matrix and Synapse really appreciate helping the project out "
"by reporting anonymized usage statistics from this homeserver. Only very "
"basic aggregate data (e.g. number of users) will be reported, but it helps "
"track the growth of the Matrix community, and helps in making Matrix a "
"success, as well as to convince other networks that they should peer with "
"Matrix."
msgstr ""

#. Type: boolean
#. Description
#: ../templates:2001
msgid "Thank you."
msgstr ""
72 debian/register_new_matrix_user.1 vendored
@@ -1,72 +0,0 @@
.\" generated with Ronn/v0.7.3
.\" http://github.com/rtomayko/ronn/tree/0.7.3
.
.TH "REGISTER_NEW_MATRIX_USER" "1" "February 2017" "" ""
.
.SH "NAME"
\fBregister_new_matrix_user\fR \- Used to register new users with a given home server when registration has been disabled
.
.SH "SYNOPSIS"
\fBregister_new_matrix_user\fR options\.\.\.
.
.SH "DESCRIPTION"
\fBregister_new_matrix_user\fR registers new users with a given home server when registration has been disabled\. For this to work, the home server must be configured with the \'registration_shared_secret\' option set\.
.
.P
This accepts the user credentials like the username, password, is user an admin or not and registers the user onto the homeserver database\. Also, a YAML file containing the shared secret can be provided\. If not, the shared secret can be provided via the command line\.
.
.P
By default it assumes the home server URL to be \fBhttps://localhost:8448\fR\. This can be changed via the \fBserver_url\fR command line option\.
.
.SH "FILES"
A sample YAML file accepted by \fBregister_new_matrix_user\fR is described below:
.
.IP "" 4
.
.nf

registration_shared_secret: "s3cr3t"
.
.fi
.
.IP "" 0
.
.SH "OPTIONS"
.
.TP
\fB\-u\fR, \fB\-\-user\fR
Local part of the new user\. Will prompt if omitted\.
.
.TP
\fB\-p\fR, \fB\-\-password\fR
New password for user\. Will prompt if omitted\. Supplying the password on the command line is not recommended\. Use the STDIN instead\.
.
.TP
\fB\-a\fR, \fB\-\-admin\fR
Register new user as an admin\. Will prompt if omitted\.
.
.TP
\fB\-c\fR, \fB\-\-config\fR
Path to server config file containing the shared secret\.
.
.TP
\fB\-k\fR, \fB\-\-shared\-secret\fR
Shared secret as defined in server config file\. This is an optional parameter as it can be also supplied via the YAML file\.
.
.TP
\fBserver_url\fR
URL of the home server\. Defaults to \'https://localhost:8448\'\.
.
.SH "EXAMPLES"
.
.nf

$ register_new_matrix_user \-u user1 \-p p@ssword \-a \-c config\.yaml
.
.fi
.
.SH "COPYRIGHT"
This man page was written by Rahul De <\fIrahulde@swecha\.net\fR> for Debian GNU/Linux distribution\.
.
.SH "SEE ALSO"
synctl(1), synapse_port_db(1), hash_password(1)
61 debian/register_new_matrix_user.ronn vendored
@@ -1,61 +0,0 @@
register_new_matrix_user(1) -- Used to register new users with a given home server when registration has been disabled
======================================================================================================================

## SYNOPSIS

`register_new_matrix_user` options...

## DESCRIPTION

**register_new_matrix_user** registers new users with a given home server when
registration has been disabled. For this to work, the home server must be
configured with the 'registration_shared_secret' option set.

This accepts the user credentials like the username, password, is user an
admin or not and registers the user onto the homeserver database. Also,
a YAML file containing the shared secret can be provided. If not, the
shared secret can be provided via the command line.

By default it assumes the home server URL to be `https://localhost:8448`.
This can be changed via the `server_url` command line option.

## FILES

A sample YAML file accepted by `register_new_matrix_user` is described below:

    registration_shared_secret: "s3cr3t"

## OPTIONS

  * `-u`, `--user`:
    Local part of the new user. Will prompt if omitted.

  * `-p`, `--password`:
    New password for user. Will prompt if omitted. Supplying the password
    on the command line is not recommended. Use the STDIN instead.

  * `-a`, `--admin`:
    Register new user as an admin. Will prompt if omitted.

  * `-c`, `--config`:
    Path to server config file containing the shared secret.

  * `-k`, `--shared-secret`:
    Shared secret as defined in server config file. This is an optional
    parameter as it can be also supplied via the YAML file.

  * `server_url`:
    URL of the home server. Defaults to 'https://localhost:8448'.

## EXAMPLES

    $ register_new_matrix_user -u user1 -p p@ssword -a -c config.yaml

## COPYRIGHT

This man page was written by Rahul De <<rahulde@swecha.net>>
for Debian GNU/Linux distribution.

## SEE ALSO

synctl(1), synapse_port_db(1), hash_password(1)
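A sketch combining the -k and server_url options described above: the user name, secret and port are placeholders, and this variant prompts for the password rather than passing it with -p.

    # Shared secret supplied on the command line instead of via a config file.
    $ register_new_matrix_user -u alice -a -k "s3cr3t" https://localhost:8008
    New password: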
22 debian/rules vendored
@@ -1,22 +0,0 @@
#!/usr/bin/make -f
#
# Build Debian package using https://github.com/spotify/dh-virtualenv
#

override_dh_systemd_enable:
	dh_systemd_enable --name=matrix-synapse

override_dh_installinit:
	dh_installinit --name=matrix-synapse

override_dh_strip:

override_dh_shlibdeps:

override_dh_virtualenv:
	./debian/build_virtualenv

# We are restricted to compat level 9 (because xenial), so have to
# enable the systemd bits manually.
%:
	dh $@ --with python-virtualenv --with systemd
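This rules file is driven by the usual Debian tooling. A sketch of a manual build from the source tree, outside the Docker wrapper shown later in this diff and assuming the usual debian/control is present, might look like:

    # Install the build helpers this rules file relies on, then build unsigned packages.
    sudo apt-get install -y devscripts equivs dh-virtualenv dh-systemd
    sudo mk-build-deps -ri
    dpkg-buildpackage -us -uc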
1 debian/source/format vendored
@@ -1 +0,0 @@
3.0 (native)
98 debian/synapse_port_db.1 vendored
@@ -1,98 +0,0 @@
.\" generated with Ronn/v0.7.3
.\" http://github.com/rtomayko/ronn/tree/0.7.3
.
.TH "SYNAPSE_PORT_DB" "1" "February 2017" "" ""
.
.SH "NAME"
\fBsynapse_port_db\fR \- A script to port an existing synapse SQLite database to a new PostgreSQL database\.
.
.SH "SYNOPSIS"
\fBsynapse_port_db\fR [\-v] \-\-sqlite\-database=\fIdbfile\fR \-\-postgres\-config=\fIyamlconfig\fR [\-\-curses] [\-\-batch\-size=\fIbatch\-size\fR]
.
.SH "DESCRIPTION"
\fBsynapse_port_db\fR ports an existing synapse SQLite database to a new PostgreSQL database\.
.
.P
SQLite database is specified with \fB\-\-sqlite\-database\fR option and PostgreSQL configuration required to connect to PostgreSQL database is provided using \fB\-\-postgres\-config\fR configuration\. The configuration is specified in YAML format\.
.
.SH "OPTIONS"
.
.TP
\fB\-v\fR
Print log messages in \fBdebug\fR level instead of \fBinfo\fR level\.
.
.TP
\fB\-\-sqlite\-database\fR
The snapshot of the SQLite database file\. This must not be currently used by a running synapse server\.
.
.TP
\fB\-\-postgres\-config\fR
The database config file for the PostgreSQL database\.
.
.TP
\fB\-\-curses\fR
Display a curses based progress UI\.
.
.SH "CONFIG FILE"
The postgres configuration file must be a valid YAML file with the following options\.
.
.IP "\(bu" 4
\fBdatabase\fR: Database configuration section\. This section header can be ignored and the options below may be specified as top level keys\.
.
.IP "\(bu" 4
\fBname\fR: Connector to use when connecting to the database\. This value must be \fBpsycopg2\fR\.
.
.IP "\(bu" 4
\fBargs\fR: DB API 2\.0 compatible arguments to send to the \fBpsycopg2\fR module\.
.
.IP "\(bu" 4
\fBdbname\fR \- the database name
.
.IP "\(bu" 4
\fBuser\fR \- user name used to authenticate
.
.IP "\(bu" 4
\fBpassword\fR \- password used to authenticate
.
.IP "\(bu" 4
\fBhost\fR \- database host address (defaults to UNIX socket if not provided)
.
.IP "\(bu" 4
\fBport\fR \- connection port number (defaults to 5432 if not provided)
.
.IP "" 0

.
.IP "\(bu" 4
\fBsynchronous_commit\fR: Optional\. Default is True\. If the value is \fBFalse\fR, enable asynchronous commit and don\'t wait for the server to call fsync before ending the transaction\. See: https://www\.postgresql\.org/docs/current/static/wal\-async\-commit\.html
.
.IP "" 0

.
.IP "" 0
.
.P
Following example illustrates the configuration file format\.
.
.IP "" 4
.
.nf

database:
  name: psycopg2
  args:
    dbname: synapsedb
    user: synapseuser
    password: ORohmi9Eet=ohphi
    host: localhost
  synchronous_commit: false
.
.fi
.
.IP "" 0
.
.SH "COPYRIGHT"
This man page was written by Sunil Mohan Adapa <\fIsunil@medhas\.org\fR> for Debian GNU/Linux distribution\.
.
.SH "SEE ALSO"
synctl(1), hash_password(1), register_new_matrix_user(1)
87 debian/synapse_port_db.ronn vendored
@@ -1,87 +0,0 @@
synapse_port_db(1) -- A script to port an existing synapse SQLite database to a new PostgreSQL database.
=============================================

## SYNOPSIS

`synapse_port_db` [-v] --sqlite-database=<dbfile> --postgres-config=<yamlconfig> [--curses] [--batch-size=<batch-size>]

## DESCRIPTION

**synapse_port_db** ports an existing synapse SQLite database to a new
PostgreSQL database.

SQLite database is specified with `--sqlite-database` option and
PostgreSQL configuration required to connect to PostgreSQL database is
provided using `--postgres-config` configuration. The configuration
is specified in YAML format.

## OPTIONS

  * `-v`:
    Print log messages in `debug` level instead of `info` level.

  * `--sqlite-database`:
    The snapshot of the SQLite database file. This must not be
    currently used by a running synapse server.

  * `--postgres-config`:
    The database config file for the PostgreSQL database.

  * `--curses`:
    Display a curses based progress UI.

## CONFIG FILE

The postgres configuration file must be a valid YAML file with the
following options.

  * `database`:
    Database configuration section. This section header can be
    ignored and the options below may be specified as top level
    keys.

    * `name`:
      Connector to use when connecting to the database. This value must
      be `psycopg2`.

    * `args`:
      DB API 2.0 compatible arguments to send to the `psycopg2` module.

      * `dbname` - the database name

      * `user` - user name used to authenticate

      * `password` - password used to authenticate

      * `host` - database host address (defaults to UNIX socket if not
        provided)

      * `port` - connection port number (defaults to 5432 if not
        provided)

    * `synchronous_commit`:
      Optional. Default is True. If the value is `False`, enable
      asynchronous commit and don't wait for the server to call fsync
      before ending the transaction. See:
      https://www.postgresql.org/docs/current/static/wal-async-commit.html

Following example illustrates the configuration file format.

    database:
      name: psycopg2
      args:
        dbname: synapsedb
        user: synapseuser
        password: ORohmi9Eet=ohphi
        host: localhost
      synchronous_commit: false

## COPYRIGHT

This man page was written by Sunil Mohan Adapa <<sunil@medhas.org>> for
Debian GNU/Linux distribution.

## SEE ALSO

synctl(1), hash_password(1), register_new_matrix_user(1)
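A sketch of a complete port following the constraints above; paths and config file names are placeholders, and the snapshot step is there because the tool must not run against a database that a live synapse is still using.

    # Stop synapse and take a snapshot of the SQLite database.
    synctl stop homeserver.yaml
    cp /var/lib/matrix-synapse/homeserver.db /tmp/homeserver.db.snapshot

    # postgres.yaml holds the database section shown in CONFIG FILE above.
    synapse_port_db --curses \
        --sqlite-database /tmp/homeserver.db.snapshot \
        --postgres-config postgres.yaml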
63 debian/synctl.1 vendored
@@ -1,63 +0,0 @@
.\" generated with Ronn/v0.7.3
.\" http://github.com/rtomayko/ronn/tree/0.7.3
.
.TH "SYNCTL" "1" "February 2017" "" ""
.
.SH "NAME"
\fBsynctl\fR \- Synapse server control interface
.
.SH "SYNOPSIS"
Start, stop or restart synapse server\.
.
.P
\fBsynctl\fR {start|stop|restart} [configfile] [\-w|\-\-worker=\fIWORKERCONFIG\fR] [\-a|\-\-all\-processes=\fIWORKERCONFIGDIR\fR]
.
.SH "DESCRIPTION"
\fBsynctl\fR can be used to start, stop or restart Synapse server\. The control operation can be done on all processes or a single worker process\.
.
.SH "OPTIONS"
.
.TP
\fBaction\fR
The value of action should be one of \fBstart\fR, \fBstop\fR or \fBrestart\fR\.
.
.TP
\fBconfigfile\fR
Optional path of the configuration file to use\. Default value is \fBhomeserver\.yaml\fR\. The configuration file must exist for the operation to succeed\.
.
.TP
\fB\-w\fR, \fB\-\-worker\fR:
.
.IP
Perform start, stop or restart operations on a single worker\. Incompatible with \fB\-a\fR|\fB\-\-all\-processes\fR\. Value passed must be a valid worker\'s configuration file\.
.
.TP
\fB\-a\fR, \fB\-\-all\-processes\fR:
.
.IP
Perform start, stop or restart operations on all the workers in the given directory and the main synapse process\. Incompatible with \fB\-w\fR|\fB\-\-worker\fR\. Value passed must be a directory containing valid work configuration files\. All files ending with \fB\.yaml\fR extension shall be considered as configuration files and all other files in the directory are ignored\.
.
.SH "CONFIGURATION FILE"
Configuration file may be generated as follows:
.
.IP "" 4
.
.nf

$ python \-B \-m synapse\.app\.homeserver \-c config\.yaml \-\-generate\-config \-\-server\-name=<server name>
.
.fi
.
.IP "" 0
.
.SH "ENVIRONMENT"
.
.TP
\fBSYNAPSE_CACHE_FACTOR\fR
Synapse\'s architecture is quite RAM hungry currently \- a lot of recent room data and metadata is deliberately cached in RAM in order to speed up common requests\. This will be improved in future, but for now the easiest way to either reduce the RAM usage (at the risk of slowing things down) is to set the SYNAPSE_CACHE_FACTOR environment variable\. Roughly speaking, a SYNAPSE_CACHE_FACTOR of 1\.0 will max out at around 3\-4GB of resident memory \- this is what we currently run the matrix\.org on\. The default setting is currently 0\.1, which is probably around a ~700MB footprint\. You can dial it down further to 0\.02 if desired, which targets roughly ~512MB\. Conversely you can dial it up if you need performance for lots of users and have a box with a lot of RAM\.
.
.SH "COPYRIGHT"
This man page was written by Sunil Mohan Adapa <\fIsunil@medhas\.org\fR> for Debian GNU/Linux distribution\.
.
.SH "SEE ALSO"
synapse_port_db(1), hash_password(1), register_new_matrix_user(1)
70
debian/synctl.ronn
vendored
70
debian/synctl.ronn
vendored
@@ -1,70 +0,0 @@
|
||||
synctl(1) -- Synapse server control interface
|
||||
=============================================
|
||||
|
||||
## SYNOPSIS
|
||||
Start, stop or restart synapse server.
|
||||
|
||||
`synctl` {start|stop|restart} [configfile] [-w|--worker=<WORKERCONFIG>] [-a|--all-processes=<WORKERCONFIGDIR>]
|
||||
|
||||
## DESCRIPTION
|
||||
|
||||
**synctl** can be used to start, stop or restart Synapse server. The
|
||||
control operation can be done on all processes or a single worker
|
||||
process.
|
||||
|
||||
## OPTIONS
|
||||
|
||||
* `action`:
|
||||
The value of action should be one of `start`, `stop` or `restart`.
|
||||
|
||||
* `configfile`:
|
||||
Optional path of the configuration file to use. Default value is
|
||||
`homeserver.yaml`. The configuration file must exist for the
|
||||
operation to succeed.
|
||||
|
||||
* `-w`, `--worker`:
|
||||
|
||||
Perform start, stop or restart operations on a single worker.
|
||||
Incompatible with `-a`|`--all-processes`. Value passed must be a
|
||||
valid worker's configuration file.
|
||||
|
||||
* `-a`, `--all-processes`:
|
||||
|
||||
Perform start, stop or restart operations on all the workers in
|
||||
the given directory and the main synapse process. Incompatible
|
||||
with `-w`|`--worker`. Value passed must be a directory containing
|
||||
valid work configuration files. All files ending with `.yaml`
|
||||
extension shall be considered as configuration files and all other
|
||||
files in the directory are ignored.
|
||||
|
||||
## CONFIGURATION FILE
|
||||
|
||||
Configuration file may be generated as follows:
|
||||
|
||||
$ python -B -m synapse.app.homeserver -c config.yaml --generate-config --server-name=<server name>
|
||||
|
||||
## ENVIRONMENT
|
||||
|
||||
* `SYNAPSE_CACHE_FACTOR`:
|
||||
Synapse's architecture is quite RAM hungry currently - a lot of
|
||||
recent room data and metadata is deliberately cached in RAM in
|
||||
order to speed up common requests. This will be improved in
|
||||
future, but for now the easiest way to either reduce the RAM usage
|
||||
(at the risk of slowing things down) is to set the
|
||||
SYNAPSE_CACHE_FACTOR environment variable. Roughly speaking, a
|
||||
SYNAPSE_CACHE_FACTOR of 1.0 will max out at around 3-4GB of
|
||||
resident memory - this is what we currently run the matrix.org
|
||||
on. The default setting is currently 0.1, which is probably around
|
||||
a ~700MB footprint. You can dial it down further to 0.02 if
|
||||
desired, which targets roughly ~512MB. Conversely you can dial it
|
||||
up if you need performance for lots of users and have a box with a
|
||||
lot of RAM.
|
||||
|
||||
## COPYRIGHT
|
||||
|
||||
This man page was written by Sunil Mohan Adapa <<sunil@medhas.org>> for
|
||||
Debian GNU/Linux distribution.
|
||||
|
||||
## SEE ALSO
|
||||
|
||||
synapse_port_db(1), hash_password(1), register_new_matrix_user(1)
19 changes: debian/templates (vendored)
@@ -1,19 +0,0 @@
Template: matrix-synapse/server-name
Type: string
_Description: Name of the server:
 The name that this homeserver will appear as, to clients and other
 servers via federation. This name should match the SRV record
 published in DNS.

Template: matrix-synapse/report-stats
Type: boolean
Default: false
_Description: Report anonymous statistics?
 Developers of Matrix and Synapse really appreciate it if you help the
 project out by reporting anonymized usage statistics from this
 homeserver. Only very basic aggregate data (e.g. number of users)
 will be reported, but it helps track the growth of the Matrix
 community, and helps in making Matrix a success, as well as to
 convince other networks that they should peer with Matrix.
 .
 Thank you.
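These debconf questions can be preseeded for unattended installs - a sketch, assuming the package is named matrix-synapse as the template paths above suggest, with example answer values:

    # Answer both questions ahead of time so the installer never prompts.
    echo "matrix-synapse matrix-synapse/server-name string example.com" | debconf-set-selections
    echo "matrix-synapse matrix-synapse/report-stats boolean false" | debconf-set-selections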
35 changes: Dockerfile-dhvirtualenv (referenced by the build script below)
@@ -1,35 +0,0 @@
# A dockerfile which builds a docker image for building a debian package for
# synapse. The distro to build for is passed as a docker build var.
#
# The default entrypoint expects the synapse source to be mounted as a
# (read-only) volume at /synapse/source, and an output directory at /debs.
#
# A pair of environment variables (TARGET_USERID and TARGET_GROUPID) can be
# passed to the docker container; if these are set, the build script will chown
# the build products accordingly, to avoid ending up with things owned by root
# in the host filesystem.

# Get the distro we want to pull from as a dynamic build variable
ARG distro=""
FROM ${distro}

# Install the build dependencies
RUN apt-get update -qq -o Acquire::Languages=none \
    && env DEBIAN_FRONTEND=noninteractive apt-get install \
        -yqq --no-install-recommends -o Dpkg::Options::=--force-unsafe-io \
        build-essential \
        debhelper \
        devscripts \
        dh-systemd \
        dh-virtualenv \
        equivs \
        lsb-release \
        python3-dev \
        python3-pip \
        python3-setuptools \
        python3-venv \
        sqlite3 \
        wget

WORKDIR /synapse/source
ENTRYPOINT ["bash","/synapse/source/docker/build_debian.sh"]
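To build the package for just one distribution by hand with this image (a sketch of what the loop in the build script further below automates; run from the directory containing Dockerfile-dhvirtualenv, with the output landing in ../../debs):

    docker build --tag dh-venv-builder:bionic --build-arg distro=ubuntu:bionic -f Dockerfile-dhvirtualenv .
    mkdir -p ../../debs
    docker run -it --rm \
        --volume=$(pwd)/..:/synapse/source:ro --volume=$(pwd)/../../debs:/debs \
        -e TARGET_USERID=$(id -u) -e TARGET_GROUPID=$(id -g) \
        dh-venv-builder:bionic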
41 changes: docker/build_debian.sh (the Dockerfile entrypoint above)
@@ -1,41 +0,0 @@
#!/bin/bash

# The script to build the Debian package, as run inside the Docker image.

set -ex

DIST=`lsb_release -c -s`

# We need to build a newer dh_virtualenv on older OSes like Xenial.
if [ "$DIST" = 'xenial' ]; then
    mkdir -p /tmp/dhvenv
    cd /tmp/dhvenv
    wget https://github.com/spotify/dh-virtualenv/archive/1.1.tar.gz
    tar xvf 1.1.tar.gz
    cd dh-virtualenv-1.1/
    env DEBIAN_FRONTEND=noninteractive mk-build-deps -ri -t "apt-get -yqq --no-install-recommends -o Dpkg::Options::=--force-unsafe-io"
    dpkg-buildpackage -us -uc -b
    cd /tmp/dhvenv
    apt-get install -yqq ./dh-virtualenv_1.1-1_all.deb
fi


# we get a read-only copy of the source: make a writeable copy
cp -aT /synapse/source /synapse/build
cd /synapse/build

# add an entry to the changelog for this distribution
dch -M -l "+$DIST" "build for $DIST"
dch -M -r "" --force-distribution --distribution "$DIST"

dpkg-buildpackage -us -uc

ls -l ..

# copy the build results out, setting perms if necessary
shopt -s nullglob
for i in ../*.deb ../*.dsc ../*.tar.xz ../*.changes ../*.buildinfo; do
    [ -z "$TARGET_USERID" ] || chown "$TARGET_USERID" "$i"
    [ -z "$TARGET_GROUPID" ] || chgrp "$TARGET_GROUPID" "$i"
    mv "$i" /debs
done
@@ -1,46 +0,0 @@
#!/bin/bash

# Build the Debian packages using Docker images.
#
# This script builds the Docker images and then executes them sequentially, each
# one building a Debian package for the targeted operating system. It is
# designed to be a "single command" to produce all the images.
#
# By default, builds for all known distributions, but a list of distributions
# can be passed on the commandline for debugging.

set -ex

cd `dirname $0`

if [ $# -lt 1 ]; then
    DISTS=(
        debian:stretch
        debian:buster
        debian:sid
        ubuntu:xenial
        ubuntu:bionic
        ubuntu:cosmic
    )
else
    DISTS=("$@")
fi

# Make the dir where the debs will live.
#
# Note that we deliberately put this outside the source tree, otherwise we tend
# to get source packages which are full of debs. (We could hack around that
# with more magic in the build_debian.sh script, but that doesn't solve the
# problem for natively-run dpkg-buildpackage).

mkdir -p ../../debs

# Build each OS image;
for i in "${DISTS[@]}"; do
    TAG=$(echo ${i} | cut -d ":" -f 2)
    docker build --tag dh-venv-builder:${TAG} --build-arg distro=${i} -f Dockerfile-dhvirtualenv .
    docker run -it --rm --volume=$(pwd)/../\:/synapse/source:ro --volume=$(pwd)/../../debs:/debs \
        -e TARGET_USERID=$(id -u) \
        -e TARGET_GROUPID=$(id -g) \
        dh-venv-builder:${TAG}
done
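A couple of invocation sketches for this wrapper script (its on-disk file name is not given in this hunk, so `build_debs.sh` is used as a hypothetical name; run it from the directory containing Dockerfile-dhvirtualenv):

    # Build packages for every distribution in the default list.
    ./build_debs.sh

    # Build for just two distributions while debugging.
    ./build_debs.sh ubuntu:bionic debian:stretch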
@@ -14,7 +14,6 @@ server_name: "{{ SYNAPSE_SERVER_NAME }}"
|
||||
pid_file: /homeserver.pid
|
||||
web_client: False
|
||||
soft_file_limit: 0
|
||||
log_config: "/compiled/log.config"
|
||||
|
||||
## Ports ##
|
||||
|
||||
@@ -68,6 +67,9 @@ database:
|
||||
## Performance ##
|
||||
|
||||
event_cache_size: "{{ SYNAPSE_EVENT_CACHE_SIZE or "10K" }}"
|
||||
verbose: 0
|
||||
log_file: "/data/homeserver.log"
|
||||
log_config: "/compiled/log.config"
|
||||
|
||||
## Ratelimiting ##
|
||||
|
||||
|
||||
@@ -163,7 +163,7 @@ the logcontext was set, this will make things work out ok: provided
|
||||
It's all too easy to forget to ``yield``: for instance if we forgot that
|
||||
``do_some_stuff`` returned a deferred, we might plough on regardless. This
|
||||
leads to a mess; it will probably work itself out eventually, but not before
|
||||
a load of stuff has been logged against the wrong context. (Normally, other
|
||||
a load of stuff has been logged against the wrong content. (Normally, other
|
||||
things will break, more obviously, if you forget to ``yield``, so this tends
|
||||
not to be a major problem in practice.)
|
||||
|
||||
@@ -440,59 +440,3 @@ To conclude: I think this scheme would have worked equally well, with less
|
||||
danger of messing it up, and probably made some more esoteric code easier to
|
||||
write. But again — changing the conventions of the entire Synapse codebase is
|
||||
not a sensible option for the marginal improvement offered.
|
||||
|
||||
|
||||
A note on garbage-collection of Deferred chains
|
||||
-----------------------------------------------
|
||||
|
||||
It turns out that our logcontext rules do not play nicely with Deferred
|
||||
chains which get orphaned and garbage-collected.
|
||||
|
||||
Imagine we have some code that looks like this:
|
||||
|
||||
.. code:: python
|
||||
|
||||
listener_queue = []
|
||||
|
||||
def on_something_interesting():
|
||||
for d in listener_queue:
|
||||
d.callback("foo")
|
||||
|
||||
@defer.inlineCallbacks
|
||||
def await_something_interesting():
|
||||
new_deferred = defer.Deferred()
|
||||
listener_queue.append(new_deferred)
|
||||
|
||||
with PreserveLoggingContext():
|
||||
yield new_deferred
|
||||
|
||||
Obviously, the idea here is that we have a bunch of things which are waiting
|
||||
for an event. (It's just an example of the problem here, but a relatively
|
||||
common one.)
|
||||
|
||||
Now let's imagine two further things happen. First of all, whatever was
|
||||
waiting for the interesting thing goes away. (Perhaps the request times out,
|
||||
or something *even more* interesting happens.)
|
||||
|
||||
Secondly, let's suppose that we decide that the interesting thing is never
|
||||
going to happen, and we reset the listener queue:
|
||||
|
||||
.. code:: python
|
||||
|
||||
def reset_listener_queue():
|
||||
listener_queue.clear()
|
||||
|
||||
So, both ends of the deferred chain have now dropped their references, and the
|
||||
deferred chain is now orphaned, and will be garbage-collected at some point.
|
||||
Note that ``await_something_interesting`` is a generator function, and when
|
||||
Python garbage-collects generator functions, it gives them a chance to clean
|
||||
up by making the ``yield`` raise a ``GeneratorExit`` exception. In our case,
|
||||
that means that the ``__exit__`` handler of ``PreserveLoggingContext`` will
|
||||
carefully restore the request context, but there is now nothing waiting for
|
||||
its return, so the request context is never cleared.
|
||||
|
||||
To reiterate, this problem only arises when *both* ends of a deferred chain
|
||||
are dropped. Dropping the reference to a deferred you're supposed to be
|
||||
calling is probably bad practice, so this doesn't actually happen too much.
|
||||
Unfortunately, when it does happen, it will lead to leaked logcontexts which
|
||||
are incredibly hard to track down.
|
||||
|
||||
@@ -27,4 +27,4 @@ try:
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
__version__ = "0.34.0.1"
|
||||
__version__ = "0.33.9"
|
||||
|
||||
@@ -188,33 +188,17 @@ class Auth(object):
|
||||
"""
|
||||
# Can optionally look elsewhere in the request (e.g. headers)
|
||||
try:
|
||||
ip_addr = self.hs.get_ip_from_request(request)
|
||||
user_agent = request.requestHeaders.getRawHeaders(
|
||||
b"User-Agent",
|
||||
default=[b""]
|
||||
)[0].decode('ascii', 'surrogateescape')
|
||||
user_id, app_service = yield self._get_appservice_user_id(request)
|
||||
if user_id:
|
||||
request.authenticated_entity = user_id
|
||||
defer.returnValue(
|
||||
synapse.types.create_requester(user_id, app_service=app_service)
|
||||
)
|
||||
|
||||
access_token = self.get_access_token_from_request(
|
||||
request, self.TOKEN_NOT_FOUND_HTTP_STATUS
|
||||
)
|
||||
|
||||
user_id, app_service = yield self._get_appservice_user_id(request)
|
||||
if user_id:
|
||||
request.authenticated_entity = user_id
|
||||
|
||||
if ip_addr and self.hs.config.track_appservice_user_ips:
|
||||
yield self.store.insert_client_ip(
|
||||
user_id=user_id,
|
||||
access_token=access_token,
|
||||
ip=ip_addr,
|
||||
user_agent=user_agent,
|
||||
device_id="dummy-device", # stubbed
|
||||
)
|
||||
|
||||
defer.returnValue(
|
||||
synapse.types.create_requester(user_id, app_service=app_service)
|
||||
)
|
||||
|
||||
user_info = yield self.get_user_by_access_token(access_token, rights)
|
||||
user = user_info["user"]
|
||||
token_id = user_info["token_id"]
|
||||
@@ -224,6 +208,11 @@ class Auth(object):
|
||||
# stubbed out.
|
||||
device_id = user_info.get("device_id")
|
||||
|
||||
ip_addr = self.hs.get_ip_from_request(request)
|
||||
user_agent = request.requestHeaders.getRawHeaders(
|
||||
b"User-Agent",
|
||||
default=[b""]
|
||||
)[0].decode('ascii', 'surrogateescape')
|
||||
if user and access_token and ip_addr:
|
||||
yield self.store.insert_client_ip(
|
||||
user_id=user.to_string(),
|
||||
@@ -300,28 +289,20 @@ class Auth(object):
|
||||
Raises:
|
||||
AuthError if no user by that token exists or the token is invalid.
|
||||
"""
|
||||
|
||||
if rights == "access":
|
||||
# first look in the database
|
||||
r = yield self._look_up_user_by_access_token(token)
|
||||
if r:
|
||||
defer.returnValue(r)
|
||||
|
||||
# otherwise it needs to be a valid macaroon
|
||||
try:
|
||||
user_id, guest = self._parse_and_validate_macaroon(token, rights)
|
||||
except _InvalidMacaroonException:
|
||||
# doesn't look like a macaroon: treat it as an opaque token which
|
||||
# must be in the database.
|
||||
# TODO: it would be nice to get rid of this, but apparently some
|
||||
# people use access tokens which aren't macaroons
|
||||
r = yield self._look_up_user_by_access_token(token)
|
||||
defer.returnValue(r)
|
||||
|
||||
try:
|
||||
user = UserID.from_string(user_id)
|
||||
|
||||
if rights == "access":
|
||||
if not guest:
|
||||
# non-guest access tokens must be in the database
|
||||
logger.warning("Unrecognised access token - not in store.")
|
||||
raise AuthError(
|
||||
self.TOKEN_NOT_FOUND_HTTP_STATUS,
|
||||
"Unrecognised access token.",
|
||||
errcode=Codes.UNKNOWN_TOKEN,
|
||||
)
|
||||
|
||||
if guest:
|
||||
# Guest access tokens are not stored in the database (there can
|
||||
# only be one access token per guest, anyway).
|
||||
#
|
||||
@@ -362,15 +343,31 @@ class Auth(object):
|
||||
"device_id": None,
|
||||
}
|
||||
else:
|
||||
raise RuntimeError("Unknown rights setting %s", rights)
|
||||
# This codepath exists for several reasons:
|
||||
# * so that we can actually return a token ID, which is used
|
||||
# in some parts of the schema (where we probably ought to
|
||||
# use device IDs instead)
|
||||
# * the only way we currently have to invalidate an
|
||||
# access_token is by removing it from the database, so we
|
||||
# have to check here that it is still in the db
|
||||
# * some attributes (notably device_id) aren't stored in the
|
||||
# macaroon. They probably should be.
|
||||
# TODO: build the dictionary from the macaroon once the
|
||||
# above are fixed
|
||||
ret = yield self._look_up_user_by_access_token(token)
|
||||
if ret["user"] != user:
|
||||
logger.error(
|
||||
"Macaroon user (%s) != DB user (%s)",
|
||||
user,
|
||||
ret["user"]
|
||||
)
|
||||
raise AuthError(
|
||||
self.TOKEN_NOT_FOUND_HTTP_STATUS,
|
||||
"User mismatch in macaroon",
|
||||
errcode=Codes.UNKNOWN_TOKEN
|
||||
)
|
||||
defer.returnValue(ret)
|
||||
except (
|
||||
_InvalidMacaroonException,
|
||||
pymacaroons.exceptions.MacaroonException,
|
||||
TypeError,
|
||||
ValueError,
|
||||
) as e:
|
||||
logger.warning("Invalid macaroon in auth: %s %s", type(e), e)
|
||||
except (pymacaroons.exceptions.MacaroonException, TypeError, ValueError):
|
||||
raise AuthError(
|
||||
self.TOKEN_NOT_FOUND_HTTP_STATUS, "Invalid macaroon passed.",
|
||||
errcode=Codes.UNKNOWN_TOKEN
|
||||
@@ -500,8 +497,11 @@ class Auth(object):
|
||||
def _look_up_user_by_access_token(self, token):
|
||||
ret = yield self.store.get_user_by_access_token(token)
|
||||
if not ret:
|
||||
defer.returnValue(None)
|
||||
|
||||
logger.warn("Unrecognised access token - not in store.")
|
||||
raise AuthError(
|
||||
self.TOKEN_NOT_FOUND_HTTP_STATUS, "Unrecognised access token.",
|
||||
errcode=Codes.UNKNOWN_TOKEN
|
||||
)
|
||||
# we use ret.get() below because *lots* of unit tests stub out
|
||||
# get_user_by_access_token in a way where it only returns a couple of
|
||||
# the fields.
|
||||
|
||||
@@ -54,7 +54,7 @@ from synapse.metrics import RegistryProxy
|
||||
from synapse.metrics.background_process_metrics import run_as_background_process
|
||||
from synapse.metrics.resource import METRICS_PREFIX, MetricsResource
|
||||
from synapse.module_api import ModuleApi
|
||||
from synapse.python_dependencies import check_requirements
|
||||
from synapse.python_dependencies import CONDITIONAL_REQUIREMENTS, check_requirements
|
||||
from synapse.replication.http import REPLICATION_PREFIX, ReplicationRestResource
|
||||
from synapse.replication.tcp.resource import ReplicationStreamProtocolFactory
|
||||
from synapse.rest import ClientRestResource
|
||||
@@ -79,6 +79,36 @@ def gz_wrap(r):
|
||||
return EncodingResourceWrapper(r, [GzipEncoderFactory()])
|
||||
|
||||
|
||||
def build_resource_for_web_client(hs):
|
||||
webclient_path = hs.get_config().web_client_location
|
||||
if not webclient_path:
|
||||
try:
|
||||
import syweb
|
||||
except ImportError:
|
||||
quit_with_error(
|
||||
"Could not find a webclient.\n\n"
|
||||
"Please either install the matrix-angular-sdk or configure\n"
|
||||
"the location of the source to serve via the configuration\n"
|
||||
"option `web_client_location`\n\n"
|
||||
"To install the `matrix-angular-sdk` via pip, run:\n\n"
|
||||
" pip install '%(dep)s'\n"
|
||||
"\n"
|
||||
"You can also disable hosting of the webclient via the\n"
|
||||
"configuration option `web_client`\n"
|
||||
% {"dep": CONDITIONAL_REQUIREMENTS["web_client"].keys()[0]}
|
||||
)
|
||||
syweb_path = os.path.dirname(syweb.__file__)
|
||||
webclient_path = os.path.join(syweb_path, "webclient")
|
||||
# GZip is disabled here due to
|
||||
# https://twistedmatrix.com/trac/ticket/7678
|
||||
# (It can stay enabled for the API resources: they call
|
||||
# write() with the whole body and then finish() straight
|
||||
# after and so do not trigger the bug.
|
||||
# GzipFile was removed in commit 184ba09
|
||||
# return GzipFile(webclient_path) # TODO configurable?
|
||||
return File(webclient_path) # TODO configurable?
|
||||
|
||||
|
||||
class SynapseHomeServer(HomeServer):
|
||||
DATASTORE_CLASS = DataStore
|
||||
|
||||
@@ -107,11 +137,8 @@ class SynapseHomeServer(HomeServer):
|
||||
handler = handler_cls(config, module_api)
|
||||
resources[path] = AdditionalResource(self, handler.handle_request)
|
||||
|
||||
# try to find something useful to redirect '/' to
|
||||
if WEB_CLIENT_PREFIX in resources:
|
||||
root_resource = RootRedirect(WEB_CLIENT_PREFIX)
|
||||
elif STATIC_PREFIX in resources:
|
||||
root_resource = RootRedirect(STATIC_PREFIX)
|
||||
else:
|
||||
root_resource = NoResource()
|
||||
|
||||
@@ -210,16 +237,7 @@ class SynapseHomeServer(HomeServer):
|
||||
resources[SERVER_KEY_V2_PREFIX] = KeyApiV2Resource(self)
|
||||
|
||||
if name == "webclient":
|
||||
webclient_path = self.get_config().web_client_location
|
||||
|
||||
if webclient_path is None:
|
||||
logger.warning(
|
||||
"Not enabling webclient resource, as web_client_location is unset."
|
||||
)
|
||||
else:
|
||||
# GZip is disabled here due to
|
||||
# https://twistedmatrix.com/trac/ticket/7678
|
||||
resources[WEB_CLIENT_PREFIX] = File(webclient_path)
|
||||
resources[WEB_CLIENT_PREFIX] = build_resource_for_web_client(self)
|
||||
|
||||
if name == "metrics" and self.get_config().enable_metrics:
|
||||
resources[METRICS_PREFIX] = MetricsResource(RegistryProxy)
|
||||
|
||||
@@ -53,8 +53,8 @@ import logging
|
||||
from twisted.internet import defer
|
||||
|
||||
from synapse.appservice import ApplicationServiceState
|
||||
from synapse.metrics.background_process_metrics import run_as_background_process
|
||||
from synapse.util.logcontext import run_in_background
|
||||
from synapse.util.metrics import Measure
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
@@ -104,23 +104,14 @@ class _ServiceQueuer(object):
|
||||
self.clock = clock
|
||||
|
||||
def enqueue(self, service, event):
|
||||
# if this service isn't being sent something
|
||||
self.queued_events.setdefault(service.id, []).append(event)
|
||||
|
||||
# start a sender for this appservice if we don't already have one
|
||||
|
||||
if service.id in self.requests_in_flight:
|
||||
return
|
||||
|
||||
run_as_background_process(
|
||||
"as-sender-%s" % (service.id, ),
|
||||
self._send_request, service,
|
||||
)
|
||||
run_in_background(self._send_request, service)
|
||||
|
||||
@defer.inlineCallbacks
|
||||
def _send_request(self, service):
|
||||
# sanity-check: we shouldn't get here if this service already has a sender
|
||||
# running.
|
||||
assert(service.id not in self.requests_in_flight)
|
||||
if service.id in self.requests_in_flight:
|
||||
return
|
||||
|
||||
self.requests_in_flight.add(service.id)
|
||||
try:
|
||||
@@ -128,10 +119,12 @@ class _ServiceQueuer(object):
|
||||
events = self.queued_events.pop(service.id, [])
|
||||
if not events:
|
||||
return
|
||||
try:
|
||||
yield self.txn_ctrl.send(service, events)
|
||||
except Exception:
|
||||
logger.exception("AS request failed")
|
||||
|
||||
with Measure(self.clock, "servicequeuer.send"):
|
||||
try:
|
||||
yield self.txn_ctrl.send(service, events)
|
||||
except Exception:
|
||||
logger.exception("AS request failed")
|
||||
finally:
|
||||
self.requests_in_flight.discard(service.id)
|
||||
|
||||
@@ -230,12 +223,7 @@ class _Recoverer(object):
|
||||
self.backoff_counter = 1
|
||||
|
||||
def recover(self):
|
||||
def _retry():
|
||||
run_as_background_process(
|
||||
"as-recoverer-%s" % (self.service.id,),
|
||||
self.retry,
|
||||
)
|
||||
self.clock.call_later((2 ** self.backoff_counter), _retry)
|
||||
self.clock.call_later((2 ** self.backoff_counter), self.retry)
|
||||
|
||||
def _backoff(self):
|
||||
# cap the backoff to be around 8.5min => (2^9) = 512 secs
|
||||
|
||||
@@ -33,16 +33,11 @@ class AppServiceConfig(Config):
|
||||
def read_config(self, config):
|
||||
self.app_service_config_files = config.get("app_service_config_files", [])
|
||||
self.notify_appservices = config.get("notify_appservices", True)
|
||||
self.track_appservice_user_ips = config.get("track_appservice_user_ips", False)
|
||||
|
||||
def default_config(cls, **kwargs):
|
||||
return """\
|
||||
# A list of application service config file to use
|
||||
app_service_config_files: []
|
||||
|
||||
# Whether or not to track application service IP addresses. Implicitly
|
||||
# enables MAU tracking for application service users.
|
||||
track_appservice_user_ips: False
|
||||
"""
|
||||
|
||||
|
||||
|
||||
@@ -57,8 +57,8 @@ class KeyConfig(Config):
|
||||
# Unfortunately, there are people out there that don't have this
|
||||
# set. Lets just be "nice" and derive one from their secret key.
|
||||
logger.warn("Config is missing missing macaroon_secret_key")
|
||||
seed = bytes(self.signing_key[0])
|
||||
self.macaroon_secret_key = hashlib.sha256(seed).digest()
|
||||
seed = self.signing_key[0].seed
|
||||
self.macaroon_secret_key = hashlib.sha256(seed)
|
||||
|
||||
self.expire_access_token = config.get("expire_access_token", False)
|
||||
|
||||
@@ -71,14 +71,14 @@ class KeyConfig(Config):
|
||||
base_key_name = os.path.join(config_dir_path, server_name)
|
||||
|
||||
if is_generating_file:
|
||||
macaroon_secret_key = '"%s"' % random_string_with_symbols(50)
|
||||
macaroon_secret_key = random_string_with_symbols(50)
|
||||
form_secret = '"%s"' % random_string_with_symbols(50)
|
||||
else:
|
||||
macaroon_secret_key = 'null'
|
||||
macaroon_secret_key = None
|
||||
form_secret = 'null'
|
||||
|
||||
return """\
|
||||
macaroon_secret_key: %(macaroon_secret_key)s
|
||||
macaroon_secret_key: "%(macaroon_secret_key)s"
|
||||
|
||||
# Used to enable access token expiration.
|
||||
expire_access_token: False
|
||||
|
||||
@@ -34,6 +34,7 @@ class ServerConfig(Config):
|
||||
raise ConfigError(str(e))
|
||||
|
||||
self.pid_file = self.abspath(config.get("pid_file"))
|
||||
self.web_client = config["web_client"]
|
||||
self.web_client_location = config.get("web_client_location", None)
|
||||
self.soft_file_limit = config["soft_file_limit"]
|
||||
self.daemonize = config.get("daemonize")
|
||||
@@ -61,11 +62,6 @@ class ServerConfig(Config):
|
||||
# master, potentially causing inconsistency.
|
||||
self.enable_media_repo = config.get("enable_media_repo", True)
|
||||
|
||||
# whether to enable search. If disabled, new entries will not be inserted
|
||||
# into the search tables and they will not be indexed. Users will receive
|
||||
# errors when attempting to search for messages.
|
||||
self.enable_search = config.get("enable_search", True)
|
||||
|
||||
self.filter_timeline_limit = config.get("filter_timeline_limit", -1)
|
||||
|
||||
# Whether we should block invites sent to users on this server
|
||||
@@ -127,9 +123,6 @@ class ServerConfig(Config):
|
||||
elif not bind_addresses:
|
||||
bind_addresses.append('')
|
||||
|
||||
if not self.web_client_location:
|
||||
_warn_if_webclient_configured(self.listeners)
|
||||
|
||||
self.gc_thresholds = read_gc_thresholds(config.get("gc_thresholds", None))
|
||||
|
||||
bind_port = config.get("bind_port")
|
||||
@@ -138,6 +131,8 @@ class ServerConfig(Config):
|
||||
bind_host = config.get("bind_host", "")
|
||||
gzip_responses = config.get("gzip_responses", True)
|
||||
|
||||
names = ["client", "webclient"] if self.web_client else ["client"]
|
||||
|
||||
self.listeners.append({
|
||||
"port": bind_port,
|
||||
"bind_addresses": [bind_host],
|
||||
@@ -145,7 +140,7 @@ class ServerConfig(Config):
|
||||
"type": "http",
|
||||
"resources": [
|
||||
{
|
||||
"names": ["client"],
|
||||
"names": names,
|
||||
"compress": gzip_responses,
|
||||
},
|
||||
{
|
||||
@@ -164,7 +159,7 @@ class ServerConfig(Config):
|
||||
"type": "http",
|
||||
"resources": [
|
||||
{
|
||||
"names": ["client"],
|
||||
"names": names,
|
||||
"compress": gzip_responses,
|
||||
},
|
||||
{
|
||||
@@ -247,9 +242,13 @@ class ServerConfig(Config):
|
||||
#
|
||||
# cpu_affinity: 0xFFFFFFFF
|
||||
|
||||
# The path to the web client which will be served at /_matrix/client/
|
||||
# if 'webclient' is configured under the 'listeners' configuration.
|
||||
#
|
||||
# Whether to serve a web client from the HTTP/HTTPS root resource.
|
||||
web_client: True
|
||||
|
||||
# The root directory to server for the above web client.
|
||||
# If left undefined, synapse will serve the matrix-angular-sdk web client.
|
||||
# Make sure matrix-angular-sdk is installed with pip if web_client is True
|
||||
# and web_client_location is undefined
|
||||
# web_client_location: "/path/to/web/root"
|
||||
|
||||
# The public-facing base URL for the client API (not including _matrix/...)
|
||||
@@ -316,8 +315,8 @@ class ServerConfig(Config):
|
||||
-
|
||||
# List of resources to host on this listener.
|
||||
names:
|
||||
- client # The client-server APIs, both v1 and v2
|
||||
# - webclient # A web client. Requires web_client_location to be set.
|
||||
- client # The client-server APIs, both v1 and v2
|
||||
- webclient # The bundled webclient.
|
||||
|
||||
# Should synapse compress HTTP responses to clients that support it?
|
||||
# This should be disabled if running synapse behind a load balancer
|
||||
@@ -344,7 +343,7 @@ class ServerConfig(Config):
|
||||
x_forwarded: false
|
||||
|
||||
resources:
|
||||
- names: [client]
|
||||
- names: [client, webclient]
|
||||
compress: true
|
||||
- names: [federation]
|
||||
compress: false
|
||||
@@ -385,12 +384,7 @@ class ServerConfig(Config):
|
||||
# mau_limit_reserved_threepids:
|
||||
# - medium: 'email'
|
||||
# address: 'reserved_user@example.com'
|
||||
#
|
||||
# Room searching
|
||||
#
|
||||
# If disabled, new messages will not be indexed for searching and users
|
||||
# will receive errors when searching for messages. Defaults to enabled.
|
||||
# enable_search: true
|
||||
|
||||
""" % locals()
|
||||
|
||||
def read_arguments(self, args):
|
||||
@@ -448,19 +442,3 @@ def read_gc_thresholds(thresholds):
|
||||
raise ConfigError(
|
||||
"Value of `gc_threshold` must be a list of three integers if set"
|
||||
)
|
||||
|
||||
|
||||
NO_MORE_WEB_CLIENT_WARNING = """
|
||||
Synapse no longer includes a web client. To enable a web client, configure
|
||||
web_client_location. To remove this warning, remove 'webclient' from the 'listeners'
|
||||
configuration.
|
||||
"""
|
||||
|
||||
|
||||
def _warn_if_webclient_configured(listeners):
|
||||
for listener in listeners:
|
||||
for res in listener.get("resources", []):
|
||||
for name in res.get("names", []):
|
||||
if name == 'webclient':
|
||||
logger.warning(NO_MORE_WEB_CLIENT_WARNING)
|
||||
return
|
||||
|
||||
@@ -259,10 +259,8 @@ class DirectoryHandler(BaseHandler):
|
||||
servers = result["servers"]
|
||||
|
||||
if not room_id:
|
||||
raise SynapseError(
|
||||
404,
|
||||
raise NotFoundError(
|
||||
"Room alias %s not found" % (room_alias.to_string(),),
|
||||
Codes.NOT_FOUND
|
||||
)
|
||||
|
||||
users = yield self.state.get_current_user_in_room(room_id)
|
||||
@@ -302,10 +300,8 @@ class DirectoryHandler(BaseHandler):
|
||||
"servers": result.servers,
|
||||
})
|
||||
else:
|
||||
raise SynapseError(
|
||||
404,
|
||||
raise NotFoundError(
|
||||
"Room alias %r not found" % (room_alias.to_string(),),
|
||||
Codes.NOT_FOUND
|
||||
)
|
||||
|
||||
@defer.inlineCallbacks
|
||||
|
||||
@@ -48,13 +48,14 @@ from synapse.crypto.event_signing import (
|
||||
compute_event_signature,
|
||||
)
|
||||
from synapse.events.validator import EventValidator
|
||||
from synapse.metrics.background_process_metrics import run_as_background_process
|
||||
from synapse.replication.http.federation import (
|
||||
ReplicationCleanRoomRestServlet,
|
||||
ReplicationFederationSendEventsRestServlet,
|
||||
)
|
||||
from synapse.replication.http.membership import ReplicationUserJoinedLeftRoomRestServlet
|
||||
from synapse.state import StateResolutionStore, resolve_events_with_store
|
||||
from synapse.types import UserID, get_domain_from_id
|
||||
from synapse.types import UserID, create_requester, get_domain_from_id
|
||||
from synapse.util import logcontext, unwrapFirstError
|
||||
from synapse.util.async_helpers import Linearizer
|
||||
from synapse.util.distributor import user_joined_room
|
||||
@@ -105,6 +106,7 @@ class FederationHandler(BaseHandler):
|
||||
|
||||
self.hs = hs
|
||||
|
||||
self.clock = hs.get_clock()
|
||||
self.store = hs.get_datastore() # type: synapse.storage.DataStore
|
||||
self.federation_client = hs.get_federation_client()
|
||||
self.state_handler = hs.get_state_handler()
|
||||
@@ -1300,8 +1302,37 @@ class FederationHandler(BaseHandler):
|
||||
context = yield self.state_handler.compute_event_context(event)
|
||||
yield self.persist_events_and_notify([(event, context)])
|
||||
|
||||
sender = UserID.from_string(event.sender)
|
||||
target = UserID.from_string(event.state_key)
|
||||
if (sender.localpart == target.localpart):
|
||||
run_as_background_process(
|
||||
"_auto_accept_invite",
|
||||
self._auto_accept_invite,
|
||||
sender, target, event.room_id,
|
||||
)
|
||||
|
||||
defer.returnValue(event)
|
||||
|
||||
@defer.inlineCallbacks
|
||||
def _auto_accept_invite(self, sender, target, room_id):
|
||||
joined = False
|
||||
for attempt in range(0, 10):
|
||||
try:
|
||||
yield self.hs.get_room_member_handler().update_membership(
|
||||
requester=create_requester(target.to_string()),
|
||||
target=target,
|
||||
room_id=room_id,
|
||||
action="join",
|
||||
)
|
||||
joined = True
|
||||
break
|
||||
except Exception:
|
||||
# We're going to retry, but we should log the error
|
||||
logger.exception("Error auto-accepting invite on attempt %d" % attempt)
|
||||
yield self.clock.sleep(1)
|
||||
if not joined:
|
||||
logger.error("Giving up on trying to auto-accept invite: too many attempts")
|
||||
|
||||
@defer.inlineCallbacks
|
||||
def do_remotely_reject_invite(self, target_hosts, room_id, user_id):
|
||||
origin, event = yield self._make_and_verify_event(
|
||||
|
||||
@@ -217,19 +217,7 @@ class RegistrationHandler(BaseHandler):
|
||||
user_id = None
|
||||
token = None
|
||||
attempts += 1
|
||||
if not self.hs.config.user_consent_at_registration:
|
||||
yield self._auto_join_rooms(user_id)
|
||||
|
||||
defer.returnValue((user_id, token))
|
||||
|
||||
@defer.inlineCallbacks
|
||||
def _auto_join_rooms(self, user_id):
|
||||
"""Automatically joins users to auto join rooms - creating the room in the first place
|
||||
if the user is the first to be created.
|
||||
|
||||
Args:
|
||||
user_id(str): The user to join
|
||||
"""
|
||||
# auto-join the user to any rooms we're supposed to dump them into
|
||||
fake_requester = create_requester(user_id)
|
||||
|
||||
@@ -238,6 +226,7 @@ class RegistrationHandler(BaseHandler):
|
||||
if self.hs.config.autocreate_auto_join_rooms:
|
||||
count = yield self.store.count_all_users()
|
||||
should_auto_create_rooms = count == 1
|
||||
|
||||
for r in self.hs.config.auto_join_rooms:
|
||||
try:
|
||||
if should_auto_create_rooms:
|
||||
@@ -267,15 +256,7 @@ class RegistrationHandler(BaseHandler):
|
||||
except Exception as e:
|
||||
logger.error("Failed to join new user to %r: %r", r, e)
|
||||
|
||||
@defer.inlineCallbacks
|
||||
def post_consent_actions(self, user_id):
|
||||
"""A series of registration actions that can only be carried out once consent
|
||||
has been granted
|
||||
|
||||
Args:
|
||||
user_id (str): The user to join
|
||||
"""
|
||||
yield self._auto_join_rooms(user_id)
|
||||
defer.returnValue((user_id, token))
|
||||
|
||||
@defer.inlineCallbacks
|
||||
def appservice_register(self, user_localpart, as_token):
|
||||
|
||||
@@ -28,8 +28,9 @@ from twisted.internet import defer
|
||||
import synapse.server
|
||||
import synapse.types
|
||||
from synapse.api.constants import EventTypes, Membership
|
||||
from synapse.api.errors import AuthError, Codes, SynapseError
|
||||
from synapse.types import RoomID, UserID
|
||||
from synapse.api.errors import AuthError, Codes, NotFoundError, SynapseError
|
||||
from synapse.types import RoomAlias, RoomID, UserID
|
||||
from synapse.util import logcontext
|
||||
from synapse.util.async_helpers import Linearizer
|
||||
from synapse.util.distributor import user_joined_room, user_left_room
|
||||
|
||||
@@ -416,6 +417,10 @@ class RoomMemberHandler(object):
|
||||
ret = yield self._remote_join(
|
||||
requester, remote_room_hosts, room_id, target, content
|
||||
)
|
||||
logcontext.run_in_background(
|
||||
self._send_merged_user_invites,
|
||||
requester, room_id,
|
||||
)
|
||||
defer.returnValue(ret)
|
||||
|
||||
elif effective_membership_state == Membership.LEAVE:
|
||||
@@ -450,8 +455,58 @@ class RoomMemberHandler(object):
|
||||
prev_events_and_hashes=prev_events_and_hashes,
|
||||
content=content,
|
||||
)
|
||||
if effective_membership_state == Membership.JOIN:
|
||||
logcontext.run_in_background(
|
||||
self._send_merged_user_invites,
|
||||
requester, room_id,
|
||||
)
|
||||
defer.returnValue(res)
|
||||
|
||||
@defer.inlineCallbacks
|
||||
def _send_merged_user_invites(self, requester, room_id):
|
||||
try:
|
||||
profile_alias = "#_profile_%s:%s" % (
|
||||
requester.user.localpart, self.hs.hostname,
|
||||
)
|
||||
profile_alias = RoomAlias.from_string(profile_alias)
|
||||
try:
|
||||
profile_room_id, remote_room_hosts = yield self.lookup_room_alias(
|
||||
profile_alias,
|
||||
)
|
||||
except NotFoundError:
|
||||
logger.info(
|
||||
"Not sending merged invites as %s does not exists",
|
||||
profile_alias
|
||||
)
|
||||
return
|
||||
|
||||
linked_accounts = yield self.state_handler.get_current_state(
|
||||
room_id=profile_room_id.to_string(),
|
||||
event_type="m.linked_accounts",
|
||||
state_key="",
|
||||
)
|
||||
if not linked_accounts or not linked_accounts.content['all_children']:
|
||||
return
|
||||
for child_id in linked_accounts.content['all_children']:
|
||||
child = UserID.from_string(child_id)
|
||||
if self.hs.is_mine(child) or child_id == requester.user.to_string():
|
||||
# TODO: Handle auto-invite for local users (not a priority)
|
||||
continue
|
||||
try:
|
||||
yield self.update_membership(
|
||||
requester=requester,
|
||||
target=child,
|
||||
room_id=room_id,
|
||||
action="invite",
|
||||
)
|
||||
except Exception:
|
||||
logger.exception("Failed to invite %s to %s", child_id, room_id)
|
||||
except Exception:
|
||||
logger.exception(
|
||||
"Failed to send invites to children of %s in %s",
|
||||
requester.user.to_string(), room_id,
|
||||
)
|
||||
|
||||
@defer.inlineCallbacks
|
||||
def send_membership_event(
|
||||
self,
|
||||
@@ -578,7 +633,7 @@ class RoomMemberHandler(object):
|
||||
mapping = yield directory_handler.get_association(room_alias)
|
||||
|
||||
if not mapping:
|
||||
raise SynapseError(404, "No such room alias")
|
||||
raise NotFoundError("No such room alias")
|
||||
|
||||
room_id = mapping["room_id"]
|
||||
servers = mapping["servers"]
|
||||
|
||||
@@ -50,9 +50,6 @@ class SearchHandler(BaseHandler):
|
||||
dict to be returned to the client with results of search
|
||||
"""
|
||||
|
||||
if not self.hs.config.enable_search:
|
||||
raise SynapseError(400, "Search is disabled on this homeserver")
|
||||
|
||||
batch_group = None
|
||||
batch_group_key = None
|
||||
batch_token = None
|
||||
|
||||
@@ -69,6 +69,9 @@ REQUIREMENTS = {
|
||||
}
|
||||
|
||||
CONDITIONAL_REQUIREMENTS = {
|
||||
"web_client": {
|
||||
"matrix_angular_sdk>=0.6.8": ["syweb>=0.6.8"],
|
||||
},
|
||||
"email.enable_notifs": {
|
||||
"Jinja2>=2.8": ["Jinja2>=2.8"],
|
||||
"bleach>=1.4.2": ["bleach>=1.4.2"],
|
||||
@@ -78,7 +81,7 @@ CONDITIONAL_REQUIREMENTS = {
|
||||
},
|
||||
"postgres": {
|
||||
"psycopg2>=2.6": ["psycopg2"]
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
@@ -27,7 +27,7 @@ from twisted.web.client import PartialDownloadError
|
||||
|
||||
from synapse.api.errors import Codes, LoginError, SynapseError
|
||||
from synapse.http.server import finish_request
|
||||
from synapse.http.servlet import RestServlet, parse_json_object_from_request
|
||||
from synapse.http.servlet import parse_json_object_from_request
|
||||
from synapse.types import UserID
|
||||
from synapse.util.msisdn import phone_number_to_msisdn
|
||||
|
||||
@@ -83,7 +83,6 @@ class LoginRestServlet(ClientV1RestServlet):
|
||||
PATTERNS = client_path_patterns("/login$")
|
||||
SAML2_TYPE = "m.login.saml2"
|
||||
CAS_TYPE = "m.login.cas"
|
||||
SSO_TYPE = "m.login.sso"
|
||||
TOKEN_TYPE = "m.login.token"
|
||||
JWT_TYPE = "m.login.jwt"
|
||||
|
||||
@@ -106,10 +105,6 @@ class LoginRestServlet(ClientV1RestServlet):
|
||||
if self.saml2_enabled:
|
||||
flows.append({"type": LoginRestServlet.SAML2_TYPE})
|
||||
if self.cas_enabled:
|
||||
flows.append({"type": LoginRestServlet.SSO_TYPE})
|
||||
|
||||
# we advertise CAS for backwards compat, though MSC1721 renamed it
|
||||
# to SSO.
|
||||
flows.append({"type": LoginRestServlet.CAS_TYPE})
|
||||
|
||||
# While its valid for us to advertise this login type generally,
|
||||
@@ -389,11 +384,11 @@ class SAML2RestServlet(ClientV1RestServlet):
|
||||
defer.returnValue((200, {"status": "not_authenticated"}))
|
||||
|
||||
|
||||
class CasRedirectServlet(RestServlet):
|
||||
PATTERNS = client_path_patterns("/login/(cas|sso)/redirect")
|
||||
class CasRedirectServlet(ClientV1RestServlet):
|
||||
PATTERNS = client_path_patterns("/login/cas/redirect", releases=())
|
||||
|
||||
def __init__(self, hs):
|
||||
super(CasRedirectServlet, self).__init__()
|
||||
super(CasRedirectServlet, self).__init__(hs)
|
||||
self.cas_server_url = hs.config.cas_server_url.encode('ascii')
|
||||
self.cas_service_url = hs.config.cas_service_url.encode('ascii')
|
||||
|
||||
|
||||
@@ -42,7 +42,7 @@ class PushRuleRestServlet(ClientV1RestServlet):
|
||||
|
||||
@defer.inlineCallbacks
|
||||
def on_PUT(self, request):
|
||||
spec = _rule_spec_from_path([x.decode('utf8') for x in request.postpath])
|
||||
spec = _rule_spec_from_path(request.postpath)
|
||||
try:
|
||||
priority_class = _priority_class_from_spec(spec)
|
||||
except InvalidRuleException as e:
|
||||
@@ -103,7 +103,7 @@ class PushRuleRestServlet(ClientV1RestServlet):
|
||||
|
||||
@defer.inlineCallbacks
|
||||
def on_DELETE(self, request):
|
||||
spec = _rule_spec_from_path([x.decode('utf8') for x in request.postpath])
|
||||
spec = _rule_spec_from_path(request.postpath)
|
||||
|
||||
requester = yield self.auth.get_user_by_req(request)
|
||||
user_id = requester.user.to_string()
|
||||
@@ -134,7 +134,7 @@ class PushRuleRestServlet(ClientV1RestServlet):
|
||||
|
||||
rules = format_push_rules_for_user(requester.user, rules)
|
||||
|
||||
path = [x.decode('utf8') for x in request.postpath][1:]
|
||||
path = request.postpath[1:]
|
||||
|
||||
if path == []:
|
||||
# we're a reference impl: pedantry is our job.
|
||||
@@ -142,10 +142,11 @@ class PushRuleRestServlet(ClientV1RestServlet):
|
||||
PushRuleRestServlet.SLIGHTLY_PEDANTIC_TRAILING_SLASH_ERROR
|
||||
)
|
||||
|
||||
if path[0] == '':
|
||||
if path[0] == b'':
|
||||
defer.returnValue((200, rules))
|
||||
elif path[0] == 'global':
|
||||
result = _filter_ruleset_with_path(rules['global'], path[1:])
|
||||
elif path[0] == b'global':
|
||||
path = [x.decode('ascii') for x in path[1:]]
|
||||
result = _filter_ruleset_with_path(rules['global'], path)
|
||||
defer.returnValue((200, result))
|
||||
else:
|
||||
raise UnrecognizedRequestError()
|
||||
@@ -189,24 +190,12 @@ class PushRuleRestServlet(ClientV1RestServlet):
|
||||
|
||||
|
||||
def _rule_spec_from_path(path):
|
||||
"""Turn a sequence of path components into a rule spec
|
||||
|
||||
Args:
|
||||
path (sequence[unicode]): the URL path components.
|
||||
|
||||
Returns:
|
||||
dict: rule spec dict, containing scope/template/rule_id entries,
|
||||
and possibly attr.
|
||||
|
||||
Raises:
|
||||
UnrecognizedRequestError if the path components cannot be parsed.
|
||||
"""
|
||||
if len(path) < 2:
|
||||
raise UnrecognizedRequestError()
|
||||
if path[0] != 'pushrules':
|
||||
if path[0] != b'pushrules':
|
||||
raise UnrecognizedRequestError()
|
||||
|
||||
scope = path[1]
|
||||
scope = path[1].decode('ascii')
|
||||
path = path[2:]
|
||||
if scope != 'global':
|
||||
raise UnrecognizedRequestError()
|
||||
@@ -214,13 +203,13 @@ def _rule_spec_from_path(path):
|
||||
if len(path) == 0:
|
||||
raise UnrecognizedRequestError()
|
||||
|
||||
template = path[0]
|
||||
template = path[0].decode('ascii')
|
||||
path = path[1:]
|
||||
|
||||
if len(path) == 0 or len(path[0]) == 0:
|
||||
raise UnrecognizedRequestError()
|
||||
|
||||
rule_id = path[0]
|
||||
rule_id = path[0].decode('ascii')
|
||||
|
||||
spec = {
|
||||
'scope': scope,
|
||||
@@ -231,7 +220,7 @@ def _rule_spec_from_path(path):
|
||||
path = path[1:]
|
||||
|
||||
if len(path) > 0 and len(path[0]) > 0:
|
||||
spec['attr'] = path[0]
|
||||
spec['attr'] = path[0].decode('ascii')
|
||||
|
||||
return spec
|
||||
|
||||
|
||||
@@ -142,7 +142,7 @@ class PushersRemoveRestServlet(RestServlet):
|
||||
To allow pusher to be delete by clicking a link (ie. GET request)
|
||||
"""
|
||||
PATTERNS = client_path_patterns("/pushers/remove$")
|
||||
SUCCESS_HTML = b"<html><body>You have been unsubscribed</body><html>"
|
||||
SUCCESS_HTML = "<html><body>You have been unsubscribed</body><html>"
|
||||
|
||||
def __init__(self, hs):
|
||||
super(PushersRemoveRestServlet, self).__init__()
|
||||
|
||||
@@ -457,7 +457,6 @@ class RegisterRestServlet(RestServlet):
|
||||
yield self.store.user_set_consent_version(
|
||||
registered_user_id, self.hs.config.user_consent_version,
|
||||
)
|
||||
yield self.registration_handler.post_consent_actions(registered_user_id)
|
||||
|
||||
defer.returnValue((200, return_dict))
|
||||
|
||||
|
||||
@@ -89,7 +89,6 @@ class ConsentResource(Resource):
|
||||
|
||||
self.hs = hs
|
||||
self.store = hs.get_datastore()
|
||||
self.registration_handler = hs.get_handlers().registration_handler
|
||||
|
||||
# this is required by the request_handler wrapper
|
||||
self.clock = hs.get_clock()
|
||||
@@ -200,7 +199,6 @@ class ConsentResource(Resource):
|
||||
if e.code != 404:
|
||||
raise
|
||||
raise NotFoundError("Unknown user")
|
||||
yield self.registration_handler.post_consent_actions(qualified_user_id)
|
||||
|
||||
try:
|
||||
self._render_template(request, "success.html")
|
||||
|
||||
@@ -48,8 +48,7 @@ class DownloadResource(Resource):
|
||||
set_cors_headers(request)
|
||||
request.setHeader(
|
||||
b"Content-Security-Policy",
|
||||
b"sandbox;"
|
||||
b" default-src 'none';"
|
||||
b"default-src 'none';"
|
||||
b" script-src 'none';"
|
||||
b" plugin-types application/pdf;"
|
||||
b" style-src 'unsafe-inline';"
|
||||
|
||||
@@ -298,8 +298,6 @@ def _resolve_normal_events(events, auth_events):
|
||||
|
||||
def _ordered_events(events):
|
||||
def key_func(e):
|
||||
# we have to use utf-8 rather than ascii here because it turns out we allow
|
||||
# people to send us events with non-ascii event IDs :/
|
||||
return -int(e.depth), hashlib.sha1(e.event_id.encode('utf-8')).hexdigest()
|
||||
return -int(e.depth), hashlib.sha1(e.event_id.encode('ascii')).hexdigest()
|
||||
|
||||
return sorted(events, key=key_func)
|
||||
|
||||
@@ -12,30 +12,35 @@
|
||||
<h1>Log in with one of the following methods</h1>
|
||||
|
||||
<span id="feedback" style="color: #f00"></span>
|
||||
<br/>
|
||||
<br/>
|
||||
|
||||
<div id="loading">
|
||||
<img src="spinner.gif" />
|
||||
</div>
|
||||
|
||||
<div id="sso_flow" class="login_flow" style="display:none">
|
||||
Single-sign on:
|
||||
<form id="sso_form" action="/_matrix/client/r0/login/sso/redirect" method="get">
|
||||
<input id="sso_redirect_url" type="hidden" name="redirectUrl" value=""/>
|
||||
<input type="submit" value="Log in"/>
|
||||
</form>
|
||||
<div id="cas_flow" class="login_flow" style="display:none"
|
||||
onclick="gotoCas(); return false;">
|
||||
CAS Authentication: <button id="cas_button" style="margin: 10px">Log in</button>
|
||||
</div>
|
||||
|
||||
<div id="password_flow" class="login_flow" style="display:none">
|
||||
Password Authentication:
|
||||
<form onsubmit="matrixLogin.password_login(); return false;">
|
||||
<input id="user_id" size="32" type="text" placeholder="Matrix ID (e.g. bob)" autocapitalize="off" autocorrect="off" />
|
||||
<br/>
|
||||
<input id="password" size="32" type="password" placeholder="Password"/>
|
||||
<br/>
|
||||
<br/>
|
||||
|
||||
<input type="submit" value="Log in"/>
|
||||
</form>
|
||||
</div>
|
||||
<form id="password_form" class="login_flow" style="display:none"
|
||||
onsubmit="matrixLogin.password_login(); return false;">
|
||||
<div>
|
||||
Password Authentication:<br/>
|
||||
|
||||
<div style="text-align: center">
|
||||
<input id="user_id" size="32" type="text" placeholder="Matrix ID (e.g. bob)" autocapitalize="off" autocorrect="off" />
|
||||
<br/>
|
||||
<input id="password" size="32" type="password" placeholder="Password"/>
|
||||
<br/>
|
||||
|
||||
<button type="submit" style="margin: 10px">Log in</button>
|
||||
</div>
|
||||
</div>
|
||||
</form>
|
||||
|
||||
<div id="no_login_types" type="button" class="login_flow" style="display:none">
|
||||
Log in currently unavailable.
|
||||
|
||||
@@ -1,8 +1,7 @@
|
||||
window.matrixLogin = {
|
||||
endpoint: location.origin + "/_matrix/client/r0/login",
|
||||
endpoint: location.origin + "/_matrix/client/api/v1/login",
|
||||
serverAcceptsPassword: false,
|
||||
serverAcceptsCas: false,
|
||||
serverAcceptsSso: false,
|
||||
serverAcceptsCas: false
|
||||
};
|
||||
|
||||
var submitPassword = function(user, pwd) {
|
||||
@@ -41,6 +40,12 @@ var errorFunc = function(err) {
|
||||
}
|
||||
};
|
||||
|
||||
var gotoCas = function() {
|
||||
var this_page = window.location.origin + window.location.pathname;
|
||||
var redirect_url = matrixLogin.endpoint + "/cas/redirect?redirectUrl=" + encodeURIComponent(this_page);
|
||||
window.location.replace(redirect_url);
|
||||
}
|
||||
|
||||
var setFeedbackString = function(text) {
|
||||
$("#feedback").text(text);
|
||||
};
|
||||
@@ -48,18 +53,12 @@ var setFeedbackString = function(text) {
|
||||
var show_login = function() {
|
||||
$("#loading").hide();
|
||||
|
||||
var this_page = window.location.origin + window.location.pathname;
|
||||
$("#sso_redirect_url").val(encodeURIComponent(this_page));
|
||||
|
||||
if (matrixLogin.serverAcceptsPassword) {
|
||||
$("#password_flow").show();
|
||||
$("#password_form").show();
|
||||
}
|
||||
|
||||
if (matrixLogin.serverAcceptsSso) {
|
||||
$("#sso_flow").show();
|
||||
} else if (matrixLogin.serverAcceptsCas) {
|
||||
$("#sso_form").attr("action", "/_matrix/client/r0/login/cas/redirect");
|
||||
$("#sso_flow").show();
|
||||
if (matrixLogin.serverAcceptsCas) {
|
||||
$("#cas_flow").show();
|
||||
}
|
||||
|
||||
if (!matrixLogin.serverAcceptsPassword && !matrixLogin.serverAcceptsCas) {
|
||||
@@ -68,8 +67,8 @@ var show_login = function() {
|
||||
};
|
||||
|
||||
var show_spinner = function() {
|
||||
$("#password_flow").hide();
|
||||
$("#sso_flow").hide();
|
||||
$("#password_form").hide();
|
||||
$("#cas_flow").hide();
|
||||
$("#no_login_types").hide();
|
||||
$("#loading").show();
|
||||
};
|
||||
@@ -85,10 +84,7 @@ var fetch_info = function(cb) {
|
||||
matrixLogin.serverAcceptsCas = true;
|
||||
console.log("Server accepts CAS");
|
||||
}
|
||||
if ("m.login.sso" === flow.type) {
|
||||
matrixLogin.serverAcceptsSso = true;
|
||||
console.log("Server accepts SSO");
|
||||
}
|
||||
|
||||
if ("m.login.password" === flow.type) {
|
||||
matrixLogin.serverAcceptsPassword = true;
|
||||
console.log("Server accepts password");
|
||||
|
||||
@@ -19,23 +19,30 @@ a:hover { color: #000; }
|
||||
a:active { color: #000; }
|
||||
|
||||
input {
|
||||
width: 90%
|
||||
}
|
||||
|
||||
textarea, input {
|
||||
font-family: inherit;
|
||||
font-size: inherit;
|
||||
margin: 5px;
|
||||
}
|
||||
|
||||
textbox, input[type="text"], input[type="password"] {
|
||||
width: 90%;
|
||||
.smallPrint {
|
||||
color: #888;
|
||||
font-size: 9pt ! important;
|
||||
font-style: italic ! important;
|
||||
}
|
||||
|
||||
form {
|
||||
text-align: center;
|
||||
margin: 10px 0 0 0;
|
||||
.g-recaptcha div {
|
||||
margin: auto;
|
||||
}
|
||||
|
||||
.login_flow {
|
||||
width: 300px;
|
||||
text-align: left;
|
||||
padding: 10px;
|
||||
margin-bottom: 40px;
|
||||
display: inline-block;
|
||||
|
||||
-webkit-border-radius: 10px;
|
||||
-moz-border-radius: 10px;
|
||||
|
||||
@@ -1,63 +0,0 @@
|
||||
<!DOCTYPE html>
|
||||
<html lang="en">
|
||||
<head>
|
||||
<title>Synapse is running</title>
|
||||
<style>
|
||||
body {
|
||||
font-family: -apple-system,BlinkMacSystemFont,"Segoe UI",Roboto,Oxygen-Sans,Ubuntu,Cantarell,"Helvetica Neue",sans-serif;
|
||||
max-width: 40em;
|
||||
margin: auto;
|
||||
text-align: center;
|
||||
}
|
||||
h1, p {
|
||||
margin: 1.5em;
|
||||
}
|
||||
hr {
|
||||
border: none;
|
||||
background-color: #ccc;
|
||||
color: #ccc;
|
||||
height: 1px;
|
||||
width: 7em;
|
||||
margin-top: 4em;
|
||||
}
|
||||
.logo {
|
||||
display: block;
|
||||
width: 12em;
|
||||
margin: 4em auto;
|
||||
}
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
<div class="logo">
|
||||
<svg role="img" aria-label="[Matrix logo]" viewBox="0 0 200 85" version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink">
|
||||
<g id="parent" stroke="none" stroke-width="1" fill="none" fill-rule="evenodd">
|
||||
<g id="child" transform="translate(-122.000000, -6.000000)" fill="#000000" fill-rule="nonzero">
|
||||
<g id="matrix-logo" transform="translate(122.000000, 6.000000)">
|
||||
<polygon id="left-bracket" points="2.24708861 1.93811009 2.24708861 82.7268844 8.10278481 82.7268844 8.10278481 84.6652459 0 84.6652459 0 0 8.10278481 0 8.10278481 1.93811009"></polygon>
|
||||
<path d="M24.8073418,27.5493174 L24.8073418,31.6376991 L24.924557,31.6376991 C26.0227848,30.0814294 27.3455696,28.8730642 28.8951899,28.0163743 C30.4437975,27.1611927 32.2189873,26.7318422 34.218481,26.7318422 C36.1394937,26.7318422 37.8946835,27.102622 39.4825316,27.8416679 C41.0708861,28.5819706 42.276962,29.8856073 43.1005063,31.7548404 C44.0017722,30.431345 45.2270886,29.2629486 46.7767089,28.2506569 C48.3253165,27.2388679 50.158481,26.7318422 52.2764557,26.7318422 C53.8843038,26.7318422 55.3736709,26.9269101 56.7473418,27.3162917 C58.1189873,27.7056734 59.295443,28.3285835 60.2759494,29.185022 C61.255443,30.0422147 62.02,31.1615927 62.5701266,32.5426532 C63.1187342,33.9262275 63.3936709,35.5898349 63.3936709,37.5372459 L63.3936709,57.7443688 L55.0410127,57.7441174 L55.0410127,40.6319376 C55.0410127,39.6201486 55.0020253,38.6661761 54.9232911,37.7700202 C54.8440506,36.8751211 54.6293671,36.0968606 54.2764557,35.4339817 C53.9232911,34.772611 53.403038,34.2464807 52.7177215,33.8568477 C52.0313924,33.4689743 51.0997468,33.2731523 49.9235443,33.2731523 C48.7473418,33.2731523 47.7962025,33.4983853 47.0706329,33.944578 C46.344557,34.393033 45.7764557,34.9774826 45.3650633,35.6969211 C44.9534177,36.4181193 44.6787342,37.2353431 44.5417722,38.150855 C44.4037975,39.0653615 44.3356962,39.9904257 44.3356962,40.9247908 L44.3356962,57.7443688 L35.9835443,57.7443688 L35.9835443,40.8079009 C35.9835443,39.9124991 35.963038,39.0263982 35.9253165,38.150855 C35.8853165,37.2743064 35.7192405,36.4666349 35.424557,35.7263321 C35.1303797,34.9872862 34.64,34.393033 33.9539241,33.944578 C33.2675949,33.4983853 32.2579747,33.2731523 30.9248101,33.2731523 C30.5321519,33.2731523 30.0126582,33.3608826 29.3663291,33.5365945 C28.7192405,33.7118037 28.0913924,34.0433688 27.4840506,34.5292789 C26.875443,35.0164459 26.3564557,35.7172826 25.9250633,36.6315376 C25.4934177,37.5470495 25.2779747,38.7436 25.2779747,40.2229486 L25.2779747,57.7441174 L16.9260759,57.7443688 L16.9260759,27.5493174 L24.8073418,27.5493174 Z" id="m"></path>
|
||||
<path d="M68.7455696,31.9886202 C69.6075949,30.7033339 70.7060759,29.672189 72.0397468,28.8926716 C73.3724051,28.1141596 74.8716456,27.5596239 76.5387342,27.2283101 C78.2050633,26.8977505 79.8817722,26.7315908 81.5678481,26.7315908 C83.0974684,26.7315908 84.6458228,26.8391798 86.2144304,27.0525982 C87.7827848,27.2675248 89.2144304,27.6865688 90.5086076,28.3087248 C91.8025316,28.9313835 92.8610127,29.7983798 93.6848101,30.9074514 C94.5083544,32.0170257 94.92,33.4870734 94.92,35.3173431 L94.92,51.026844 C94.92,52.3913138 94.998481,53.6941963 95.1556962,54.9400165 C95.3113924,56.1865908 95.5863291,57.120956 95.9787342,57.7436147 L87.5091139,57.7436147 C87.3518987,57.276055 87.2240506,56.7996972 87.1265823,56.3125303 C87.0278481,55.8266202 86.9592405,55.3301523 86.9207595,54.8236294 C85.5873418,56.1865908 84.0182278,57.1405633 82.2156962,57.6857982 C80.4113924,58.2295248 78.5683544,58.503022 76.6860759,58.503022 C75.2346835,58.503022 73.8817722,58.3275615 72.6270886,57.9776459 C71.3718987,57.6269761 70.2744304,57.082244 69.3334177,56.3411872 C68.3921519,55.602644 67.656962,54.6680275 67.1275949,53.5390972 C66.5982278,52.410167 66.3331646,51.065556 66.3331646,49.5087835 C66.3331646,47.7961578 66.6367089,46.384178 67.2455696,45.2756092 C67.8529114,44.1652807 68.6367089,43.2799339 69.5987342,42.6173064 C70.5589873,41.9556844 71.6567089,41.4592165 72.8924051,41.1284055 C74.1273418,40.7978459 75.3721519,40.5356606 76.6270886,40.3398385 C77.8820253,40.1457761 79.116962,39.9896716 80.3329114,39.873033 C81.5483544,39.7558917 82.6270886,39.5804312 83.5681013,39.3469028 C84.5093671,39.1133743 85.2536709,38.7732624 85.8032911,38.3250587 C86.3513924,37.8773578 86.6063291,37.2252881 86.5678481,36.3680954 C86.5678481,35.4731963 86.4210127,34.7620532 86.1268354,34.2366771 C85.8329114,33.7113009 85.4405063,33.3018092 84.9506329,33.0099615 C84.4602532,32.7181138 83.8916456,32.5232972 83.2450633,32.4255119 C82.5977215,32.3294862 81.9010127,32.2797138 81.156962,32.2797138 C79.5098734,32.2797138 78.2159494,32.6303835 77.2746835,33.3312202 C76.3339241,34.0320569 75.7837975,35.2007046 75.6275949,36.8354037 L67.275443,36.8354037 C67.3924051,34.8892495 67.8817722,33.2726495 68.7455696,31.9886202 Z M85.2440506,43.6984752 C84.7149367,43.873433 84.1460759,44.0189798 83.5387342,44.1361211 C82.9306329,44.253011 82.2936709,44.350545 81.6270886,44.4279688 C80.96,44.5066495 80.2934177,44.6034294 79.6273418,44.7203193 C78.9994937,44.8362037 78.3820253,44.9933138 77.7749367,45.1871248 C77.1663291,45.3829468 76.636962,45.6451321 76.1865823,45.9759431 C75.7349367,46.3070055 75.3724051,46.7263009 75.0979747,47.2313156 C74.8232911,47.7375872 74.6863291,48.380356 74.6863291,49.1588679 C74.6863291,49.8979138 74.8232911,50.5218294 75.0979747,51.026844 C75.3724051,51.5338697 75.7455696,51.9328037 76.2159494,52.2246514 C76.6863291,52.5164991 77.2349367,52.7213706 77.8632911,52.8375064 C78.4898734,52.9546477 79.136962,53.012967 79.8037975,53.012967 C81.4506329,53.012967 82.724557,52.740978 83.6273418,52.1952404 C84.5288608,51.6507596 85.1949367,50.9981872 85.6270886,50.2382771 C86.0579747,49.4793725 86.323038,48.7119211 86.4212658,47.9321523 C86.518481,47.1536404 86.5681013,46.5304789 86.5681013,46.063422 L86.5681013,42.9677248 C86.2146835,43.2799339 85.7736709,43.5230147 85.2440506,43.6984752 Z" id="a"></path>
|
||||
<path d="M116.917975,27.5493174 L116.917975,33.0976917 L110.801266,33.0976917 L110.801266,48.0492936 C110.801266,49.4502128 111.036203,50.3850807 111.507089,50.8518862 C111.976962,51.3191945 112.918734,51.5527229 114.33038,51.5527229 C114.801013,51.5527229 115.251392,51.5336183 115.683038,51.4944037 C116.114177,51.4561945 116.526076,51.3968697 116.917975,51.3194459 L116.917975,57.7438661 C116.212152,57.860756 115.427595,57.9381798 114.565316,57.9778972 C113.702785,58.0153523 112.859747,58.0357138 112.036203,58.0357138 C110.742278,58.0357138 109.516456,57.9477321 108.36,57.7722716 C107.202785,57.5975651 106.183544,57.2577046 105.301519,56.7509303 C104.418987,56.2454128 103.722785,55.5242147 103.213418,54.5898495 C102.703038,53.6562385 102.448608,52.4292716 102.448608,50.9099541 L102.448608,33.0976917 L97.3903797,33.0976917 L97.3903797,27.5493174 L102.448608,27.5493174 L102.448608,18.4967596 L110.801013,18.4967596 L110.801013,27.5493174 L116.917975,27.5493174 Z" id="t"></path>
|
||||
<path d="M128.857975,27.5493174 L128.857975,33.1565138 L128.975696,33.1565138 C129.367089,32.2213945 129.896203,31.3559064 130.563544,30.557033 C131.23038,29.7596679 131.99443,29.0776844 132.857215,28.5130936 C133.719241,27.9495083 134.641266,27.5113596 135.622532,27.1988991 C136.601772,26.8879468 137.622025,26.7315908 138.681013,26.7315908 C139.229873,26.7315908 139.836962,26.8296275 140.504304,27.0239413 L140.504304,34.7336477 C140.111646,34.6552183 139.641013,34.586844 139.092658,34.5290275 C138.543291,34.4704569 138.014177,34.4410459 137.504304,34.4410459 C135.974937,34.4410459 134.681013,34.6949358 133.622785,35.2004532 C132.564051,35.7067248 131.711392,36.397255 131.064051,37.2735523 C130.417215,38.1501009 129.955443,39.1714422 129.681266,40.3398385 C129.407089,41.5074807 129.269873,42.7736624 129.269873,44.1361211 L129.269873,57.7438661 L120.917722,57.7438661 L120.917722,27.5493174 L128.857975,27.5493174 Z" id="r"></path>
|
||||
<path d="M144.033165,22.8767376 L144.033165,16.0435798 L152.386076,16.0435798 L152.386076,22.8767376 L144.033165,22.8767376 Z M152.386076,27.5493174 L152.386076,57.7438661 L144.033165,57.7438661 L144.033165,27.5493174 L152.386076,27.5493174 Z" id="i"></path>
|
||||
<polygon id="x" points="156.738228 27.5493174 166.266582 27.5493174 171.619494 35.4337303 176.913418 27.5493174 186.147848 27.5493174 176.148861 41.6831927 187.383544 57.7441174 177.85443 57.7441174 171.501772 48.2245028 165.148861 57.7441174 155.797468 57.7441174 166.737468 41.8589046"></polygon>
|
||||
<polygon id="right-bracket" points="197.580759 82.7268844 197.580759 1.93811009 191.725063 1.93811009 191.725063 0 199.828354 0 199.828354 84.6652459 191.725063 84.6652459 191.725063 82.7268844"></polygon>
|
||||
</g>
</g>
</g>
</svg>
</div>
<h1>It works! Synapse is running</h1>
<p>Your Synapse server is listening on this port and is ready for messages.</p>
<p>To use this server you'll need <a href="https://matrix.org/docs/projects/try-matrix-now.html#clients" target="_blank">a Matrix client</a>.
</p>
<p>Welcome to the Matrix universe :)</p>
<hr>
<p>
<small>
<a href="https://matrix.org" target="_blank">
matrix.org
</a>
</small>
</p>
</body>
</html>
@@ -119,6 +119,7 @@ class DataStore(RoomMemberStore, RoomStore,
            db_conn, "device_lists_stream", "stream_id",
        )

        self._transaction_id_gen = IdGenerator(db_conn, "sent_transactions", "id")
        self._access_tokens_id_gen = IdGenerator(db_conn, "access_tokens", "id")
        self._event_reports_id_gen = IdGenerator(db_conn, "event_reports", "id")
        self._push_rule_id_gen = IdGenerator(db_conn, "push_rules", "id")

@@ -29,7 +29,6 @@ from synapse.api.errors import StoreError
from synapse.storage.engines import PostgresEngine
from synapse.util.caches.descriptors import Cache
from synapse.util.logcontext import LoggingContext, PreserveLoggingContext
from synapse.util.stringutils import exception_to_unicode

logger = logging.getLogger(__name__)

@@ -250,32 +249,32 @@ class SQLBaseStore(object):
            except self.database_engine.module.OperationalError as e:
                # This can happen if the database disappears mid
                # transaction.
                logger.warning(
                logger.warn(
                    "[TXN OPERROR] {%s} %s %d/%d",
                    name, exception_to_unicode(e), i, N
                    name, e, i, N
                )
                if i < N:
                    i += 1
                    try:
                        conn.rollback()
                    except self.database_engine.module.Error as e1:
                        logger.warning(
                        logger.warn(
                            "[TXN EROLL] {%s} %s",
                            name, exception_to_unicode(e1),
                            name, e1,
                        )
                        continue
                raise
            except self.database_engine.module.DatabaseError as e:
                if self.database_engine.is_deadlock(e):
                    logger.warning("[TXN DEADLOCK] {%s} %d/%d", name, i, N)
                    logger.warn("[TXN DEADLOCK] {%s} %d/%d", name, i, N)
                    if i < N:
                        i += 1
                        try:
                            conn.rollback()
                        except self.database_engine.module.Error as e1:
                            logger.warning(
                            logger.warn(
                                "[TXN EROLL] {%s} %s",
                                name, exception_to_unicode(e1),
                                name, e1,
                            )
                            continue
                raise
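The hunk above is the transaction-retry loop in SQLBaseStore.runInteraction: a transient failure (the connection dropping mid-transaction, or a deadlock) is logged as a warning, the connection is rolled back, and the whole interaction is retried a bounded number of times before the error is re-raised. The following standalone sketch illustrates that retry-and-rollback pattern; the run_txn callback, the is_transient predicate and the SQLite usage are illustrative assumptions, not Synapse's own API.

import logging
import sqlite3

logger = logging.getLogger(__name__)

def run_with_retries(conn, run_txn, is_transient, max_attempts=5):
    """Run `run_txn(cursor)` in a transaction, retrying transient failures.

    `is_transient(exc)` decides whether an exception is worth retrying
    (e.g. a deadlock or a dropped connection); anything else propagates.
    """
    attempt = 1
    while True:
        try:
            cur = conn.cursor()
            result = run_txn(cur)
            conn.commit()
            return result
        except Exception as e:
            logger.warning("[TXN FAIL] %s %d/%d", e, attempt, max_attempts)
            try:
                conn.rollback()
            except Exception as rollback_err:
                logger.warning("[TXN EROLL] %s", rollback_err)
            if attempt < max_attempts and is_transient(e):
                attempt += 1
                continue
            raise

if __name__ == "__main__":
    # Example usage against an in-memory SQLite database.
    db = sqlite3.connect(":memory:")
    db.execute("CREATE TABLE counters (name TEXT PRIMARY KEY, value INTEGER)")
    run_with_retries(
        db,
        lambda cur: cur.execute(
            "INSERT INTO counters (name, value) VALUES (?, ?)", ("hits", 1)
        ),
        is_transient=lambda e: isinstance(e, sqlite3.OperationalError),
    )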
@@ -850,9 +849,9 @@ class SQLBaseStore(object):
        rowcount = cls._simple_update_txn(txn, table, keyvalues, updatevalues)

        if rowcount == 0:
            raise StoreError(404, "No row found (%s)" % (table,))
            raise StoreError(404, "No row found")
        if rowcount > 1:
            raise StoreError(500, "More than one row matched (%s)" % (table,))
            raise StoreError(500, "More than one row matched")

    @staticmethod
    def _simple_select_one_txn(txn, table, keyvalues, retcols,
@@ -869,9 +868,9 @@ class SQLBaseStore(object):
        if not row:
            if allow_none:
                return None
            raise StoreError(404, "No row found (%s)" % (table,))
            raise StoreError(404, "No row found")
        if txn.rowcount > 1:
            raise StoreError(500, "More than one row matched (%s)" % (table,))
            raise StoreError(500, "More than one row matched")

        return dict(zip(retcols, row))

@@ -903,9 +902,9 @@ class SQLBaseStore(object):

        txn.execute(sql, list(keyvalues.values()))
        if txn.rowcount == 0:
            raise StoreError(404, "No row found (%s)" % (table,))
            raise StoreError(404, "No row found")
        if txn.rowcount > 1:
            raise StoreError(500, "More than one row matched (%s)" % (table,))
            raise StoreError(500, "more than one row matched")

    def _simple_delete(self, table, keyvalues, desc):
        return self.runInteraction(

@@ -182,7 +182,7 @@ class EndToEndRoomKeyStore(SQLBaseStore):

        keyvalues = {
            "user_id": user_id,
            "version": int(version),
            "version": version,
        }
        if room_id:
            keyvalues['room_id'] = room_id

@@ -34,9 +34,8 @@ class MonthlyActiveUsersStore(SQLBaseStore):
        self.hs = hs
        self.reserved_users = ()
        # Do not add more reserved users than the total allowable number
        self._new_transaction(
            dbconn, "initialise_mau_threepids", [], [],
            self._initialise_reserved_users,
        self._initialise_reserved_users(
            dbconn.cursor(),
            hs.config.mau_limits_reserved_threepids[:self.hs.config.max_mau_value],
        )


@@ -25,7 +25,7 @@ logger = logging.getLogger(__name__)

# Remember to update this number every time a change is made to database
# schema files, so the users will be informed on server restarts.
SCHEMA_VERSION = 53
SCHEMA_VERSION = 52

dir_path = os.path.abspath(os.path.dirname(__file__))

@@ -257,7 +257,7 @@ def _upgrade_existing_database(cur, current_version, applied_delta_files,
                module.run_create(cur, database_engine)
                if not is_empty:
                    module.run_upgrade(cur, database_engine, config=config)
            elif ext == ".pyc" or file_name == "__pycache__":
            elif ext == ".pyc":
                # Sometimes .pyc files turn up anyway even though we've
                # disabled their generation; e.g. from distribution package
                # installers. Silently skip it

@@ -1,4 +1,4 @@
/* Copyright 2018 New Vector Ltd
/* Copyright 2015, 2016 OpenMarket Ltd
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
@@ -13,4 +13,4 @@
 * limitations under the License.
 */

DROP TABLE IF EXISTS sent_transactions;
CREATE INDEX IF NOT EXISTS sent_transaction_txn_id ON sent_transactions(transaction_id);
synapse/storage/schema/delta/34/sent_txn_purge.py (new file, 32 lines)
@@ -0,0 +1,32 @@
# Copyright 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import logging

from synapse.storage.engines import PostgresEngine

logger = logging.getLogger(__name__)


def run_create(cur, database_engine, *args, **kwargs):
    if isinstance(database_engine, PostgresEngine):
        cur.execute("TRUNCATE sent_transactions")
    else:
        cur.execute("DELETE FROM sent_transactions")

    cur.execute("CREATE INDEX sent_transactions_ts ON sent_transactions(ts)")


def run_upgrade(cur, database_engine, *args, **kwargs):
    pass
@@ -25,6 +25,25 @@ CREATE TABLE IF NOT EXISTS received_transactions(

CREATE INDEX transactions_have_ref ON received_transactions(origin, has_been_referenced);-- WHERE has_been_referenced = 0;


-- Stores what transactions we've sent, what their response was (if we got one) and whether we have
-- since referenced the transaction in another outgoing transaction
CREATE TABLE IF NOT EXISTS sent_transactions(
    id INTEGER PRIMARY KEY AUTOINCREMENT, -- This is used to apply insertion ordering
    transaction_id TEXT,
    destination TEXT,
    response_code INTEGER DEFAULT 0,
    response_json TEXT,
    ts BIGINT
);

CREATE INDEX sent_transaction_dest ON sent_transactions(destination);
CREATE INDEX sent_transaction_txn_id ON sent_transactions(transaction_id);
-- So that we can do an efficient look up of all transactions that have yet to be successfully
-- sent.
CREATE INDEX sent_transaction_sent ON sent_transactions(response_code);


-- For sent transactions only.
CREATE TABLE IF NOT EXISTS transaction_id_to_pdu(
    transaction_id INTEGER,

@@ -25,6 +25,25 @@ CREATE TABLE IF NOT EXISTS received_transactions(

CREATE INDEX transactions_have_ref ON received_transactions(origin, has_been_referenced);-- WHERE has_been_referenced = 0;


-- Stores what transactions we've sent, what their response was (if we got one) and whether we have
-- since referenced the transaction in another outgoing transaction
CREATE TABLE IF NOT EXISTS sent_transactions(
    id BIGINT PRIMARY KEY, -- This is used to apply insertion ordering
    transaction_id TEXT,
    destination TEXT,
    response_code INTEGER DEFAULT 0,
    response_json TEXT,
    ts BIGINT
);

CREATE INDEX sent_transaction_dest ON sent_transactions(destination);
CREATE INDEX sent_transaction_txn_id ON sent_transactions(transaction_id);
-- So that we can do an efficient look up of all transactions that have yet to be successfully
-- sent.
CREATE INDEX sent_transaction_sent ON sent_transactions(response_code);


-- For sent transactions only.
CREATE TABLE IF NOT EXISTS transaction_id_to_pdu(
    transaction_id INTEGER,

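Both variants of this schema (the one above with an AUTOINCREMENT key, and this one with a plain BIGINT key) index response_code specifically so that transactions that have not yet been sent successfully can be found cheaply. A minimal sketch of that lookup, assuming the SQLite flavour of the table and reading the default response_code of 0 as "no successful response yet":

import sqlite3

conn = sqlite3.connect(":memory:")
conn.executescript("""
CREATE TABLE IF NOT EXISTS sent_transactions(
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    transaction_id TEXT,
    destination TEXT,
    response_code INTEGER DEFAULT 0,
    response_json TEXT,
    ts BIGINT
);
CREATE INDEX sent_transaction_sent ON sent_transactions(response_code);
""")

conn.execute(
    "INSERT INTO sent_transactions (transaction_id, destination, ts) VALUES (?, ?, ?)",
    ("txn1", "remote.example.com", 1545000000000),
)

# Rows still at the default response_code of 0 never got a response recorded,
# so this picks out the transactions that still need to be (re)sent.
pending = conn.execute(
    "SELECT transaction_id, destination FROM sent_transactions WHERE response_code = 0"
).fetchall()
print(pending)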
@@ -45,10 +45,6 @@ class SearchStore(BackgroundUpdateStore):

    def __init__(self, db_conn, hs):
        super(SearchStore, self).__init__(db_conn, hs)

        if not hs.config.enable_search:
            return

        self.register_background_update_handler(
            self.EVENT_SEARCH_UPDATE_NAME, self._background_reindex_search
        )
@@ -320,8 +316,6 @@ class SearchStore(BackgroundUpdateStore):
            entries (iterable[SearchEntry]):
                entries to be added to the table
        """
        if not self.hs.config.enable_search:
            return
        if isinstance(self.database_engine, PostgresEngine):
            sql = (
                "INSERT INTO event_search"

@@ -16,8 +16,7 @@
import random
import string

import six
from six import PY2, PY3
from six import PY3
from six.moves import range

_string_with_symbols = (
@@ -72,39 +71,3 @@ def to_ascii(s):
        return s.encode("ascii")
    except UnicodeEncodeError:
        return s


def exception_to_unicode(e):
    """Helper function to extract the text of an exception as a unicode string

    Args:
        e (Exception): exception to be stringified

    Returns:
        unicode
    """
    # urgh, this is a mess. The basic problem here is that psycopg2 constructs its
    # exceptions with PyErr_SetString, with a (possibly non-ascii) argument. str() will
    # then produce the raw byte sequence. Under Python 2, this will then cause another
    # error if it gets mixed with a `unicode` object, as per
    # https://github.com/matrix-org/synapse/issues/4252

    # First of all, if we're under python3, everything is fine because it will sort this
    # nonsense out for us.
    if not PY2:
        return str(e)

    # otherwise let's have a stab at decoding the exception message. We'll circumvent
    # Exception.__str__(), which would explode if someone raised Exception(u'non-ascii')
    # and instead look at what is in the args member.

    if len(e.args) == 0:
        return u""
    elif len(e.args) > 1:
        return six.text_type(repr(e.args))

    msg = e.args[0]
    if isinstance(msg, bytes):
        return msg.decode('utf-8', errors='replace')
    else:
        return msg

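The exception_to_unicode helper removed above exists because, under Python 2, psycopg2 builds its exceptions from raw (possibly non-ASCII) byte strings, and stringifying such an exception blows up as soon as it is mixed with unicode while logging (see the linked issue). The essential trick is to look at e.args[0] and decode it with errors="replace" rather than trusting str(e). A small sketch of that idea, runnable on Python 3 (where str() of a bytes-argument exception merely gives you the bytes repr rather than readable text); it is an illustration, not the original function:

# -*- coding: utf-8 -*-

def exception_text(e):
    # Prefer the raw argument over str(e): for a bytes payload, str(e) yields
    # something like "b'...'" with escape sequences rather than readable text.
    if not e.args:
        return ""
    msg = e.args[0]
    if isinstance(msg, bytes):
        # errors="replace" avoids raising a second error while logging the
        # first one, which is exactly the failure mode being worked around.
        return msg.decode("utf-8", errors="replace")
    return str(msg)

# A psycopg2-style exception whose message is a non-ASCII byte string:
err = RuntimeError("ERREUR: la table « café » n'existe pas".encode("utf-8"))

print(str(err))             # bytes repr: b'ERREUR: la table \xc2\xab caf\xc3\xa9 ...'
print(exception_text(err))  # readable text: ERREUR: la table « café » n'existe pas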
@@ -1,6 +1,5 @@
# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
# Copyright 2018 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -16,9 +15,7 @@

from twisted.trial import util

import tests.patch_inline_callbacks

# attempt to do the patch before we load any synapse code
tests.patch_inline_callbacks.do_patch()
from tests import utils

util.DEFAULT_TIMEOUT_DURATION = 10
utils.setupdb()

@@ -192,6 +192,8 @@ class AuthTestCase(unittest.TestCase):

    @defer.inlineCallbacks
    def test_get_user_from_macaroon(self):
        # TODO(danielwh): Remove this mock when we remove the
        # get_user_by_access_token fallback.
        self.store.get_user_by_access_token = Mock(
            return_value={"name": "@baldrick:matrix.org", "device_id": "device"}
        )
@@ -216,7 +218,6 @@ class AuthTestCase(unittest.TestCase):
    @defer.inlineCallbacks
    def test_get_guest_user_from_macaroon(self):
        self.store.get_user_by_id = Mock(return_value={"is_guest": True})
        self.store.get_user_by_access_token = Mock(return_value=None)

        user_id = "@baldrick:matrix.org"
        macaroon = pymacaroons.Macaroon(
@@ -237,6 +238,158 @@ class AuthTestCase(unittest.TestCase):
        self.assertTrue(is_guest)
        self.store.get_user_by_id.assert_called_with(user_id)

    @defer.inlineCallbacks
    def test_get_user_from_macaroon_user_db_mismatch(self):
        self.store.get_user_by_access_token = Mock(
            return_value={"name": "@percy:matrix.org"}
        )

        user = "@baldrick:matrix.org"
        macaroon = pymacaroons.Macaroon(
            location=self.hs.config.server_name,
            identifier="key",
            key=self.hs.config.macaroon_secret_key,
        )
        macaroon.add_first_party_caveat("gen = 1")
        macaroon.add_first_party_caveat("type = access")
        macaroon.add_first_party_caveat("user_id = %s" % (user,))
        with self.assertRaises(AuthError) as cm:
            yield self.auth.get_user_by_access_token(macaroon.serialize())
        self.assertEqual(401, cm.exception.code)
        self.assertIn("User mismatch", cm.exception.msg)

    @defer.inlineCallbacks
    def test_get_user_from_macaroon_missing_caveat(self):
        # TODO(danielwh): Remove this mock when we remove the
        # get_user_by_access_token fallback.
        self.store.get_user_by_access_token = Mock(
            return_value={"name": "@baldrick:matrix.org"}
        )

        macaroon = pymacaroons.Macaroon(
            location=self.hs.config.server_name,
            identifier="key",
            key=self.hs.config.macaroon_secret_key,
        )
        macaroon.add_first_party_caveat("gen = 1")
        macaroon.add_first_party_caveat("type = access")

        with self.assertRaises(AuthError) as cm:
            yield self.auth.get_user_by_access_token(macaroon.serialize())
        self.assertEqual(401, cm.exception.code)
        self.assertIn("No user caveat", cm.exception.msg)
    @defer.inlineCallbacks
    def test_get_user_from_macaroon_wrong_key(self):
        # TODO(danielwh): Remove this mock when we remove the
        # get_user_by_access_token fallback.
        self.store.get_user_by_access_token = Mock(
            return_value={"name": "@baldrick:matrix.org"}
        )

        user = "@baldrick:matrix.org"
        macaroon = pymacaroons.Macaroon(
            location=self.hs.config.server_name,
            identifier="key",
            key=self.hs.config.macaroon_secret_key + "wrong",
        )
        macaroon.add_first_party_caveat("gen = 1")
        macaroon.add_first_party_caveat("type = access")
        macaroon.add_first_party_caveat("user_id = %s" % (user,))

        with self.assertRaises(AuthError) as cm:
            yield self.auth.get_user_by_access_token(macaroon.serialize())
        self.assertEqual(401, cm.exception.code)
        self.assertIn("Invalid macaroon", cm.exception.msg)

    @defer.inlineCallbacks
    def test_get_user_from_macaroon_unknown_caveat(self):
        # TODO(danielwh): Remove this mock when we remove the
        # get_user_by_access_token fallback.
        self.store.get_user_by_access_token = Mock(
            return_value={"name": "@baldrick:matrix.org"}
        )

        user = "@baldrick:matrix.org"
        macaroon = pymacaroons.Macaroon(
            location=self.hs.config.server_name,
            identifier="key",
            key=self.hs.config.macaroon_secret_key,
        )
        macaroon.add_first_party_caveat("gen = 1")
        macaroon.add_first_party_caveat("type = access")
        macaroon.add_first_party_caveat("user_id = %s" % (user,))
        macaroon.add_first_party_caveat("cunning > fox")

        with self.assertRaises(AuthError) as cm:
            yield self.auth.get_user_by_access_token(macaroon.serialize())
        self.assertEqual(401, cm.exception.code)
        self.assertIn("Invalid macaroon", cm.exception.msg)
    @defer.inlineCallbacks
    def test_get_user_from_macaroon_expired(self):
        # TODO(danielwh): Remove this mock when we remove the
        # get_user_by_access_token fallback.
        self.store.get_user_by_access_token = Mock(
            return_value={"name": "@baldrick:matrix.org"}
        )

        self.store.get_user_by_access_token = Mock(
            return_value={"name": "@baldrick:matrix.org"}
        )

        user = "@baldrick:matrix.org"
        macaroon = pymacaroons.Macaroon(
            location=self.hs.config.server_name,
            identifier="key",
            key=self.hs.config.macaroon_secret_key,
        )
        macaroon.add_first_party_caveat("gen = 1")
        macaroon.add_first_party_caveat("type = access")
        macaroon.add_first_party_caveat("user_id = %s" % (user,))
        macaroon.add_first_party_caveat("time < -2000") # ms

        self.hs.clock.now = 5000 # seconds
        self.hs.config.expire_access_token = True
        # yield self.auth.get_user_by_access_token(macaroon.serialize())
        # TODO(daniel): Turn on the check that we validate expiration, when we
        # validate expiration (and remove the above line, which will start
        # throwing).
        with self.assertRaises(AuthError) as cm:
            yield self.auth.get_user_by_access_token(macaroon.serialize())
        self.assertEqual(401, cm.exception.code)
        self.assertIn("Invalid macaroon", cm.exception.msg)
    @defer.inlineCallbacks
    def test_get_user_from_macaroon_with_valid_duration(self):
        # TODO(danielwh): Remove this mock when we remove the
        # get_user_by_access_token fallback.
        self.store.get_user_by_access_token = Mock(
            return_value={"name": "@baldrick:matrix.org"}
        )

        self.store.get_user_by_access_token = Mock(
            return_value={"name": "@baldrick:matrix.org"}
        )

        user_id = "@baldrick:matrix.org"
        macaroon = pymacaroons.Macaroon(
            location=self.hs.config.server_name,
            identifier="key",
            key=self.hs.config.macaroon_secret_key,
        )
        macaroon.add_first_party_caveat("gen = 1")
        macaroon.add_first_party_caveat("type = access")
        macaroon.add_first_party_caveat("user_id = %s" % (user_id,))
        macaroon.add_first_party_caveat("time < 900000000") # ms

        self.hs.clock.now = 5000 # seconds
        self.hs.config.expire_access_token = True

        user_info = yield self.auth.get_user_by_access_token(macaroon.serialize())
        user = user_info["user"]
        self.assertEqual(UserID.from_string(user_id), user)

    @defer.inlineCallbacks
    def test_cannot_use_regular_token_as_guest(self):
        USER_ID = "@percy:matrix.org"

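The tests above build access-token macaroons by hand with pymacaroons and then assert on how Auth.get_user_by_access_token reacts to each caveat. For context, this is how the same caveats would be checked with pymacaroons' own Verifier; the server name and secret below are placeholders, and Synapse's real caveat handling lives in its Auth code rather than in a Verifier:

import pymacaroons

SECRET = "macaroon-secret-key"  # stands in for hs.config.macaroon_secret_key

macaroon = pymacaroons.Macaroon(
    location="example.com",
    identifier="key",
    key=SECRET,
)
macaroon.add_first_party_caveat("gen = 1")
macaroon.add_first_party_caveat("type = access")
macaroon.add_first_party_caveat("user_id = @baldrick:matrix.org")

v = pymacaroons.Verifier()
v.satisfy_exact("gen = 1")
v.satisfy_exact("type = access")
# Accept any user_id caveat; a real server would also extract and use it.
v.satisfy_general(lambda caveat: caveat.startswith("user_id = "))

# verify() raises if any caveat is unsatisfied or the signature is wrong,
# mirroring the "Invalid macaroon" cases in the tests above.
print(v.verify(macaroon, SECRET))            # True
try:
    v.verify(macaroon, SECRET + "wrong")
except Exception as e:
    print("rejected:", e)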
@@ -63,14 +63,6 @@ class KeyringTestCase(unittest.TestCase):
        keys = self.mock_perspective_server.get_verify_keys()
        self.hs.config.perspectives = {self.mock_perspective_server.server_name: keys}

    def assert_sentinel_context(self):
        if LoggingContext.current_context() != LoggingContext.sentinel:
            self.fail(
                "Expected sentinel context but got %s" % (
                    LoggingContext.current_context(),
                )
            )

    def check_context(self, _, expected):
        self.assertEquals(
            getattr(LoggingContext.current_context(), "request", None), expected
@@ -78,6 +70,8 @@ class KeyringTestCase(unittest.TestCase):

    @defer.inlineCallbacks
    def test_wait_for_previous_lookups(self):
        sentinel_context = LoggingContext.current_context()

        kr = keyring.Keyring(self.hs)

        lookup_1_deferred = defer.Deferred()
@@ -105,10 +99,8 @@ class KeyringTestCase(unittest.TestCase):
            ["server1"], {"server1": lookup_2_deferred}
        )
        self.assertFalse(wait_2_deferred.called)

        # ... so we should have reset the LoggingContext.
        self.assert_sentinel_context()

        self.assertIs(LoggingContext.current_context(), sentinel_context)
        wait_2_deferred.addBoth(self.check_context, "two")

        # let the first lookup complete (in the sentinel context)
@@ -206,6 +198,8 @@ class KeyringTestCase(unittest.TestCase):
        json1 = {}
        signedjson.sign.sign_json(json1, "server9", key1)

        sentinel_context = LoggingContext.current_context()

        with LoggingContext("one") as context_one:
            context_one.request = "one"

@@ -219,7 +213,7 @@ class KeyringTestCase(unittest.TestCase):

            defer = kr.verify_json_for_server("server9", json1)
            self.assertFalse(defer.called)
            self.assert_sentinel_context()
            self.assertIs(LoggingContext.current_context(), sentinel_context)
            yield defer

            self.assertIs(LoggingContext.current_context(), context_one)

@@ -150,6 +150,7 @@ class RegistrationTestCase(unittest.TestCase):
        self.hs.config.auto_join_rooms = [room_alias_str]
        res = yield self.handler.register(localpart='jeff')
        rooms = yield self.store.get_rooms_for_user(res[0])

        directory_handler = self.hs.get_handlers().directory_handler
        room_alias = RoomAlias.from_string(room_alias_str)
        room_id = yield directory_handler.get_association(room_alias)
@@ -183,14 +184,3 @@ class RegistrationTestCase(unittest.TestCase):
        res = yield self.handler.register(localpart='jeff')
        rooms = yield self.store.get_rooms_for_user(res[0])
        self.assertEqual(len(rooms), 0)

    @defer.inlineCallbacks
    def test_auto_create_auto_join_where_no_consent(self):
        self.hs.config.user_consent_at_registration = True
        self.hs.config.block_events_without_consent_error = "Error"
        room_alias_str = "#room:test"
        self.hs.config.auto_join_rooms = [room_alias_str]
        res = yield self.handler.register(localpart='jeff')
        yield self.handler.post_consent_actions(res[0])
        rooms = yield self.store.get_rooms_for_user(res[0])
        self.assertEqual(len(rooms), 0)

@@ -1,90 +0,0 @@
# -*- coding: utf-8 -*-
# Copyright 2018 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import print_function

import functools
import sys

from twisted.internet import defer
from twisted.internet.defer import Deferred
from twisted.python.failure import Failure


def do_patch():
    """
    Patch defer.inlineCallbacks so that it checks the state of the logcontext on exit
    """

    from synapse.util.logcontext import LoggingContext

    orig_inline_callbacks = defer.inlineCallbacks

    def new_inline_callbacks(f):

        orig = orig_inline_callbacks(f)

        @functools.wraps(f)
        def wrapped(*args, **kwargs):
            start_context = LoggingContext.current_context()

            try:
                res = orig(*args, **kwargs)
            except Exception:
                if LoggingContext.current_context() != start_context:
                    err = "%s changed context from %s to %s on exception" % (
                        f, start_context, LoggingContext.current_context()
                    )
                    print(err, file=sys.stderr)
                    raise Exception(err)
                raise

            if not isinstance(res, Deferred) or res.called:
                if LoggingContext.current_context() != start_context:
                    err = "%s changed context from %s to %s" % (
                        f, start_context, LoggingContext.current_context()
                    )
                    # print the error to stderr because otherwise all we
                    # see in travis-ci is the 500 error
                    print(err, file=sys.stderr)
                    raise Exception(err)
                return res

            if LoggingContext.current_context() != LoggingContext.sentinel:
                err = (
                    "%s returned incomplete deferred in non-sentinel context "
                    "%s (start was %s)"
                ) % (
                    f, LoggingContext.current_context(), start_context,
                )
                print(err, file=sys.stderr)
                raise Exception(err)

            def check_ctx(r):
                if LoggingContext.current_context() != start_context:
                    err = "%s completion of %s changed context from %s to %s" % (
                        "Failure" if isinstance(r, Failure) else "Success",
                        f, start_context, LoggingContext.current_context(),
                    )
                    print(err, file=sys.stderr)
                    raise Exception(err)
                return r

            res.addBoth(check_ctx)
            return res

        return wrapped

    defer.inlineCallbacks = new_inline_callbacks
@@ -30,7 +30,6 @@ from synapse.rest.media.v1._base import FileInfo
from synapse.rest.media.v1.filepath import MediaFilePaths
from synapse.rest.media.v1.media_storage import MediaStorage
from synapse.rest.media.v1.storage_provider import FileStorageProviderBackend
from synapse.util.logcontext import make_deferred_yieldable
from synapse.util.module_loader import load_module

from tests import unittest
@@ -114,7 +113,7 @@ class MediaRepoTests(unittest.HomeserverTestCase):
            d = Deferred()
            d.addCallback(write_to)
            self.fetches.append((d, destination, path, args))
            return make_deferred_yieldable(d)
            return d

        client = Mock()
        client.get_file = get_file

Some files were not shown because too many files have changed in this diff