Compare commits
4 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
| 6f98bc6512 | |||
| 50a63d5fda | |||
| 280bfb15ce | |||
| fff89c33d8 |
@@ -0,0 +1,21 @@
|
|||||||
|
version: '3.1'
|
||||||
|
|
||||||
|
services:
|
||||||
|
|
||||||
|
postgres:
|
||||||
|
image: postgres:9.5
|
||||||
|
environment:
|
||||||
|
POSTGRES_PASSWORD: postgres
|
||||||
|
|
||||||
|
testenv:
|
||||||
|
image: python:3.5
|
||||||
|
depends_on:
|
||||||
|
- postgres
|
||||||
|
env_file: .env
|
||||||
|
environment:
|
||||||
|
SYNAPSE_POSTGRES_HOST: postgres
|
||||||
|
SYNAPSE_POSTGRES_USER: postgres
|
||||||
|
SYNAPSE_POSTGRES_PASSWORD: postgres
|
||||||
|
working_dir: /app
|
||||||
|
volumes:
|
||||||
|
- ..:/app
|
||||||
@@ -0,0 +1,21 @@
|
|||||||
|
version: '3.1'
|
||||||
|
|
||||||
|
services:
|
||||||
|
|
||||||
|
postgres:
|
||||||
|
image: postgres:11
|
||||||
|
environment:
|
||||||
|
POSTGRES_PASSWORD: postgres
|
||||||
|
|
||||||
|
testenv:
|
||||||
|
image: python:3.7
|
||||||
|
depends_on:
|
||||||
|
- postgres
|
||||||
|
env_file: .env
|
||||||
|
environment:
|
||||||
|
SYNAPSE_POSTGRES_HOST: postgres
|
||||||
|
SYNAPSE_POSTGRES_USER: postgres
|
||||||
|
SYNAPSE_POSTGRES_PASSWORD: postgres
|
||||||
|
working_dir: /app
|
||||||
|
volumes:
|
||||||
|
- ..:/app
|
||||||
@@ -0,0 +1,21 @@
|
|||||||
|
version: '3.1'
|
||||||
|
|
||||||
|
services:
|
||||||
|
|
||||||
|
postgres:
|
||||||
|
image: postgres:9.5
|
||||||
|
environment:
|
||||||
|
POSTGRES_PASSWORD: postgres
|
||||||
|
|
||||||
|
testenv:
|
||||||
|
image: python:3.7
|
||||||
|
depends_on:
|
||||||
|
- postgres
|
||||||
|
env_file: .env
|
||||||
|
environment:
|
||||||
|
SYNAPSE_POSTGRES_HOST: postgres
|
||||||
|
SYNAPSE_POSTGRES_USER: postgres
|
||||||
|
SYNAPSE_POSTGRES_PASSWORD: postgres
|
||||||
|
working_dir: /app
|
||||||
|
volumes:
|
||||||
|
- ..:/app
|
||||||
@@ -0,0 +1,33 @@
|
|||||||
|
import sys
|
||||||
|
from tap.parser import Parser
|
||||||
|
from tap.line import Result, Unknown, Diagnostic
|
||||||
|
|
||||||
|
out = ["### TAP Output for " + sys.argv[2]]
|
||||||
|
|
||||||
|
p = Parser()
|
||||||
|
|
||||||
|
in_error = False
|
||||||
|
|
||||||
|
for line in p.parse_file(sys.argv[1]):
|
||||||
|
if isinstance(line, Result):
|
||||||
|
if in_error:
|
||||||
|
out.append("")
|
||||||
|
out.append("</pre></code></details>")
|
||||||
|
out.append("")
|
||||||
|
out.append("----")
|
||||||
|
out.append("")
|
||||||
|
in_error = False
|
||||||
|
|
||||||
|
if not line.ok and not line.todo:
|
||||||
|
in_error = True
|
||||||
|
|
||||||
|
out.append("FAILURE Test #%d: ``%s``" % (line.number, line.description))
|
||||||
|
out.append("")
|
||||||
|
out.append("<details><summary>Show log</summary><code><pre>")
|
||||||
|
|
||||||
|
elif isinstance(line, Diagnostic) and in_error:
|
||||||
|
out.append(line.text)
|
||||||
|
|
||||||
|
if out:
|
||||||
|
for line in out[:-3]:
|
||||||
|
print(line)
|
||||||
@@ -1,6 +1,6 @@
|
|||||||
#!/usr/bin/env bash
|
#!/usr/bin/env bash
|
||||||
|
|
||||||
set -e
|
set -ex
|
||||||
|
|
||||||
if [[ "$BUILDKITE_BRANCH" =~ ^(develop|master|dinsic|shhs|release-.*)$ ]]; then
|
if [[ "$BUILDKITE_BRANCH" =~ ^(develop|master|dinsic|shhs|release-.*)$ ]]; then
|
||||||
echo "Not merging forward, as this is a release branch"
|
echo "Not merging forward, as this is a release branch"
|
||||||
@@ -18,8 +18,6 @@ else
|
|||||||
GITBASE=$BUILDKITE_PULL_REQUEST_BASE_BRANCH
|
GITBASE=$BUILDKITE_PULL_REQUEST_BASE_BRANCH
|
||||||
fi
|
fi
|
||||||
|
|
||||||
echo "--- merge_base_branch $GITBASE"
|
|
||||||
|
|
||||||
# Show what we are before
|
# Show what we are before
|
||||||
git --no-pager show -s
|
git --no-pager show -s
|
||||||
|
|
||||||
@@ -29,7 +27,7 @@ git config --global user.name "A robot"
|
|||||||
|
|
||||||
# Fetch and merge. If it doesn't work, it will raise due to set -e.
|
# Fetch and merge. If it doesn't work, it will raise due to set -e.
|
||||||
git fetch -u origin $GITBASE
|
git fetch -u origin $GITBASE
|
||||||
git merge --no-edit --no-commit origin/$GITBASE
|
git merge --no-edit origin/$GITBASE
|
||||||
|
|
||||||
# Show what we are after.
|
# Show what we are after.
|
||||||
git --no-pager show -s
|
git --no-pager show -s
|
||||||
|
|||||||
@@ -0,0 +1,232 @@
|
|||||||
|
env:
|
||||||
|
CODECOV_TOKEN: "2dd7eb9b-0eda-45fe-a47c-9b5ac040045f"
|
||||||
|
|
||||||
|
steps:
|
||||||
|
|
||||||
|
- command:
|
||||||
|
- "python -m pip install tox"
|
||||||
|
- "tox -e check_codestyle"
|
||||||
|
label: "\U0001F9F9 Check Style"
|
||||||
|
plugins:
|
||||||
|
- docker#v3.0.1:
|
||||||
|
image: "python:3.6"
|
||||||
|
|
||||||
|
- command:
|
||||||
|
- "python -m pip install tox"
|
||||||
|
- "tox -e packaging"
|
||||||
|
label: "\U0001F9F9 packaging"
|
||||||
|
plugins:
|
||||||
|
- docker#v3.0.1:
|
||||||
|
image: "python:3.6"
|
||||||
|
|
||||||
|
- command:
|
||||||
|
- "python -m pip install tox"
|
||||||
|
- "tox -e check_isort"
|
||||||
|
label: "\U0001F9F9 isort"
|
||||||
|
plugins:
|
||||||
|
- docker#v3.0.1:
|
||||||
|
image: "python:3.6"
|
||||||
|
|
||||||
|
- command:
|
||||||
|
- "python -m pip install tox"
|
||||||
|
- "scripts-dev/check-newsfragment"
|
||||||
|
label: ":newspaper: Newsfile"
|
||||||
|
branches: "!master !develop !release-*"
|
||||||
|
plugins:
|
||||||
|
- docker#v3.0.1:
|
||||||
|
image: "python:3.6"
|
||||||
|
propagate-environment: true
|
||||||
|
|
||||||
|
- command:
|
||||||
|
- "python -m pip install tox"
|
||||||
|
- "tox -e check-sampleconfig"
|
||||||
|
label: "\U0001F9F9 check-sample-config"
|
||||||
|
plugins:
|
||||||
|
- docker#v3.0.1:
|
||||||
|
image: "python:3.6"
|
||||||
|
|
||||||
|
- wait
|
||||||
|
|
||||||
|
|
||||||
|
- command:
|
||||||
|
- "python -m pip install tox"
|
||||||
|
- "tox -e py35-old,codecov"
|
||||||
|
label: ":python: 3.5 / SQLite / Old Deps"
|
||||||
|
env:
|
||||||
|
TRIAL_FLAGS: "-j 2"
|
||||||
|
plugins:
|
||||||
|
- docker#v3.0.1:
|
||||||
|
image: "python:3.5"
|
||||||
|
propagate-environment: true
|
||||||
|
retry:
|
||||||
|
automatic:
|
||||||
|
- exit_status: -1
|
||||||
|
limit: 2
|
||||||
|
- exit_status: 2
|
||||||
|
limit: 2
|
||||||
|
|
||||||
|
- command:
|
||||||
|
- "python -m pip install tox"
|
||||||
|
- "tox -e py35,codecov"
|
||||||
|
label: ":python: 3.5 / SQLite"
|
||||||
|
env:
|
||||||
|
TRIAL_FLAGS: "-j 2"
|
||||||
|
plugins:
|
||||||
|
- docker#v3.0.1:
|
||||||
|
image: "python:3.5"
|
||||||
|
propagate-environment: true
|
||||||
|
retry:
|
||||||
|
automatic:
|
||||||
|
- exit_status: -1
|
||||||
|
limit: 2
|
||||||
|
- exit_status: 2
|
||||||
|
limit: 2
|
||||||
|
|
||||||
|
- command:
|
||||||
|
- "python -m pip install tox"
|
||||||
|
- "tox -e py36,codecov"
|
||||||
|
label: ":python: 3.6 / SQLite"
|
||||||
|
env:
|
||||||
|
TRIAL_FLAGS: "-j 2"
|
||||||
|
plugins:
|
||||||
|
- docker#v3.0.1:
|
||||||
|
image: "python:3.6"
|
||||||
|
propagate-environment: true
|
||||||
|
retry:
|
||||||
|
automatic:
|
||||||
|
- exit_status: -1
|
||||||
|
limit: 2
|
||||||
|
- exit_status: 2
|
||||||
|
limit: 2
|
||||||
|
|
||||||
|
- command:
|
||||||
|
- "python -m pip install tox"
|
||||||
|
- "tox -e py37,codecov"
|
||||||
|
label: ":python: 3.7 / SQLite"
|
||||||
|
env:
|
||||||
|
TRIAL_FLAGS: "-j 2"
|
||||||
|
plugins:
|
||||||
|
- docker#v3.0.1:
|
||||||
|
image: "python:3.7"
|
||||||
|
propagate-environment: true
|
||||||
|
retry:
|
||||||
|
automatic:
|
||||||
|
- exit_status: -1
|
||||||
|
limit: 2
|
||||||
|
- exit_status: 2
|
||||||
|
limit: 2
|
||||||
|
|
||||||
|
- label: ":python: 3.5 / :postgres: 9.5"
|
||||||
|
env:
|
||||||
|
TRIAL_FLAGS: "-j 4"
|
||||||
|
command:
|
||||||
|
- "bash -c 'python -m pip install tox && python -m tox -e py35-postgres,codecov'"
|
||||||
|
plugins:
|
||||||
|
- docker-compose#v2.1.0:
|
||||||
|
run: testenv
|
||||||
|
config:
|
||||||
|
- .buildkite/docker-compose.py35.pg95.yaml
|
||||||
|
retry:
|
||||||
|
automatic:
|
||||||
|
- exit_status: -1
|
||||||
|
limit: 2
|
||||||
|
- exit_status: 2
|
||||||
|
limit: 2
|
||||||
|
|
||||||
|
- label: ":python: 3.7 / :postgres: 9.5"
|
||||||
|
env:
|
||||||
|
TRIAL_FLAGS: "-j 4"
|
||||||
|
command:
|
||||||
|
- "bash -c 'python -m pip install tox && python -m tox -e py37-postgres,codecov'"
|
||||||
|
plugins:
|
||||||
|
- docker-compose#v2.1.0:
|
||||||
|
run: testenv
|
||||||
|
config:
|
||||||
|
- .buildkite/docker-compose.py37.pg95.yaml
|
||||||
|
retry:
|
||||||
|
automatic:
|
||||||
|
- exit_status: -1
|
||||||
|
limit: 2
|
||||||
|
- exit_status: 2
|
||||||
|
limit: 2
|
||||||
|
|
||||||
|
- label: ":python: 3.7 / :postgres: 11"
|
||||||
|
env:
|
||||||
|
TRIAL_FLAGS: "-j 4"
|
||||||
|
command:
|
||||||
|
- "bash -c 'python -m pip install tox && python -m tox -e py37-postgres,codecov'"
|
||||||
|
plugins:
|
||||||
|
- docker-compose#v2.1.0:
|
||||||
|
run: testenv
|
||||||
|
config:
|
||||||
|
- .buildkite/docker-compose.py37.pg11.yaml
|
||||||
|
retry:
|
||||||
|
automatic:
|
||||||
|
- exit_status: -1
|
||||||
|
limit: 2
|
||||||
|
- exit_status: 2
|
||||||
|
limit: 2
|
||||||
|
|
||||||
|
|
||||||
|
- label: "SyTest - :python: 3.5 / SQLite / Monolith"
|
||||||
|
agents:
|
||||||
|
queue: "medium"
|
||||||
|
command:
|
||||||
|
- "bash .buildkite/merge_base_branch.sh"
|
||||||
|
- "bash /synapse_sytest.sh"
|
||||||
|
plugins:
|
||||||
|
- docker#v3.0.1:
|
||||||
|
image: "matrixdotorg/sytest-synapse:py35"
|
||||||
|
propagate-environment: true
|
||||||
|
always-pull: true
|
||||||
|
workdir: "/src"
|
||||||
|
retry:
|
||||||
|
automatic:
|
||||||
|
- exit_status: -1
|
||||||
|
limit: 2
|
||||||
|
- exit_status: 2
|
||||||
|
limit: 2
|
||||||
|
|
||||||
|
- label: "SyTest - :python: 3.5 / :postgres: 9.6 / Monolith"
|
||||||
|
agents:
|
||||||
|
queue: "medium"
|
||||||
|
env:
|
||||||
|
POSTGRES: "1"
|
||||||
|
command:
|
||||||
|
- "bash .buildkite/merge_base_branch.sh"
|
||||||
|
- "bash /synapse_sytest.sh"
|
||||||
|
plugins:
|
||||||
|
- docker#v3.0.1:
|
||||||
|
image: "matrixdotorg/sytest-synapse:py35"
|
||||||
|
propagate-environment: true
|
||||||
|
always-pull: true
|
||||||
|
workdir: "/src"
|
||||||
|
retry:
|
||||||
|
automatic:
|
||||||
|
- exit_status: -1
|
||||||
|
limit: 2
|
||||||
|
- exit_status: 2
|
||||||
|
limit: 2
|
||||||
|
|
||||||
|
- label: "SyTest - :python: 3.5 / :postgres: 9.6 / Workers"
|
||||||
|
agents:
|
||||||
|
queue: "medium"
|
||||||
|
env:
|
||||||
|
POSTGRES: "1"
|
||||||
|
WORKERS: "1"
|
||||||
|
command:
|
||||||
|
- "bash .buildkite/merge_base_branch.sh"
|
||||||
|
- "bash /synapse_sytest.sh"
|
||||||
|
plugins:
|
||||||
|
- docker#v3.0.1:
|
||||||
|
image: "matrixdotorg/sytest-synapse:py35"
|
||||||
|
propagate-environment: true
|
||||||
|
always-pull: true
|
||||||
|
workdir: "/src"
|
||||||
|
soft_fail: true
|
||||||
|
retry:
|
||||||
|
automatic:
|
||||||
|
- exit_status: -1
|
||||||
|
limit: 2
|
||||||
|
- exit_status: 2
|
||||||
|
limit: 2
|
||||||
@@ -1,21 +0,0 @@
|
|||||||
# Configuration file used for testing the 'synapse_port_db' script.
|
|
||||||
# Tells the script to connect to the postgresql database that will be available in the
|
|
||||||
# CI's Docker setup at the point where this file is considered.
|
|
||||||
server_name: "localhost:8800"
|
|
||||||
|
|
||||||
signing_key_path: "/src/.buildkite/test.signing.key"
|
|
||||||
|
|
||||||
report_stats: false
|
|
||||||
|
|
||||||
database:
|
|
||||||
name: "psycopg2"
|
|
||||||
args:
|
|
||||||
user: postgres
|
|
||||||
host: postgres
|
|
||||||
password: postgres
|
|
||||||
database: synapse
|
|
||||||
|
|
||||||
# Suppress the key server warning.
|
|
||||||
trusted_key_servers:
|
|
||||||
- server_name: "matrix.org"
|
|
||||||
suppress_key_server_warning: true
|
|
||||||
@@ -1,37 +0,0 @@
|
|||||||
#!/usr/bin/env python
|
|
||||||
# -*- coding: utf-8 -*-
|
|
||||||
# Copyright 2019 The Matrix.org Foundation C.I.C.
|
|
||||||
#
|
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
||||||
# you may not use this file except in compliance with the License.
|
|
||||||
# You may obtain a copy of the License at
|
|
||||||
#
|
|
||||||
# http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
#
|
|
||||||
# Unless required by applicable law or agreed to in writing, software
|
|
||||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
||||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
||||||
# See the License for the specific language governing permissions and
|
|
||||||
# limitations under the License.
|
|
||||||
|
|
||||||
import logging
|
|
||||||
|
|
||||||
from synapse.storage.engines import create_engine
|
|
||||||
|
|
||||||
logger = logging.getLogger("create_postgres_db")
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
|
||||||
# Create a PostgresEngine.
|
|
||||||
db_engine = create_engine({"name": "psycopg2", "args": {}})
|
|
||||||
|
|
||||||
# Connect to postgres to create the base database.
|
|
||||||
# We use "postgres" as a database because it's bound to exist and the "synapse" one
|
|
||||||
# doesn't exist yet.
|
|
||||||
db_conn = db_engine.module.connect(
|
|
||||||
user="postgres", host="postgres", password="postgres", dbname="postgres"
|
|
||||||
)
|
|
||||||
db_conn.autocommit = True
|
|
||||||
cur = db_conn.cursor()
|
|
||||||
cur.execute("CREATE DATABASE synapse;")
|
|
||||||
cur.close()
|
|
||||||
db_conn.close()
|
|
||||||
@@ -1,16 +0,0 @@
|
|||||||
#!/usr/bin/env bash
|
|
||||||
|
|
||||||
# this script is run by buildkite in a plain `bionic` container; it installs the
|
|
||||||
# minimal requirements for tox and hands over to the py3-old tox environment.
|
|
||||||
|
|
||||||
set -ex
|
|
||||||
|
|
||||||
apt-get update
|
|
||||||
apt-get install -y python3 python3-dev python3-pip libxml2-dev libxslt-dev xmlsec1 zlib1g-dev tox
|
|
||||||
|
|
||||||
export LANG="C.UTF-8"
|
|
||||||
|
|
||||||
# Prevent virtualenv from auto-updating pip to an incompatible version
|
|
||||||
export VIRTUALENV_NO_DOWNLOAD=1
|
|
||||||
|
|
||||||
exec tox -e py3-old,combine
|
|
||||||
@@ -1,36 +0,0 @@
|
|||||||
#!/usr/bin/env bash
|
|
||||||
#
|
|
||||||
# Test script for 'synapse_port_db', which creates a virtualenv, installs Synapse along
|
|
||||||
# with additional dependencies needed for the test (such as coverage or the PostgreSQL
|
|
||||||
# driver), update the schema of the test SQLite database and run background updates on it,
|
|
||||||
# create an empty test database in PostgreSQL, then run the 'synapse_port_db' script to
|
|
||||||
# test porting the SQLite database to the PostgreSQL database (with coverage).
|
|
||||||
|
|
||||||
set -xe
|
|
||||||
cd `dirname $0`/../..
|
|
||||||
|
|
||||||
echo "--- Install dependencies"
|
|
||||||
|
|
||||||
# Install dependencies for this test.
|
|
||||||
pip install psycopg2 coverage coverage-enable-subprocess
|
|
||||||
|
|
||||||
# Install Synapse itself. This won't update any libraries.
|
|
||||||
pip install -e .
|
|
||||||
|
|
||||||
echo "--- Generate the signing key"
|
|
||||||
|
|
||||||
# Generate the server's signing key.
|
|
||||||
python -m synapse.app.homeserver --generate-keys -c .buildkite/sqlite-config.yaml
|
|
||||||
|
|
||||||
echo "--- Prepare the databases"
|
|
||||||
|
|
||||||
# Make sure the SQLite3 database is using the latest schema and has no pending background update.
|
|
||||||
scripts-dev/update_database --database-config .buildkite/sqlite-config.yaml
|
|
||||||
|
|
||||||
# Create the PostgreSQL database.
|
|
||||||
./.buildkite/scripts/create_postgres_db.py
|
|
||||||
|
|
||||||
echo "+++ Run synapse_port_db"
|
|
||||||
|
|
||||||
# Run the script
|
|
||||||
coverage run scripts/synapse_port_db --sqlite-database .buildkite/test_db.db --postgres-config .buildkite/postgres-config.yaml
|
|
||||||
@@ -1,18 +0,0 @@
|
|||||||
# Configuration file used for testing the 'synapse_port_db' script.
|
|
||||||
# Tells the 'update_database' script to connect to the test SQLite database to upgrade its
|
|
||||||
# schema and run background updates on it.
|
|
||||||
server_name: "localhost:8800"
|
|
||||||
|
|
||||||
signing_key_path: "/src/.buildkite/test.signing.key"
|
|
||||||
|
|
||||||
report_stats: false
|
|
||||||
|
|
||||||
database:
|
|
||||||
name: "sqlite3"
|
|
||||||
args:
|
|
||||||
database: ".buildkite/test_db.db"
|
|
||||||
|
|
||||||
# Suppress the key server warning.
|
|
||||||
trusted_key_servers:
|
|
||||||
- server_name: "matrix.org"
|
|
||||||
suppress_key_server_warning: true
|
|
||||||
Binary file not shown.
@@ -1,10 +0,0 @@
|
|||||||
# This file serves as a blacklist for SyTest tests that we expect will fail in
|
|
||||||
# Synapse when run under worker mode. For more details, see sytest-blacklist.
|
|
||||||
|
|
||||||
Can re-join room if re-invited
|
|
||||||
|
|
||||||
# new failures as of https://github.com/matrix-org/sytest/pull/732
|
|
||||||
Device list doesn't change if remote server is down
|
|
||||||
|
|
||||||
# https://buildkite.com/matrix-dot-org/synapse/builds/6134#6f67bf47-e234-474d-80e8-c6e1868b15c5
|
|
||||||
Server correctly handles incoming m.device_list_update
|
|
||||||
+10
-55
@@ -1,35 +1,24 @@
|
|||||||
version: 2.1
|
version: 2
|
||||||
jobs:
|
jobs:
|
||||||
dockerhubuploadrelease:
|
dockerhubuploadrelease:
|
||||||
docker:
|
machine: true
|
||||||
- image: docker:git
|
|
||||||
steps:
|
steps:
|
||||||
- checkout
|
- checkout
|
||||||
- docker_prepare
|
- run: docker build -f docker/Dockerfile --label gitsha1=${CIRCLE_SHA1} -t matrixdotorg/synapse:${CIRCLE_TAG} -t matrixdotorg/synapse:${CIRCLE_TAG}-py3 .
|
||||||
- run: docker login --username $DOCKER_HUB_USERNAME --password $DOCKER_HUB_PASSWORD
|
- run: docker login --username $DOCKER_HUB_USERNAME --password $DOCKER_HUB_PASSWORD
|
||||||
# for release builds, we want to get the amd64 image out asap, so first
|
- run: docker push matrixdotorg/synapse:${CIRCLE_TAG}
|
||||||
# we do an amd64-only build, before following up with a multiarch build.
|
- run: docker push matrixdotorg/synapse:${CIRCLE_TAG}-py3
|
||||||
- docker_build:
|
|
||||||
tag: -t matrixdotorg/synapse:${CIRCLE_TAG}
|
|
||||||
platforms: linux/amd64
|
|
||||||
- docker_build:
|
|
||||||
tag: -t matrixdotorg/synapse:${CIRCLE_TAG}
|
|
||||||
platforms: linux/amd64,linux/arm64
|
|
||||||
|
|
||||||
dockerhubuploadlatest:
|
dockerhubuploadlatest:
|
||||||
docker:
|
machine: true
|
||||||
- image: docker:git
|
|
||||||
steps:
|
steps:
|
||||||
- checkout
|
- checkout
|
||||||
- docker_prepare
|
- run: docker build -f docker/Dockerfile --label gitsha1=${CIRCLE_SHA1} -t matrixdotorg/synapse:latest -t matrixdotorg/synapse:latest-py3 .
|
||||||
- run: docker login --username $DOCKER_HUB_USERNAME --password $DOCKER_HUB_PASSWORD
|
- run: docker login --username $DOCKER_HUB_USERNAME --password $DOCKER_HUB_PASSWORD
|
||||||
# for `latest`, we don't want the arm images to disappear, so don't update the tag
|
- run: docker push matrixdotorg/synapse:latest
|
||||||
# until all of the platforms are built.
|
- run: docker push matrixdotorg/synapse:latest-py3
|
||||||
- docker_build:
|
|
||||||
tag: -t matrixdotorg/synapse:latest
|
|
||||||
platforms: linux/amd64,linux/arm64
|
|
||||||
|
|
||||||
workflows:
|
workflows:
|
||||||
|
version: 2
|
||||||
build:
|
build:
|
||||||
jobs:
|
jobs:
|
||||||
- dockerhubuploadrelease:
|
- dockerhubuploadrelease:
|
||||||
@@ -42,37 +31,3 @@ workflows:
|
|||||||
filters:
|
filters:
|
||||||
branches:
|
branches:
|
||||||
only: master
|
only: master
|
||||||
|
|
||||||
commands:
|
|
||||||
docker_prepare:
|
|
||||||
description: Sets up a remote docker server, downloads the buildx cli plugin, and enables multiarch images
|
|
||||||
parameters:
|
|
||||||
buildx_version:
|
|
||||||
type: string
|
|
||||||
default: "v0.4.1"
|
|
||||||
steps:
|
|
||||||
- setup_remote_docker:
|
|
||||||
# 19.03.13 was the most recent available on circleci at the time of
|
|
||||||
# writing.
|
|
||||||
version: 19.03.13
|
|
||||||
- run: apk add --no-cache curl
|
|
||||||
- run: mkdir -vp ~/.docker/cli-plugins/ ~/dockercache
|
|
||||||
- run: curl --silent -L "https://github.com/docker/buildx/releases/download/<< parameters.buildx_version >>/buildx-<< parameters.buildx_version >>.linux-amd64" > ~/.docker/cli-plugins/docker-buildx
|
|
||||||
- run: chmod a+x ~/.docker/cli-plugins/docker-buildx
|
|
||||||
# install qemu links in /proc/sys/fs/binfmt_misc on the docker instance running the circleci job
|
|
||||||
- run: docker run --rm --privileged multiarch/qemu-user-static --reset -p yes
|
|
||||||
# create a context named `builder` for the builds
|
|
||||||
- run: docker context create builder
|
|
||||||
# create a buildx builder using the new context, and set it as the default
|
|
||||||
- run: docker buildx create builder --use
|
|
||||||
|
|
||||||
docker_build:
|
|
||||||
description: Builds and pushed images to dockerhub using buildx
|
|
||||||
parameters:
|
|
||||||
platforms:
|
|
||||||
type: string
|
|
||||||
default: linux/amd64
|
|
||||||
tag:
|
|
||||||
type: string
|
|
||||||
steps:
|
|
||||||
- run: docker buildx build -f docker/Dockerfile --push --platform << parameters.platforms >> --label gitsha1=${CIRCLE_SHA1} << parameters.tag >> --progress=plain .
|
|
||||||
|
|||||||
+2
-1
@@ -1,4 +1,5 @@
|
|||||||
comment: off
|
comment:
|
||||||
|
layout: "diff"
|
||||||
|
|
||||||
coverage:
|
coverage:
|
||||||
status:
|
status:
|
||||||
|
|||||||
+1
-2
@@ -1,8 +1,7 @@
|
|||||||
[run]
|
[run]
|
||||||
branch = True
|
branch = True
|
||||||
parallel = True
|
parallel = True
|
||||||
include=$TOP/synapse/*
|
include = synapse/*
|
||||||
data_file = $TOP/.coverage
|
|
||||||
|
|
||||||
[report]
|
[report]
|
||||||
precision = 2
|
precision = 2
|
||||||
|
|||||||
@@ -1,8 +0,0 @@
|
|||||||
# Black reformatting (#5482).
|
|
||||||
32e7c9e7f20b57dd081023ac42d6931a8da9b3a3
|
|
||||||
|
|
||||||
# Target Python 3.5 with black (#8664).
|
|
||||||
aff1eb7c671b0a3813407321d2702ec46c71fa56
|
|
||||||
|
|
||||||
# Update black to 20.8b1 (#9381).
|
|
||||||
0a00b7ff14890987f09112a2ae696c61001e6cf1
|
|
||||||
@@ -1,5 +0,0 @@
|
|||||||
**If you are looking for support** please ask in **#synapse:matrix.org**
|
|
||||||
(using a matrix.org account if necessary). We do not use GitHub issues for
|
|
||||||
support.
|
|
||||||
|
|
||||||
**If you want to report a security issue** please see https://matrix.org/security-disclosure-policy/
|
|
||||||
@@ -6,11 +6,9 @@ about: Create a report to help us improve
|
|||||||
|
|
||||||
<!--
|
<!--
|
||||||
|
|
||||||
**THIS IS NOT A SUPPORT CHANNEL!**
|
**IF YOU HAVE SUPPORT QUESTIONS ABOUT RUNNING OR CONFIGURING YOUR OWN HOME SERVER**:
|
||||||
**IF YOU HAVE SUPPORT QUESTIONS ABOUT RUNNING OR CONFIGURING YOUR OWN HOME SERVER**,
|
You will likely get better support more quickly if you ask in ** #matrix:matrix.org ** ;)
|
||||||
please ask in **#synapse:matrix.org** (using a matrix.org account if necessary)
|
|
||||||
|
|
||||||
If you want to report a security issue, please see https://matrix.org/security-disclosure-policy/
|
|
||||||
|
|
||||||
This is a bug report template. By following the instructions below and
|
This is a bug report template. By following the instructions below and
|
||||||
filling out the sections with your information, you will help the us to get all
|
filling out the sections with your information, you will help the us to get all
|
||||||
@@ -46,26 +44,22 @@ those (please be careful to remove any personal or private data). Please surroun
|
|||||||
<!-- IMPORTANT: please answer the following questions, to help us narrow down the problem -->
|
<!-- IMPORTANT: please answer the following questions, to help us narrow down the problem -->
|
||||||
|
|
||||||
<!-- Was this issue identified on matrix.org or another homeserver? -->
|
<!-- Was this issue identified on matrix.org or another homeserver? -->
|
||||||
- **Homeserver**:
|
- **Homeserver**:
|
||||||
|
|
||||||
If not matrix.org:
|
If not matrix.org:
|
||||||
|
|
||||||
<!--
|
<!--
|
||||||
What version of Synapse is running?
|
What version of Synapse is running?
|
||||||
|
You can find the Synapse version by inspecting the server headers (replace matrix.org with
|
||||||
You can find the Synapse version with this command:
|
your own homeserver domain):
|
||||||
|
$ curl -v https://matrix.org/_matrix/client/versions 2>&1 | grep "Server:"
|
||||||
$ curl http://localhost:8008/_synapse/admin/v1/server_version
|
|
||||||
|
|
||||||
(You may need to replace `localhost:8008` if Synapse is not configured to
|
|
||||||
listen on that port.)
|
|
||||||
-->
|
-->
|
||||||
- **Version**:
|
- **Version**:
|
||||||
|
|
||||||
- **Install method**:
|
- **Install method**:
|
||||||
<!-- examples: package manager/git clone/pip -->
|
<!-- examples: package manager/git clone/pip -->
|
||||||
|
|
||||||
- **Platform**:
|
- **Platform**:
|
||||||
<!--
|
<!--
|
||||||
Tell us about the environment in which your homeserver is operating
|
Tell us about the environment in which your homeserver is operating
|
||||||
distro, hardware, if it's running in a vm/container, etc.
|
distro, hardware, if it's running in a vm/container, etc.
|
||||||
|
|||||||
@@ -1,12 +1,7 @@
|
|||||||
### Pull Request Checklist
|
### Pull Request Checklist
|
||||||
|
|
||||||
<!-- Please read CONTRIBUTING.md before submitting your pull request -->
|
<!-- Please read CONTRIBUTING.rst before submitting your pull request -->
|
||||||
|
|
||||||
* [ ] Pull request is based on the develop branch
|
* [ ] Pull request is based on the develop branch
|
||||||
* [ ] Pull request includes a [changelog file](https://github.com/matrix-org/synapse/blob/master/CONTRIBUTING.md#changelog). The entry should:
|
* [ ] Pull request includes a [changelog file](https://github.com/matrix-org/synapse/blob/master/CONTRIBUTING.rst#changelog)
|
||||||
- Be a short description of your change which makes sense to users. "Fixed a bug that prevented receiving messages from other servers." instead of "Moved X method from `EventStore` to `EventWorkerStore`.".
|
* [ ] Pull request includes a [sign off](https://github.com/matrix-org/synapse/blob/master/CONTRIBUTING.rst#sign-off)
|
||||||
- Use markdown where necessary, mostly for `code blocks`.
|
|
||||||
- End with either a period (.) or an exclamation mark (!).
|
|
||||||
- Start with a capital letter.
|
|
||||||
* [ ] Pull request includes a [sign off](https://github.com/matrix-org/synapse/blob/master/CONTRIBUTING.md#sign-off)
|
|
||||||
* [ ] Code style is correct (run the [linters](https://github.com/matrix-org/synapse/blob/master/CONTRIBUTING.md#code-style))
|
|
||||||
|
|||||||
@@ -1,322 +0,0 @@
|
|||||||
name: Tests
|
|
||||||
|
|
||||||
on:
|
|
||||||
push:
|
|
||||||
branches: ["develop", "release-*"]
|
|
||||||
pull_request:
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
lint:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
strategy:
|
|
||||||
matrix:
|
|
||||||
toxenv:
|
|
||||||
- "check-sampleconfig"
|
|
||||||
- "check_codestyle"
|
|
||||||
- "check_isort"
|
|
||||||
- "mypy"
|
|
||||||
- "packaging"
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v2
|
|
||||||
- uses: actions/setup-python@v2
|
|
||||||
- run: pip install tox
|
|
||||||
- run: tox -e ${{ matrix.toxenv }}
|
|
||||||
|
|
||||||
lint-crlf:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v2
|
|
||||||
- name: Check line endings
|
|
||||||
run: scripts-dev/check_line_terminators.sh
|
|
||||||
|
|
||||||
lint-newsfile:
|
|
||||||
if: ${{ github.base_ref == 'develop' || contains(github.base_ref, 'release-') }}
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v2
|
|
||||||
- uses: actions/setup-python@v2
|
|
||||||
- run: pip install tox
|
|
||||||
- name: Patch Buildkite-specific test script
|
|
||||||
run: |
|
|
||||||
sed -i -e 's/\$BUILDKITE_PULL_REQUEST/${{ github.event.number }}/' \
|
|
||||||
scripts-dev/check-newsfragment
|
|
||||||
- run: scripts-dev/check-newsfragment
|
|
||||||
|
|
||||||
lint-sdist:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v2
|
|
||||||
- uses: actions/setup-python@v2
|
|
||||||
with:
|
|
||||||
python-version: "3.x"
|
|
||||||
- run: pip install wheel
|
|
||||||
- run: python setup.py sdist bdist_wheel
|
|
||||||
- uses: actions/upload-artifact@v2
|
|
||||||
with:
|
|
||||||
name: Python Distributions
|
|
||||||
path: dist/*
|
|
||||||
|
|
||||||
# Dummy step to gate other tests on without repeating the whole list
|
|
||||||
linting-done:
|
|
||||||
if: ${{ always() }} # Run this even if prior jobs were skipped
|
|
||||||
needs: [lint, lint-crlf, lint-newsfile, lint-sdist]
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
- run: "true"
|
|
||||||
|
|
||||||
trial:
|
|
||||||
if: ${{ !failure() }} # Allow previous steps to be skipped, but not fail
|
|
||||||
needs: linting-done
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
strategy:
|
|
||||||
matrix:
|
|
||||||
python-version: ["3.6", "3.7", "3.8", "3.9"]
|
|
||||||
database: ["sqlite"]
|
|
||||||
include:
|
|
||||||
# Newest Python without optional deps
|
|
||||||
- python-version: "3.9"
|
|
||||||
toxenv: "py-noextras,combine"
|
|
||||||
|
|
||||||
# Oldest Python with PostgreSQL
|
|
||||||
- python-version: "3.6"
|
|
||||||
database: "postgres"
|
|
||||||
postgres-version: "9.6"
|
|
||||||
|
|
||||||
# Newest Python with PostgreSQL
|
|
||||||
- python-version: "3.9"
|
|
||||||
database: "postgres"
|
|
||||||
postgres-version: "13"
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v2
|
|
||||||
- run: sudo apt-get -qq install xmlsec1
|
|
||||||
- name: Set up PostgreSQL ${{ matrix.postgres-version }}
|
|
||||||
if: ${{ matrix.postgres-version }}
|
|
||||||
run: |
|
|
||||||
docker run -d -p 5432:5432 \
|
|
||||||
-e POSTGRES_PASSWORD=postgres \
|
|
||||||
-e POSTGRES_INITDB_ARGS="--lc-collate C --lc-ctype C --encoding UTF8" \
|
|
||||||
postgres:${{ matrix.postgres-version }}
|
|
||||||
- uses: actions/setup-python@v2
|
|
||||||
with:
|
|
||||||
python-version: ${{ matrix.python-version }}
|
|
||||||
- run: pip install tox
|
|
||||||
- name: Await PostgreSQL
|
|
||||||
if: ${{ matrix.postgres-version }}
|
|
||||||
timeout-minutes: 2
|
|
||||||
run: until pg_isready -h localhost; do sleep 1; done
|
|
||||||
- run: tox -e py,combine
|
|
||||||
env:
|
|
||||||
TRIAL_FLAGS: "--jobs=2"
|
|
||||||
SYNAPSE_POSTGRES: ${{ matrix.database == 'postgres' || '' }}
|
|
||||||
SYNAPSE_POSTGRES_HOST: localhost
|
|
||||||
SYNAPSE_POSTGRES_USER: postgres
|
|
||||||
SYNAPSE_POSTGRES_PASSWORD: postgres
|
|
||||||
- name: Dump logs
|
|
||||||
# Note: Dumps to workflow logs instead of using actions/upload-artifact
|
|
||||||
# This keeps logs colocated with failing jobs
|
|
||||||
# It also ignores find's exit code; this is a best effort affair
|
|
||||||
run: >-
|
|
||||||
find _trial_temp -name '*.log'
|
|
||||||
-exec echo "::group::{}" \;
|
|
||||||
-exec cat {} \;
|
|
||||||
-exec echo "::endgroup::" \;
|
|
||||||
|| true
|
|
||||||
|
|
||||||
trial-olddeps:
|
|
||||||
if: ${{ !failure() }} # Allow previous steps to be skipped, but not fail
|
|
||||||
needs: linting-done
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v2
|
|
||||||
- name: Test with old deps
|
|
||||||
uses: docker://ubuntu:bionic # For old python and sqlite
|
|
||||||
with:
|
|
||||||
workdir: /github/workspace
|
|
||||||
entrypoint: .buildkite/scripts/test_old_deps.sh
|
|
||||||
env:
|
|
||||||
TRIAL_FLAGS: "--jobs=2"
|
|
||||||
- name: Dump logs
|
|
||||||
# Note: Dumps to workflow logs instead of using actions/upload-artifact
|
|
||||||
# This keeps logs colocated with failing jobs
|
|
||||||
# It also ignores find's exit code; this is a best effort affair
|
|
||||||
run: >-
|
|
||||||
find _trial_temp -name '*.log'
|
|
||||||
-exec echo "::group::{}" \;
|
|
||||||
-exec cat {} \;
|
|
||||||
-exec echo "::endgroup::" \;
|
|
||||||
|| true
|
|
||||||
|
|
||||||
trial-pypy:
|
|
||||||
# Very slow; only run if the branch name includes 'pypy'
|
|
||||||
if: ${{ contains(github.ref, 'pypy') && !failure() }}
|
|
||||||
needs: linting-done
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
strategy:
|
|
||||||
matrix:
|
|
||||||
python-version: ["pypy-3.6"]
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v2
|
|
||||||
- run: sudo apt-get -qq install xmlsec1 libxml2-dev libxslt-dev
|
|
||||||
- uses: actions/setup-python@v2
|
|
||||||
with:
|
|
||||||
python-version: ${{ matrix.python-version }}
|
|
||||||
- run: pip install tox
|
|
||||||
- run: tox -e py,combine
|
|
||||||
env:
|
|
||||||
TRIAL_FLAGS: "--jobs=2"
|
|
||||||
- name: Dump logs
|
|
||||||
# Note: Dumps to workflow logs instead of using actions/upload-artifact
|
|
||||||
# This keeps logs colocated with failing jobs
|
|
||||||
# It also ignores find's exit code; this is a best effort affair
|
|
||||||
run: >-
|
|
||||||
find _trial_temp -name '*.log'
|
|
||||||
-exec echo "::group::{}" \;
|
|
||||||
-exec cat {} \;
|
|
||||||
-exec echo "::endgroup::" \;
|
|
||||||
|| true
|
|
||||||
|
|
||||||
sytest:
|
|
||||||
if: ${{ !failure() }}
|
|
||||||
needs: linting-done
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
container:
|
|
||||||
image: matrixdotorg/sytest-synapse:${{ matrix.sytest-tag }}
|
|
||||||
volumes:
|
|
||||||
- ${{ github.workspace }}:/src
|
|
||||||
env:
|
|
||||||
BUILDKITE_BRANCH: ${{ github.head_ref }}
|
|
||||||
POSTGRES: ${{ matrix.postgres && 1}}
|
|
||||||
MULTI_POSTGRES: ${{ (matrix.postgres == 'multi-postgres') && 1}}
|
|
||||||
WORKERS: ${{ matrix.workers && 1 }}
|
|
||||||
REDIS: ${{ matrix.redis && 1 }}
|
|
||||||
BLACKLIST: ${{ matrix.workers && 'synapse-blacklist-with-workers' }}
|
|
||||||
|
|
||||||
strategy:
|
|
||||||
fail-fast: false
|
|
||||||
matrix:
|
|
||||||
include:
|
|
||||||
- sytest-tag: bionic
|
|
||||||
|
|
||||||
- sytest-tag: bionic
|
|
||||||
postgres: postgres
|
|
||||||
|
|
||||||
- sytest-tag: testing
|
|
||||||
postgres: postgres
|
|
||||||
|
|
||||||
- sytest-tag: bionic
|
|
||||||
postgres: multi-postgres
|
|
||||||
workers: workers
|
|
||||||
|
|
||||||
- sytest-tag: buster
|
|
||||||
postgres: multi-postgres
|
|
||||||
workers: workers
|
|
||||||
|
|
||||||
- sytest-tag: buster
|
|
||||||
postgres: postgres
|
|
||||||
workers: workers
|
|
||||||
redis: redis
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v2
|
|
||||||
- name: Prepare test blacklist
|
|
||||||
run: cat sytest-blacklist .buildkite/worker-blacklist > synapse-blacklist-with-workers
|
|
||||||
- name: Run SyTest
|
|
||||||
run: /bootstrap.sh synapse
|
|
||||||
working-directory: /src
|
|
||||||
- name: Dump results.tap
|
|
||||||
if: ${{ always() }}
|
|
||||||
run: cat /logs/results.tap
|
|
||||||
- name: Upload SyTest logs
|
|
||||||
uses: actions/upload-artifact@v2
|
|
||||||
if: ${{ always() }}
|
|
||||||
with:
|
|
||||||
name: Sytest Logs - ${{ job.status }} - (${{ join(matrix.*, ', ') }})
|
|
||||||
path: |
|
|
||||||
/logs/results.tap
|
|
||||||
/logs/**/*.log*
|
|
||||||
|
|
||||||
portdb:
|
|
||||||
if: ${{ !failure() }} # Allow previous steps to be skipped, but not fail
|
|
||||||
needs: linting-done
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
strategy:
|
|
||||||
matrix:
|
|
||||||
include:
|
|
||||||
- python-version: "3.6"
|
|
||||||
postgres-version: "9.6"
|
|
||||||
|
|
||||||
- python-version: "3.9"
|
|
||||||
postgres-version: "13"
|
|
||||||
|
|
||||||
services:
|
|
||||||
postgres:
|
|
||||||
image: postgres:${{ matrix.postgres-version }}
|
|
||||||
ports:
|
|
||||||
- 5432:5432
|
|
||||||
env:
|
|
||||||
POSTGRES_PASSWORD: "postgres"
|
|
||||||
POSTGRES_INITDB_ARGS: "--lc-collate C --lc-ctype C --encoding UTF8"
|
|
||||||
options: >-
|
|
||||||
--health-cmd pg_isready
|
|
||||||
--health-interval 10s
|
|
||||||
--health-timeout 5s
|
|
||||||
--health-retries 5
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v2
|
|
||||||
- run: sudo apt-get -qq install xmlsec1
|
|
||||||
- uses: actions/setup-python@v2
|
|
||||||
with:
|
|
||||||
python-version: ${{ matrix.python-version }}
|
|
||||||
- name: Patch Buildkite-specific test scripts
|
|
||||||
run: |
|
|
||||||
sed -i -e 's/host="postgres"/host="localhost"/' .buildkite/scripts/create_postgres_db.py
|
|
||||||
sed -i -e 's/host: postgres/host: localhost/' .buildkite/postgres-config.yaml
|
|
||||||
sed -i -e 's|/src/||' .buildkite/{sqlite,postgres}-config.yaml
|
|
||||||
sed -i -e 's/\$TOP/\$GITHUB_WORKSPACE/' .coveragerc
|
|
||||||
- run: .buildkite/scripts/test_synapse_port_db.sh
|
|
||||||
|
|
||||||
complement:
|
|
||||||
if: ${{ !failure() }}
|
|
||||||
needs: linting-done
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
container:
|
|
||||||
# https://github.com/matrix-org/complement/blob/master/dockerfiles/ComplementCIBuildkite.Dockerfile
|
|
||||||
image: matrixdotorg/complement:latest
|
|
||||||
env:
|
|
||||||
CI: true
|
|
||||||
ports:
|
|
||||||
- 8448:8448
|
|
||||||
volumes:
|
|
||||||
- /var/run/docker.sock:/var/run/docker.sock
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- name: Run actions/checkout@v2 for synapse
|
|
||||||
uses: actions/checkout@v2
|
|
||||||
with:
|
|
||||||
path: synapse
|
|
||||||
|
|
||||||
- name: Run actions/checkout@v2 for complement
|
|
||||||
uses: actions/checkout@v2
|
|
||||||
with:
|
|
||||||
repository: "matrix-org/complement"
|
|
||||||
path: complement
|
|
||||||
|
|
||||||
# Build initial Synapse image
|
|
||||||
- run: docker build -t matrixdotorg/synapse:latest -f docker/Dockerfile .
|
|
||||||
working-directory: synapse
|
|
||||||
|
|
||||||
# Build a ready-to-run Synapse image based on the initial image above.
|
|
||||||
# This new image includes a config file, keys for signing and TLS, and
|
|
||||||
# other settings to make it suitable for testing under Complement.
|
|
||||||
- run: docker build -t complement-synapse -f Synapse.Dockerfile .
|
|
||||||
working-directory: complement/dockerfiles
|
|
||||||
|
|
||||||
# Run Complement
|
|
||||||
- run: go test -v -tags synapse_blacklist ./tests
|
|
||||||
env:
|
|
||||||
COMPLEMENT_BASE_IMAGE: complement-synapse:latest
|
|
||||||
working-directory: complement
|
|
||||||
+3
-11
@@ -6,27 +6,19 @@
|
|||||||
*.egg
|
*.egg
|
||||||
*.egg-info
|
*.egg-info
|
||||||
*.lock
|
*.lock
|
||||||
*.py[cod]
|
*.pyc
|
||||||
*.snap
|
|
||||||
*.tac
|
*.tac
|
||||||
_trial_temp/
|
_trial_temp/
|
||||||
_trial_temp*/
|
_trial_temp*/
|
||||||
/out
|
|
||||||
.DS_Store
|
|
||||||
__pycache__/
|
|
||||||
|
|
||||||
# stuff that is likely to exist when you run a server locally
|
# stuff that is likely to exist when you run a server locally
|
||||||
/*.db
|
/*.db
|
||||||
/*.log
|
/*.log
|
||||||
/*.log.*
|
|
||||||
/*.log.config
|
/*.log.config
|
||||||
/*.pid
|
/*.pid
|
||||||
/.python-version
|
|
||||||
/*.signing.key
|
/*.signing.key
|
||||||
/env/
|
/env/
|
||||||
/.venv*/
|
|
||||||
/homeserver*.yaml
|
/homeserver*.yaml
|
||||||
/logs
|
|
||||||
/media_store/
|
/media_store/
|
||||||
/uploads
|
/uploads
|
||||||
|
|
||||||
@@ -36,9 +28,8 @@ __pycache__/
|
|||||||
/.vscode/
|
/.vscode/
|
||||||
|
|
||||||
# build products
|
# build products
|
||||||
!/.coveragerc
|
|
||||||
/.coverage*
|
/.coverage*
|
||||||
/.mypy_cache/
|
!/.coveragerc
|
||||||
/.tox
|
/.tox
|
||||||
/build/
|
/build/
|
||||||
/coverage.*
|
/coverage.*
|
||||||
@@ -46,3 +37,4 @@ __pycache__/
|
|||||||
/docs/build/
|
/docs/build/
|
||||||
/htmlcov
|
/htmlcov
|
||||||
/pip-wheel-metadata/
|
/pip-wheel-metadata/
|
||||||
|
|
||||||
|
|||||||
+35
-9
@@ -1,8 +1,34 @@
|
|||||||
The following is an incomplete list of people outside the core team who have
|
Erik Johnston <erik at matrix.org>
|
||||||
contributed to Synapse. It is no longer maintained: more recent contributions
|
* HS core
|
||||||
are listed in the `changelog <CHANGES.md>`_.
|
* Federation API impl
|
||||||
|
|
||||||
----
|
Mark Haines <mark at matrix.org>
|
||||||
|
* HS core
|
||||||
|
* Crypto
|
||||||
|
* Content repository
|
||||||
|
* CS v2 API impl
|
||||||
|
|
||||||
|
Kegan Dougal <kegan at matrix.org>
|
||||||
|
* HS core
|
||||||
|
* CS v1 API impl
|
||||||
|
* AS API impl
|
||||||
|
|
||||||
|
Paul "LeoNerd" Evans <paul at matrix.org>
|
||||||
|
* HS core
|
||||||
|
* Presence
|
||||||
|
* Typing Notifications
|
||||||
|
* Performance metrics and caching layer
|
||||||
|
|
||||||
|
Dave Baker <dave at matrix.org>
|
||||||
|
* Push notifications
|
||||||
|
* Auth CS v2 impl
|
||||||
|
|
||||||
|
Matthew Hodgson <matthew at matrix.org>
|
||||||
|
* General doc & housekeeping
|
||||||
|
* Vertobot/vertobridge matrix<->verto PoC
|
||||||
|
|
||||||
|
Emmanuel Rohee <manu at matrix.org>
|
||||||
|
* Supporting iOS clients (testability and fallback registration)
|
||||||
|
|
||||||
Turned to Dust <dwinslow86 at gmail.com>
|
Turned to Dust <dwinslow86 at gmail.com>
|
||||||
* ArchLinux installation instructions
|
* ArchLinux installation instructions
|
||||||
@@ -36,16 +62,16 @@ Christoph Witzany <christoph at web.crofting.com>
|
|||||||
* Add LDAP support for authentication
|
* Add LDAP support for authentication
|
||||||
|
|
||||||
Pierre Jaury <pierre at jaury.eu>
|
Pierre Jaury <pierre at jaury.eu>
|
||||||
* Docker packaging
|
* Docker packaging
|
||||||
|
|
||||||
Serban Constantin <serban.constantin at gmail dot com>
|
Serban Constantin <serban.constantin at gmail dot com>
|
||||||
* Small bug fix
|
* Small bug fix
|
||||||
|
|
||||||
|
Jason Robinson <jasonr at matrix.org>
|
||||||
|
* Minor fixes
|
||||||
|
|
||||||
Joseph Weston <joseph at weston.cloud>
|
Joseph Weston <joseph at weston.cloud>
|
||||||
* Add admin API for querying HS version
|
+ Add admin API for querying HS version
|
||||||
|
|
||||||
Benjamin Saunders <ben.e.saunders at gmail dot com>
|
Benjamin Saunders <ben.e.saunders at gmail dot com>
|
||||||
* Documentation improvements
|
* Documentation improvements
|
||||||
|
|
||||||
Werner Sembach <werner.sembach at fau dot de>
|
|
||||||
* Automatically remove a group/community when it is empty
|
|
||||||
|
|||||||
+150
-3910
File diff suppressed because it is too large
Load Diff
-397
@@ -1,397 +0,0 @@
|
|||||||
Welcome to Synapse
|
|
||||||
|
|
||||||
This document aims to get you started with contributing to this repo!
|
|
||||||
|
|
||||||
- [1. Who can contribute to Synapse?](#1-who-can-contribute-to-synapse)
|
|
||||||
- [2. What do I need?](#2-what-do-i-need)
|
|
||||||
- [3. Get the source.](#3-get-the-source)
|
|
||||||
- [4. Install the dependencies](#4-install-the-dependencies)
|
|
||||||
* [Under Unix (macOS, Linux, BSD, ...)](#under-unix-macos-linux-bsd-)
|
|
||||||
* [Under Windows](#under-windows)
|
|
||||||
- [5. Get in touch.](#5-get-in-touch)
|
|
||||||
- [6. Pick an issue.](#6-pick-an-issue)
|
|
||||||
- [7. Turn coffee and documentation into code and documentation!](#7-turn-coffee-and-documentation-into-code-and-documentation)
|
|
||||||
- [8. Test, test, test!](#8-test-test-test)
|
|
||||||
* [Run the linters.](#run-the-linters)
|
|
||||||
* [Run the unit tests.](#run-the-unit-tests)
|
|
||||||
* [Run the integration tests.](#run-the-integration-tests)
|
|
||||||
- [9. Submit your patch.](#9-submit-your-patch)
|
|
||||||
* [Changelog](#changelog)
|
|
||||||
+ [How do I know what to call the changelog file before I create the PR?](#how-do-i-know-what-to-call-the-changelog-file-before-i-create-the-pr)
|
|
||||||
+ [Debian changelog](#debian-changelog)
|
|
||||||
* [Sign off](#sign-off)
|
|
||||||
- [10. Turn feedback into better code.](#10-turn-feedback-into-better-code)
|
|
||||||
- [11. Find a new issue.](#11-find-a-new-issue)
|
|
||||||
- [Notes for maintainers on merging PRs etc](#notes-for-maintainers-on-merging-prs-etc)
|
|
||||||
- [Conclusion](#conclusion)
|
|
||||||
|
|
||||||
# 1. Who can contribute to Synapse?
|
|
||||||
|
|
||||||
Everyone is welcome to contribute code to [matrix.org
|
|
||||||
projects](https://github.com/matrix-org), provided that they are willing to
|
|
||||||
license their contributions under the same license as the project itself. We
|
|
||||||
follow a simple 'inbound=outbound' model for contributions: the act of
|
|
||||||
submitting an 'inbound' contribution means that the contributor agrees to
|
|
||||||
license the code under the same terms as the project's overall 'outbound'
|
|
||||||
license - in our case, this is almost always Apache Software License v2 (see
|
|
||||||
[LICENSE](LICENSE)).
|
|
||||||
|
|
||||||
# 2. What do I need?
|
|
||||||
|
|
||||||
The code of Synapse is written in Python 3. To do pretty much anything, you'll need [a recent version of Python 3](https://wiki.python.org/moin/BeginnersGuide/Download).
|
|
||||||
|
|
||||||
The source code of Synapse is hosted on GitHub. You will also need [a recent version of git](https://github.com/git-guides/install-git).
|
|
||||||
|
|
||||||
For some tests, you will need [a recent version of Docker](https://docs.docker.com/get-docker/).
|
|
||||||
|
|
||||||
|
|
||||||
# 3. Get the source.
|
|
||||||
|
|
||||||
The preferred and easiest way to contribute changes is to fork the relevant
|
|
||||||
project on GitHub, and then [create a pull request](
|
|
||||||
https://help.github.com/articles/using-pull-requests/) to ask us to pull your
|
|
||||||
changes into our repo.
|
|
||||||
|
|
||||||
Please base your changes on the `develop` branch.
|
|
||||||
|
|
||||||
```sh
|
|
||||||
git clone git@github.com:YOUR_GITHUB_USER_NAME/synapse.git
|
|
||||||
git checkout develop
|
|
||||||
```
|
|
||||||
|
|
||||||
If you need help getting started with git, this is beyond the scope of the document, but you
|
|
||||||
can find many good git tutorials on the web.
|
|
||||||
|
|
||||||
# 4. Install the dependencies
|
|
||||||
|
|
||||||
## Under Unix (macOS, Linux, BSD, ...)
|
|
||||||
|
|
||||||
Once you have installed Python 3 and added the source, please open a terminal and
|
|
||||||
setup a *virtualenv*, as follows:
|
|
||||||
|
|
||||||
```sh
|
|
||||||
cd path/where/you/have/cloned/the/repository
|
|
||||||
python3 -m venv ./env
|
|
||||||
source ./env/bin/activate
|
|
||||||
pip install -e ".[all,lint,mypy,test]"
|
|
||||||
pip install tox
|
|
||||||
```
|
|
||||||
|
|
||||||
This will install the developer dependencies for the project.
|
|
||||||
|
|
||||||
## Under Windows
|
|
||||||
|
|
||||||
TBD
|
|
||||||
|
|
||||||
|
|
||||||
# 5. Get in touch.
|
|
||||||
|
|
||||||
Join our developer community on Matrix: #synapse-dev:matrix.org !
|
|
||||||
|
|
||||||
|
|
||||||
# 6. Pick an issue.
|
|
||||||
|
|
||||||
Fix your favorite problem or perhaps find a [Good First Issue](https://github.com/matrix-org/synapse/issues?q=is%3Aopen+is%3Aissue+label%3A%22Good+First+Issue%22)
|
|
||||||
to work on.
|
|
||||||
|
|
||||||
|
|
||||||
# 7. Turn coffee and documentation into code and documentation!
|
|
||||||
|
|
||||||
Synapse's code style is documented [here](docs/code_style.md). Please follow
|
|
||||||
it, including the conventions for the [sample configuration
|
|
||||||
file](docs/code_style.md#configuration-file-format).
|
|
||||||
|
|
||||||
There is a growing amount of documentation located in the [docs](docs)
|
|
||||||
directory. This documentation is intended primarily for sysadmins running their
|
|
||||||
own Synapse instance, as well as developers interacting externally with
|
|
||||||
Synapse. [docs/dev](docs/dev) exists primarily to house documentation for
|
|
||||||
Synapse developers. [docs/admin_api](docs/admin_api) houses documentation
|
|
||||||
regarding Synapse's Admin API, which is used mostly by sysadmins and external
|
|
||||||
service developers.
|
|
||||||
|
|
||||||
If you add new files added to either of these folders, please use [GitHub-Flavoured
|
|
||||||
Markdown](https://guides.github.com/features/mastering-markdown/).
|
|
||||||
|
|
||||||
Some documentation also exists in [Synapse's GitHub
|
|
||||||
Wiki](https://github.com/matrix-org/synapse/wiki), although this is primarily
|
|
||||||
contributed to by community authors.
|
|
||||||
|
|
||||||
|
|
||||||
# 8. Test, test, test!
|
|
||||||
<a name="test-test-test"></a>
|
|
||||||
|
|
||||||
While you're developing and before submitting a patch, you'll
|
|
||||||
want to test your code.
|
|
||||||
|
|
||||||
## Run the linters.
|
|
||||||
|
|
||||||
The linters look at your code and do two things:
|
|
||||||
|
|
||||||
- ensure that your code follows the coding style adopted by the project;
|
|
||||||
- catch a number of errors in your code.
|
|
||||||
|
|
||||||
They're pretty fast, don't hesitate!
|
|
||||||
|
|
||||||
```sh
|
|
||||||
source ./env/bin/activate
|
|
||||||
./scripts-dev/lint.sh
|
|
||||||
```
|
|
||||||
|
|
||||||
Note that this script *will modify your files* to fix styling errors.
|
|
||||||
Make sure that you have saved all your files.
|
|
||||||
|
|
||||||
If you wish to restrict the linters to only the files changed since the last commit
|
|
||||||
(much faster!), you can instead run:
|
|
||||||
|
|
||||||
```sh
|
|
||||||
source ./env/bin/activate
|
|
||||||
./scripts-dev/lint.sh -d
|
|
||||||
```
|
|
||||||
|
|
||||||
Or if you know exactly which files you wish to lint, you can instead run:
|
|
||||||
|
|
||||||
```sh
|
|
||||||
source ./env/bin/activate
|
|
||||||
./scripts-dev/lint.sh path/to/file1.py path/to/file2.py path/to/folder
|
|
||||||
```
|
|
||||||
|
|
||||||
## Run the unit tests.
|
|
||||||
|
|
||||||
The unit tests run parts of Synapse, including your changes, to see if anything
|
|
||||||
was broken. They are slower than the linters but will typically catch more errors.
|
|
||||||
|
|
||||||
```sh
|
|
||||||
source ./env/bin/activate
|
|
||||||
trial tests
|
|
||||||
```
|
|
||||||
|
|
||||||
If you wish to only run *some* unit tests, you may specify
|
|
||||||
another module instead of `tests` - or a test class or a method:
|
|
||||||
|
|
||||||
```sh
|
|
||||||
source ./env/bin/activate
|
|
||||||
trial tests.rest.admin.test_room tests.handlers.test_admin.ExfiltrateData.test_invite
|
|
||||||
```
|
|
||||||
|
|
||||||
If your tests fail, you may wish to look at the logs:
|
|
||||||
|
|
||||||
```sh
|
|
||||||
less _trial_temp/test.log
|
|
||||||
```
|
|
||||||
|
|
||||||
## Run the integration tests.
|
|
||||||
|
|
||||||
The integration tests are a more comprehensive suite of tests. They
|
|
||||||
run a full version of Synapse, including your changes, to check if
|
|
||||||
anything was broken. They are slower than the unit tests but will
|
|
||||||
typically catch more errors.
|
|
||||||
|
|
||||||
The following command will let you run the integration test with the most common
|
|
||||||
configuration:
|
|
||||||
|
|
||||||
```sh
|
|
||||||
$ docker run --rm -it -v /path/where/you/have/cloned/the/repository\:/src:ro -v /path/to/where/you/want/logs\:/logs matrixdotorg/sytest-synapse:py37
|
|
||||||
```
|
|
||||||
|
|
||||||
This configuration should generally cover your needs. For more details about other configurations, see [documentation in the SyTest repo](https://github.com/matrix-org/sytest/blob/develop/docker/README.md).
|
|
||||||
|
|
||||||
|
|
||||||
# 9. Submit your patch.
|
|
||||||
|
|
||||||
Once you're happy with your patch, it's time to prepare a Pull Request.
|
|
||||||
|
|
||||||
To prepare a Pull Request, please:
|
|
||||||
|
|
||||||
1. verify that [all the tests pass](#test-test-test), including the coding style;
|
|
||||||
2. [sign off](#sign-off) your contribution;
|
|
||||||
3. `git push` your commit to your fork of Synapse;
|
|
||||||
4. on GitHub, [create the Pull Request](https://docs.github.com/en/github/collaborating-with-issues-and-pull-requests/creating-a-pull-request);
|
|
||||||
5. add a [changelog entry](#changelog) and push it to your Pull Request;
|
|
||||||
6. for most contributors, that's all - however, if you are a member of the organization `matrix-org`, on GitHub, please request a review from `matrix.org / Synapse Core`.
|
|
||||||
|
|
||||||
|
|
||||||
## Changelog
|
|
||||||
|
|
||||||
All changes, even minor ones, need a corresponding changelog / newsfragment
|
|
||||||
entry. These are managed by [Towncrier](https://github.com/hawkowl/towncrier).
|
|
||||||
|
|
||||||
To create a changelog entry, make a new file in the `changelog.d` directory named
|
|
||||||
in the format of `PRnumber.type`. The type can be one of the following:
|
|
||||||
|
|
||||||
* `feature`
|
|
||||||
* `bugfix`
|
|
||||||
* `docker` (for updates to the Docker image)
|
|
||||||
* `doc` (for updates to the documentation)
|
|
||||||
* `removal` (also used for deprecations)
|
|
||||||
* `misc` (for internal-only changes)
|
|
||||||
|
|
||||||
This file will become part of our [changelog](
|
|
||||||
https://github.com/matrix-org/synapse/blob/master/CHANGES.md) at the next
|
|
||||||
release, so the content of the file should be a short description of your
|
|
||||||
change in the same style as the rest of the changelog. The file can contain Markdown
|
|
||||||
formatting, and should end with a full stop (.) or an exclamation mark (!) for
|
|
||||||
consistency.
|
|
||||||
|
|
||||||
Adding credits to the changelog is encouraged, we value your
|
|
||||||
contributions and would like to have you shouted out in the release notes!
|
|
||||||
|
|
||||||
For example, a fix in PR #1234 would have its changelog entry in
|
|
||||||
`changelog.d/1234.bugfix`, and contain content like:
|
|
||||||
|
|
||||||
> The security levels of Florbs are now validated when received
|
|
||||||
> via the `/federation/florb` endpoint. Contributed by Jane Matrix.
|
|
||||||
|
|
||||||
If there are multiple pull requests involved in a single bugfix/feature/etc,
|
|
||||||
then the content for each `changelog.d` file should be the same. Towncrier will
|
|
||||||
merge the matching files together into a single changelog entry when we come to
|
|
||||||
release.
|
|
||||||
|
|
||||||
### How do I know what to call the changelog file before I create the PR?
|
|
||||||
|
|
||||||
Obviously, you don't know if you should call your newsfile
|
|
||||||
`1234.bugfix` or `5678.bugfix` until you create the PR, which leads to a
|
|
||||||
chicken-and-egg problem.
|
|
||||||
|
|
||||||
There are two options for solving this:
|
|
||||||
|
|
||||||
1. Open the PR without a changelog file, see what number you got, and *then*
|
|
||||||
add the changelog file to your branch (see [Updating your pull
|
|
||||||
request](#updating-your-pull-request)), or:
|
|
||||||
|
|
||||||
1. Look at the [list of all
|
|
||||||
issues/PRs](https://github.com/matrix-org/synapse/issues?q=), add one to the
|
|
||||||
highest number you see, and quickly open the PR before somebody else claims
|
|
||||||
your number.
|
|
||||||
|
|
||||||
[This
|
|
||||||
script](https://github.com/richvdh/scripts/blob/master/next_github_number.sh)
|
|
||||||
might be helpful if you find yourself doing this a lot.
|
|
||||||
|
|
||||||
Sorry, we know it's a bit fiddly, but it's *really* helpful for us when we come
|
|
||||||
to put together a release!
|
|
||||||
|
|
||||||
### Debian changelog
|
|
||||||
|
|
||||||
Changes which affect the debian packaging files (in `debian`) are an
|
|
||||||
exception to the rule that all changes require a `changelog.d` file.
|
|
||||||
|
|
||||||
In this case, you will need to add an entry to the debian changelog for the
|
|
||||||
next release. For this, run the following command:
|
|
||||||
|
|
||||||
```
|
|
||||||
dch
|
|
||||||
```
|
|
||||||
|
|
||||||
This will make up a new version number (if there isn't already an unreleased
|
|
||||||
version in flight), and open an editor where you can add a new changelog entry.
|
|
||||||
(Our release process will ensure that the version number and maintainer name is
|
|
||||||
corrected for the release.)
|
|
||||||
|
|
||||||
If your change affects both the debian packaging *and* files outside the debian
|
|
||||||
directory, you will need both a regular newsfragment *and* an entry in the
|
|
||||||
debian changelog. (Though typically such changes should be submitted as two
|
|
||||||
separate pull requests.)
|
|
||||||
|
|
||||||
## Sign off
|
|
||||||
|
|
||||||
In order to have a concrete record that your contribution is intentional
|
|
||||||
and you agree to license it under the same terms as the project's license, we've adopted the
|
|
||||||
same lightweight approach that the Linux Kernel
|
|
||||||
[submitting patches process](
|
|
||||||
https://www.kernel.org/doc/html/latest/process/submitting-patches.html#sign-your-work-the-developer-s-certificate-of-origin>),
|
|
||||||
[Docker](https://github.com/docker/docker/blob/master/CONTRIBUTING.md), and many other
|
|
||||||
projects use: the DCO (Developer Certificate of Origin:
|
|
||||||
http://developercertificate.org/). This is a simple declaration that you wrote
|
|
||||||
the contribution or otherwise have the right to contribute it to Matrix:
|
|
||||||
|
|
||||||
```
|
|
||||||
Developer Certificate of Origin
|
|
||||||
Version 1.1
|
|
||||||
|
|
||||||
Copyright (C) 2004, 2006 The Linux Foundation and its contributors.
|
|
||||||
660 York Street, Suite 102,
|
|
||||||
San Francisco, CA 94110 USA
|
|
||||||
|
|
||||||
Everyone is permitted to copy and distribute verbatim copies of this
|
|
||||||
license document, but changing it is not allowed.
|
|
||||||
|
|
||||||
Developer's Certificate of Origin 1.1
|
|
||||||
|
|
||||||
By making a contribution to this project, I certify that:
|
|
||||||
|
|
||||||
(a) The contribution was created in whole or in part by me and I
|
|
||||||
have the right to submit it under the open source license
|
|
||||||
indicated in the file; or
|
|
||||||
|
|
||||||
(b) The contribution is based upon previous work that, to the best
|
|
||||||
of my knowledge, is covered under an appropriate open source
|
|
||||||
license and I have the right under that license to submit that
|
|
||||||
work with modifications, whether created in whole or in part
|
|
||||||
by me, under the same open source license (unless I am
|
|
||||||
permitted to submit under a different license), as indicated
|
|
||||||
in the file; or
|
|
||||||
|
|
||||||
(c) The contribution was provided directly to me by some other
|
|
||||||
person who certified (a), (b) or (c) and I have not modified
|
|
||||||
it.
|
|
||||||
|
|
||||||
(d) I understand and agree that this project and the contribution
|
|
||||||
are public and that a record of the contribution (including all
|
|
||||||
personal information I submit with it, including my sign-off) is
|
|
||||||
maintained indefinitely and may be redistributed consistent with
|
|
||||||
this project or the open source license(s) involved.
|
|
||||||
```
|
|
||||||
|
|
||||||
If you agree to this for your contribution, then all that's needed is to
|
|
||||||
include the line in your commit or pull request comment:
|
|
||||||
|
|
||||||
```
|
|
||||||
Signed-off-by: Your Name <your@email.example.org>
|
|
||||||
```
|
|
||||||
|
|
||||||
We accept contributions under a legally identifiable name, such as
|
|
||||||
your name on government documentation or common-law names (names
|
|
||||||
claimed by legitimate usage or repute). Unfortunately, we cannot
|
|
||||||
accept anonymous contributions at this time.
|
|
||||||
|
|
||||||
Git allows you to add this signoff automatically when using the `-s`
|
|
||||||
flag to `git commit`, which uses the name and email set in your
|
|
||||||
`user.name` and `user.email` git configs.
|
|
||||||
|
|
||||||
|
|
||||||
# 10. Turn feedback into better code.
|
|
||||||
|
|
||||||
Once the Pull Request is opened, you will see a few things:
|
|
||||||
|
|
||||||
1. our automated CI (Continuous Integration) pipeline will run (again) the linters, the unit tests, the integration tests and more;
|
|
||||||
2. one or more of the developers will take a look at your Pull Request and offer feedback.
|
|
||||||
|
|
||||||
From this point, you should:
|
|
||||||
|
|
||||||
1. Look at the results of the CI pipeline.
|
|
||||||
- If there is any error, fix the error.
|
|
||||||
2. If a developer has requested changes, make these changes and let us know if it is ready for a developer to review again.
|
|
||||||
3. Create a new commit with the changes.
|
|
||||||
- Please do NOT overwrite the history. New commits make the reviewer's life easier.
|
|
||||||
- Push this commits to your Pull Request.
|
|
||||||
4. Back to 1.
|
|
||||||
|
|
||||||
Once both the CI and the developers are happy, the patch will be merged into Synapse and released shortly!
|
|
||||||
|
|
||||||
# 11. Find a new issue.
|
|
||||||
|
|
||||||
By now, you know the drill!
|
|
||||||
|
|
||||||
# Notes for maintainers on merging PRs etc
|
|
||||||
|
|
||||||
There are some notes for those with commit access to the project on how we
|
|
||||||
manage git [here](docs/dev/git.md).
|
|
||||||
|
|
||||||
# Conclusion
|
|
||||||
|
|
||||||
That's it! Matrix is a very open and collaborative project as you might expect
|
|
||||||
given our obsession with open communication. If we're going to successfully
|
|
||||||
matrix together all the fragmented communication technologies out there we are
|
|
||||||
reliant on contributions and collaboration from the community to do so. So
|
|
||||||
please get involved - and we hope you have as much fun hacking on Matrix as we
|
|
||||||
do!
|
|
||||||
@@ -0,0 +1,198 @@
|
|||||||
|
Contributing code to Matrix
|
||||||
|
===========================
|
||||||
|
|
||||||
|
Everyone is welcome to contribute code to Matrix
|
||||||
|
(https://github.com/matrix-org), provided that they are willing to license
|
||||||
|
their contributions under the same license as the project itself. We follow a
|
||||||
|
simple 'inbound=outbound' model for contributions: the act of submitting an
|
||||||
|
'inbound' contribution means that the contributor agrees to license the code
|
||||||
|
under the same terms as the project's overall 'outbound' license - in our
|
||||||
|
case, this is almost always Apache Software License v2 (see LICENSE).
|
||||||
|
|
||||||
|
How to contribute
|
||||||
|
~~~~~~~~~~~~~~~~~
|
||||||
|
|
||||||
|
The preferred and easiest way to contribute changes to Matrix is to fork the
|
||||||
|
relevant project on github, and then create a pull request to ask us to pull
|
||||||
|
your changes into our repo
|
||||||
|
(https://help.github.com/articles/using-pull-requests/)
|
||||||
|
|
||||||
|
**The single biggest thing you need to know is: please base your changes on
|
||||||
|
the develop branch - /not/ master.**
|
||||||
|
|
||||||
|
We use the master branch to track the most recent release, so that folks who
|
||||||
|
blindly clone the repo and automatically check out master get something that
|
||||||
|
works. Develop is the unstable branch where all the development actually
|
||||||
|
happens: the workflow is that contributors should fork the develop branch to
|
||||||
|
make a 'feature' branch for a particular contribution, and then make a pull
|
||||||
|
request to merge this back into the matrix.org 'official' develop branch. We
|
||||||
|
use github's pull request workflow to review the contribution, and either ask
|
||||||
|
you to make any refinements needed or merge it and make them ourselves. The
|
||||||
|
changes will then land on master when we next do a release.
|
||||||
|
|
||||||
|
We use `Buildkite <https://buildkite.com/matrix-dot-org/synapse>`_ for
|
||||||
|
continuous integration. Buildkite builds need to be authorised by a
|
||||||
|
maintainer. If your change breaks the build, this will be shown in GitHub, so
|
||||||
|
please keep an eye on the pull request for feedback.
|
||||||
|
|
||||||
|
To run unit tests in a local development environment, you can use:
|
||||||
|
|
||||||
|
- ``tox -e py35`` (requires tox to be installed by ``pip install tox``)
|
||||||
|
for SQLite-backed Synapse on Python 3.5.
|
||||||
|
- ``tox -e py36`` for SQLite-backed Synapse on Python 3.6.
|
||||||
|
- ``tox -e py36-postgres`` for PostgreSQL-backed Synapse on Python 3.6
|
||||||
|
(requires a running local PostgreSQL with access to create databases).
|
||||||
|
- ``./test_postgresql.sh`` for PostgreSQL-backed Synapse on Python 3.5
|
||||||
|
(requires Docker). Entirely self-contained, recommended if you don't want to
|
||||||
|
set up PostgreSQL yourself.
|
||||||
|
|
||||||
|
Docker images are available for running the integration tests (SyTest) locally,
|
||||||
|
see the `documentation in the SyTest repo
|
||||||
|
<https://github.com/matrix-org/sytest/blob/develop/docker/README.md>`_ for more
|
||||||
|
information.
|
||||||
|
|
||||||
|
Code style
|
||||||
|
~~~~~~~~~~
|
||||||
|
|
||||||
|
All Matrix projects have a well-defined code-style - and sometimes we've even
|
||||||
|
got as far as documenting it... For instance, synapse's code style doc lives
|
||||||
|
at https://github.com/matrix-org/synapse/tree/master/docs/code_style.rst.
|
||||||
|
|
||||||
|
Please ensure your changes match the cosmetic style of the existing project,
|
||||||
|
and **never** mix cosmetic and functional changes in the same commit, as it
|
||||||
|
makes it horribly hard to review otherwise.
|
||||||
|
|
||||||
|
Changelog
|
||||||
|
~~~~~~~~~
|
||||||
|
|
||||||
|
All changes, even minor ones, need a corresponding changelog / newsfragment
|
||||||
|
entry. These are managed by Towncrier
|
||||||
|
(https://github.com/hawkowl/towncrier).
|
||||||
|
|
||||||
|
To create a changelog entry, make a new file in the ``changelog.d`` directory named
|
||||||
|
in the format of ``PRnumber.type``. The type can be one of the following:
|
||||||
|
|
||||||
|
* ``feature``.
|
||||||
|
* ``bugfix``.
|
||||||
|
* ``docker`` (for updates to the Docker image).
|
||||||
|
* ``doc`` (for updates to the documentation).
|
||||||
|
* ``removal`` (also used for deprecations).
|
||||||
|
* ``misc`` (for internal-only changes).
|
||||||
|
|
||||||
|
The content of the file is your changelog entry, which should be a short
|
||||||
|
description of your change in the same style as the rest of our `changelog
|
||||||
|
<https://github.com/matrix-org/synapse/blob/master/CHANGES.md>`_. The file can
|
||||||
|
contain Markdown formatting, and should end with a full stop ('.') for
|
||||||
|
consistency.
|
||||||
|
|
||||||
|
Adding credits to the changelog is encouraged, we value your
|
||||||
|
contributions and would like to have you shouted out in the release notes!
|
||||||
|
|
||||||
|
For example, a fix in PR #1234 would have its changelog entry in
|
||||||
|
``changelog.d/1234.bugfix``, and contain content like "The security levels of
|
||||||
|
Florbs are now validated when received over federation. Contributed by Jane
|
||||||
|
Matrix.".
|
||||||
|
|
||||||
|
Debian changelog
|
||||||
|
----------------
|
||||||
|
|
||||||
|
Changes which affect the debian packaging files (in ``debian``) are an
|
||||||
|
exception.
|
||||||
|
|
||||||
|
In this case, you will need to add an entry to the debian changelog for the
|
||||||
|
next release. For this, run the following command::
|
||||||
|
|
||||||
|
dch
|
||||||
|
|
||||||
|
This will make up a new version number (if there isn't already an unreleased
|
||||||
|
version in flight), and open an editor where you can add a new changelog entry.
|
||||||
|
(Our release process will ensure that the version number and maintainer name is
|
||||||
|
corrected for the release.)
|
||||||
|
|
||||||
|
If your change affects both the debian packaging *and* files outside the debian
|
||||||
|
directory, you will need both a regular newsfragment *and* an entry in the
|
||||||
|
debian changelog. (Though typically such changes should be submitted as two
|
||||||
|
separate pull requests.)
|
||||||
|
|
||||||
|
Attribution
|
||||||
|
~~~~~~~~~~~
|
||||||
|
|
||||||
|
Everyone who contributes anything to Matrix is welcome to be listed in the
|
||||||
|
AUTHORS.rst file for the project in question. Please feel free to include a
|
||||||
|
change to AUTHORS.rst in your pull request to list yourself and a short
|
||||||
|
description of the area(s) you've worked on. Also, we sometimes have swag to
|
||||||
|
give away to contributors - if you feel that Matrix-branded apparel is missing
|
||||||
|
from your life, please mail us your shipping address to matrix at matrix.org and
|
||||||
|
we'll try to fix it :)
|
||||||
|
|
||||||
|
Sign off
|
||||||
|
~~~~~~~~
|
||||||
|
|
||||||
|
In order to have a concrete record that your contribution is intentional
|
||||||
|
and you agree to license it under the same terms as the project's license, we've adopted the
|
||||||
|
same lightweight approach that the Linux Kernel
|
||||||
|
`submitting patches process <https://www.kernel.org/doc/html/latest/process/submitting-patches.html#sign-your-work-the-developer-s-certificate-of-origin>`_, Docker
|
||||||
|
(https://github.com/docker/docker/blob/master/CONTRIBUTING.md), and many other
|
||||||
|
projects use: the DCO (Developer Certificate of Origin:
|
||||||
|
http://developercertificate.org/). This is a simple declaration that you wrote
|
||||||
|
the contribution or otherwise have the right to contribute it to Matrix::
|
||||||
|
|
||||||
|
Developer Certificate of Origin
|
||||||
|
Version 1.1
|
||||||
|
|
||||||
|
Copyright (C) 2004, 2006 The Linux Foundation and its contributors.
|
||||||
|
660 York Street, Suite 102,
|
||||||
|
San Francisco, CA 94110 USA
|
||||||
|
|
||||||
|
Everyone is permitted to copy and distribute verbatim copies of this
|
||||||
|
license document, but changing it is not allowed.
|
||||||
|
|
||||||
|
Developer's Certificate of Origin 1.1
|
||||||
|
|
||||||
|
By making a contribution to this project, I certify that:
|
||||||
|
|
||||||
|
(a) The contribution was created in whole or in part by me and I
|
||||||
|
have the right to submit it under the open source license
|
||||||
|
indicated in the file; or
|
||||||
|
|
||||||
|
(b) The contribution is based upon previous work that, to the best
|
||||||
|
of my knowledge, is covered under an appropriate open source
|
||||||
|
license and I have the right under that license to submit that
|
||||||
|
work with modifications, whether created in whole or in part
|
||||||
|
by me, under the same open source license (unless I am
|
||||||
|
permitted to submit under a different license), as indicated
|
||||||
|
in the file; or
|
||||||
|
|
||||||
|
(c) The contribution was provided directly to me by some other
|
||||||
|
person who certified (a), (b) or (c) and I have not modified
|
||||||
|
it.
|
||||||
|
|
||||||
|
(d) I understand and agree that this project and the contribution
|
||||||
|
are public and that a record of the contribution (including all
|
||||||
|
personal information I submit with it, including my sign-off) is
|
||||||
|
maintained indefinitely and may be redistributed consistent with
|
||||||
|
this project or the open source license(s) involved.
|
||||||
|
|
||||||
|
If you agree to this for your contribution, then all that's needed is to
|
||||||
|
include the line in your commit or pull request comment::
|
||||||
|
|
||||||
|
Signed-off-by: Your Name <your@email.example.org>
|
||||||
|
|
||||||
|
We accept contributions under a legally identifiable name, such as
|
||||||
|
your name on government documentation or common-law names (names
|
||||||
|
claimed by legitimate usage or repute). Unfortunately, we cannot
|
||||||
|
accept anonymous contributions at this time.
|
||||||
|
|
||||||
|
Git allows you to add this signoff automatically when using the ``-s``
|
||||||
|
flag to ``git commit``, which uses the name and email set in your
|
||||||
|
``user.name`` and ``user.email`` git configs.
|
||||||
|
|
||||||
|
Conclusion
|
||||||
|
~~~~~~~~~~
|
||||||
|
|
||||||
|
That's it! Matrix is a very open and collaborative project as you might expect
|
||||||
|
given our obsession with open communication. If we're going to successfully
|
||||||
|
matrix together all the fragmented communication technologies out there we are
|
||||||
|
reliant on contributions and collaboration from the community to do so. So
|
||||||
|
please get involved - and we hope you have as much fun hacking on Matrix as we
|
||||||
|
do!
|
||||||
+213
-342
@@ -1,45 +1,17 @@
|
|||||||
# Installation Instructions
|
- [Choosing your server name](#choosing-your-server-name)
|
||||||
|
- [Installing Synapse](#installing-synapse)
|
||||||
There are 3 steps to follow under **Installation Instructions**.
|
- [Installing from source](#installing-from-source)
|
||||||
|
- [Platform-Specific Instructions](#platform-specific-instructions)
|
||||||
- [Installation Instructions](#installation-instructions)
|
|
||||||
- [Choosing your server name](#choosing-your-server-name)
|
|
||||||
- [Installing Synapse](#installing-synapse)
|
|
||||||
- [Installing from source](#installing-from-source)
|
|
||||||
- [Platform-specific prerequisites](#platform-specific-prerequisites)
|
|
||||||
- [Debian/Ubuntu/Raspbian](#debianubunturaspbian)
|
|
||||||
- [ArchLinux](#archlinux)
|
|
||||||
- [CentOS/Fedora](#centosfedora)
|
|
||||||
- [macOS](#macos)
|
|
||||||
- [OpenSUSE](#opensuse)
|
|
||||||
- [OpenBSD](#openbsd)
|
|
||||||
- [Windows](#windows)
|
|
||||||
- [Prebuilt packages](#prebuilt-packages)
|
|
||||||
- [Docker images and Ansible playbooks](#docker-images-and-ansible-playbooks)
|
|
||||||
- [Debian/Ubuntu](#debianubuntu)
|
|
||||||
- [Matrix.org packages](#matrixorg-packages)
|
|
||||||
- [Downstream Debian packages](#downstream-debian-packages)
|
|
||||||
- [Downstream Ubuntu packages](#downstream-ubuntu-packages)
|
|
||||||
- [Fedora](#fedora)
|
|
||||||
- [OpenSUSE](#opensuse-1)
|
|
||||||
- [SUSE Linux Enterprise Server](#suse-linux-enterprise-server)
|
|
||||||
- [ArchLinux](#archlinux-1)
|
|
||||||
- [Void Linux](#void-linux)
|
|
||||||
- [FreeBSD](#freebsd)
|
|
||||||
- [OpenBSD](#openbsd-1)
|
|
||||||
- [NixOS](#nixos)
|
|
||||||
- [Setting up Synapse](#setting-up-synapse)
|
|
||||||
- [Using PostgreSQL](#using-postgresql)
|
|
||||||
- [TLS certificates](#tls-certificates)
|
|
||||||
- [Client Well-Known URI](#client-well-known-uri)
|
|
||||||
- [Email](#email)
|
|
||||||
- [Registering a user](#registering-a-user)
|
|
||||||
- [Setting up a TURN server](#setting-up-a-turn-server)
|
|
||||||
- [URL previews](#url-previews)
|
|
||||||
- [Troubleshooting Installation](#troubleshooting-installation)
|
- [Troubleshooting Installation](#troubleshooting-installation)
|
||||||
|
- [Prebuilt packages](#prebuilt-packages)
|
||||||
|
- [Setting up Synapse](#setting-up-synapse)
|
||||||
|
- [TLS certificates](#tls-certificates)
|
||||||
|
- [Email](#email)
|
||||||
|
- [Registering a user](#registering-a-user)
|
||||||
|
- [Setting up a TURN server](#setting-up-a-turn-server)
|
||||||
|
- [URL previews](#url-previews)
|
||||||
|
|
||||||
|
# Choosing your server name
|
||||||
## Choosing your server name
|
|
||||||
|
|
||||||
It is important to choose the name for your server before you install Synapse,
|
It is important to choose the name for your server before you install Synapse,
|
||||||
because it cannot be changed later.
|
because it cannot be changed later.
|
||||||
@@ -55,24 +27,27 @@ that your email address is probably `user@example.com` rather than
|
|||||||
`user@email.example.com`) - but doing so may require more advanced setup: see
|
`user@email.example.com`) - but doing so may require more advanced setup: see
|
||||||
[Setting up Federation](docs/federate.md).
|
[Setting up Federation](docs/federate.md).
|
||||||
|
|
||||||
## Installing Synapse
|
# Installing Synapse
|
||||||
|
|
||||||
### Installing from source
|
## Installing from source
|
||||||
|
|
||||||
(Prebuilt packages are available for some platforms - see [Prebuilt packages](#prebuilt-packages).)
|
(Prebuilt packages are available for some platforms - see [Prebuilt packages](#prebuilt-packages).)
|
||||||
|
|
||||||
When installing from source please make sure that the [Platform-specific prerequisites](#platform-specific-prerequisites) are already installed.
|
|
||||||
|
|
||||||
System requirements:
|
System requirements:
|
||||||
|
|
||||||
- POSIX-compliant system (tested on Linux & OS X)
|
- POSIX-compliant system (tested on Linux & OS X)
|
||||||
- Python 3.5.2 or later, up to Python 3.9.
|
- Python 3.5, 3.6, 3.7, or 2.7
|
||||||
- At least 1GB of free RAM if you want to join large public rooms like #matrix:matrix.org
|
- At least 1GB of free RAM if you want to join large public rooms like #matrix:matrix.org
|
||||||
|
|
||||||
|
Synapse is written in Python but some of the libraries it uses are written in
|
||||||
|
C. So before we can install Synapse itself we need a working C compiler and the
|
||||||
|
header files for Python C extensions. See [Platform-Specific
|
||||||
|
Instructions](#platform-specific-instructions) for information on installing
|
||||||
|
these on various platforms.
|
||||||
|
|
||||||
To install the Synapse homeserver run:
|
To install the Synapse homeserver run:
|
||||||
|
|
||||||
```sh
|
```
|
||||||
mkdir -p ~/synapse
|
mkdir -p ~/synapse
|
||||||
virtualenv -p python3 ~/synapse/env
|
virtualenv -p python3 ~/synapse/env
|
||||||
source ~/synapse/env/bin/activate
|
source ~/synapse/env/bin/activate
|
||||||
@@ -89,15 +64,15 @@ prefer.
|
|||||||
This Synapse installation can then be later upgraded by using pip again with the
|
This Synapse installation can then be later upgraded by using pip again with the
|
||||||
update flag:
|
update flag:
|
||||||
|
|
||||||
```sh
|
```
|
||||||
source ~/synapse/env/bin/activate
|
source ~/synapse/env/bin/activate
|
||||||
pip install -U matrix-synapse
|
pip install -U matrix-synapse
|
||||||
```
|
```
|
||||||
|
|
||||||
Before you can start Synapse, you will need to generate a configuration
|
Before you can start Synapse, you will need to generate a configuration
|
||||||
file. To do this, run (in your virtualenv, as before):
|
file. To do this, run (in your virtualenv, as before)::
|
||||||
|
|
||||||
```sh
|
```
|
||||||
cd ~/synapse
|
cd ~/synapse
|
||||||
python -m synapse.app.homeserver \
|
python -m synapse.app.homeserver \
|
||||||
--server-name my.domain.name \
|
--server-name my.domain.name \
|
||||||
@@ -109,169 +84,189 @@ python -m synapse.app.homeserver \
|
|||||||
... substituting an appropriate value for `--server-name`.
|
... substituting an appropriate value for `--server-name`.
|
||||||
|
|
||||||
This command will generate you a config file that you can then customise, but it will
|
This command will generate you a config file that you can then customise, but it will
|
||||||
also generate a set of keys for you. These keys will allow your homeserver to
|
also generate a set of keys for you. These keys will allow your Home Server to
|
||||||
identify itself to other homeservers, so don't lose or delete them. It would be
|
identify itself to other Home Servers, so don't lose or delete them. It would be
|
||||||
wise to back them up somewhere safe. (If, for whatever reason, you do need to
|
wise to back them up somewhere safe. (If, for whatever reason, you do need to
|
||||||
change your homeserver's keys, you may find that other homeservers have the
|
change your Home Server's keys, you may find that other Home Servers have the
|
||||||
old key cached. If you update the signing key, you should change the name of the
|
old key cached. If you update the signing key, you should change the name of the
|
||||||
key in the `<server name>.signing.key` file (the second word) to something
|
key in the `<server name>.signing.key` file (the second word) to something
|
||||||
different. See the [spec](https://matrix.org/docs/spec/server_server/latest.html#retrieving-server-keys) for more information on key management).
|
different. See the
|
||||||
|
[spec](https://matrix.org/docs/spec/server_server/latest.html#retrieving-server-keys)
|
||||||
|
for more information on key management.)
|
||||||
|
|
||||||
To actually run your new homeserver, pick a working directory for Synapse to
|
To actually run your new homeserver, pick a working directory for Synapse to
|
||||||
run (e.g. `~/synapse`), and:
|
run (e.g. `~/synapse`), and::
|
||||||
|
|
||||||
```sh
|
cd ~/synapse
|
||||||
cd ~/synapse
|
source env/bin/activate
|
||||||
source env/bin/activate
|
synctl start
|
||||||
synctl start
|
|
||||||
```
|
|
||||||
|
|
||||||
#### Platform-specific prerequisites
|
### Platform-Specific Instructions
|
||||||
|
|
||||||
Synapse is written in Python but some of the libraries it uses are written in
|
#### Debian/Ubuntu/Raspbian
|
||||||
C. So before we can install Synapse itself we need a working C compiler and the
|
|
||||||
header files for Python C extensions.
|
|
||||||
|
|
||||||
##### Debian/Ubuntu/Raspbian
|
|
||||||
|
|
||||||
Installing prerequisites on Ubuntu or Debian:
|
Installing prerequisites on Ubuntu or Debian:
|
||||||
|
|
||||||
```sh
|
```
|
||||||
sudo apt install build-essential python3-dev libffi-dev \
|
sudo apt-get install build-essential python3-dev libffi-dev \
|
||||||
python3-pip python3-setuptools sqlite3 \
|
python-pip python-setuptools sqlite3 \
|
||||||
libssl-dev virtualenv libjpeg-dev libxslt1-dev
|
libssl-dev python-virtualenv libjpeg-dev libxslt1-dev
|
||||||
```
|
```
|
||||||
|
|
||||||
##### ArchLinux
|
#### ArchLinux
|
||||||
|
|
||||||
Installing prerequisites on ArchLinux:
|
Installing prerequisites on ArchLinux:
|
||||||
|
|
||||||
```sh
|
```
|
||||||
sudo pacman -S base-devel python python-pip \
|
sudo pacman -S base-devel python python-pip \
|
||||||
python-setuptools python-virtualenv sqlite3
|
python-setuptools python-virtualenv sqlite3
|
||||||
```
|
```
|
||||||
|
|
||||||
##### CentOS/Fedora
|
#### CentOS/Fedora
|
||||||
|
|
||||||
Installing prerequisites on CentOS or Fedora Linux:
|
Installing prerequisites on CentOS 7 or Fedora 25:
|
||||||
|
|
||||||
```sh
|
```
|
||||||
sudo dnf install libtiff-devel libjpeg-devel libzip-devel freetype-devel \
|
sudo yum install libtiff-devel libjpeg-devel libzip-devel freetype-devel \
|
||||||
libwebp-devel libxml2-devel libxslt-devel libpq-devel \
|
lcms2-devel libwebp-devel tcl-devel tk-devel redhat-rpm-config \
|
||||||
python3-virtualenv libffi-devel openssl-devel python3-devel
|
python-virtualenv libffi-devel openssl-devel
|
||||||
sudo dnf groupinstall "Development Tools"
|
sudo yum groupinstall "Development Tools"
|
||||||
```
|
```
|
||||||
|
|
||||||
##### macOS
|
#### Mac OS X
|
||||||
|
|
||||||
Installing prerequisites on macOS:
|
Installing prerequisites on Mac OS X:
|
||||||
|
|
||||||
```sh
|
```
|
||||||
xcode-select --install
|
xcode-select --install
|
||||||
sudo easy_install pip
|
sudo easy_install pip
|
||||||
sudo pip install virtualenv
|
sudo pip install virtualenv
|
||||||
brew install pkg-config libffi
|
brew install pkg-config libffi
|
||||||
```
|
```
|
||||||
|
|
||||||
On macOS Catalina (10.15) you may need to explicitly install OpenSSL
|
#### OpenSUSE
|
||||||
via brew and inform `pip` about it so that `psycopg2` builds:
|
|
||||||
|
|
||||||
```sh
|
|
||||||
brew install openssl@1.1
|
|
||||||
export LDFLAGS="-L/usr/local/opt/openssl/lib"
|
|
||||||
export CPPFLAGS="-I/usr/local/opt/openssl/include"
|
|
||||||
```
|
|
||||||
|
|
||||||
##### OpenSUSE
|
|
||||||
|
|
||||||
Installing prerequisites on openSUSE:
|
Installing prerequisites on openSUSE:
|
||||||
|
|
||||||
```sh
|
```
|
||||||
sudo zypper in -t pattern devel_basis
|
sudo zypper in -t pattern devel_basis
|
||||||
sudo zypper in python-pip python-setuptools sqlite3 python-virtualenv \
|
sudo zypper in python-pip python-setuptools sqlite3 python-virtualenv \
|
||||||
python-devel libffi-devel libopenssl-devel libjpeg62-devel
|
python-devel libffi-devel libopenssl-devel libjpeg62-devel
|
||||||
```
|
```
|
||||||
|
|
||||||
##### OpenBSD
|
#### OpenBSD
|
||||||
|
|
||||||
A port of Synapse is available under `net/synapse`. The filesystem
|
Installing prerequisites on OpenBSD:
|
||||||
underlying the homeserver directory (defaults to `/var/synapse`) has to be
|
|
||||||
mounted with `wxallowed` (cf. `mount(8)`), so creating a separate filesystem
|
|
||||||
and mounting it to `/var/synapse` should be taken into consideration.
|
|
||||||
|
|
||||||
To be able to build Synapse's dependency on python the `WRKOBJDIR`
|
```
|
||||||
(cf. `bsd.port.mk(5)`) for building python, too, needs to be on a filesystem
|
doas pkg_add python libffi py-pip py-setuptools sqlite3 py-virtualenv \
|
||||||
mounted with `wxallowed` (cf. `mount(8)`).
|
libxslt jpeg
|
||||||
|
|
||||||
Creating a `WRKOBJDIR` for building python under `/usr/local` (which on a
|
|
||||||
default OpenBSD installation is mounted with `wxallowed`):
|
|
||||||
|
|
||||||
```sh
|
|
||||||
doas mkdir /usr/local/pobj_wxallowed
|
|
||||||
```
|
```
|
||||||
|
|
||||||
Assuming `PORTS_PRIVSEP=Yes` (cf. `bsd.port.mk(5)`) and `SUDO=doas` are
|
There is currently no port for OpenBSD. Additionally, OpenBSD's security
|
||||||
configured in `/etc/mk.conf`:
|
settings require a slightly more difficult installation process.
|
||||||
|
|
||||||
```sh
|
XXX: I suspect this is out of date.
|
||||||
doas chown _pbuild:_pbuild /usr/local/pobj_wxallowed
|
|
||||||
```
|
|
||||||
|
|
||||||
Setting the `WRKOBJDIR` for building python:
|
1. Create a new directory in `/usr/local` called `_synapse`. Also, create a
|
||||||
|
new user called `_synapse` and set that directory as the new user's home.
|
||||||
|
This is required because, by default, OpenBSD only allows binaries which need
|
||||||
|
write and execute permissions on the same memory space to be run from
|
||||||
|
`/usr/local`.
|
||||||
|
2. `su` to the new `_synapse` user and change to their home directory.
|
||||||
|
3. Create a new virtualenv: `virtualenv -p python2.7 ~/.synapse`
|
||||||
|
4. Source the virtualenv configuration located at
|
||||||
|
`/usr/local/_synapse/.synapse/bin/activate`. This is done in `ksh` by
|
||||||
|
using the `.` command, rather than `bash`'s `source`.
|
||||||
|
5. Optionally, use `pip` to install `lxml`, which Synapse needs to parse
|
||||||
|
webpages for their titles.
|
||||||
|
6. Use `pip` to install this repository: `pip install matrix-synapse`
|
||||||
|
7. Optionally, change `_synapse`'s shell to `/bin/false` to reduce the
|
||||||
|
chance of a compromised Synapse server being used to take over your box.
|
||||||
|
|
||||||
```sh
|
After this, you may proceed with the rest of the install directions.
|
||||||
echo WRKOBJDIR_lang/python/3.7=/usr/local/pobj_wxallowed \\nWRKOBJDIR_lang/python/2.7=/usr/local/pobj_wxallowed >> /etc/mk.conf
|
|
||||||
```
|
|
||||||
|
|
||||||
Building Synapse:
|
#### Windows
|
||||||
|
|
||||||
```sh
|
|
||||||
cd /usr/ports/net/synapse
|
|
||||||
make install
|
|
||||||
```
|
|
||||||
|
|
||||||
##### Windows
|
|
||||||
|
|
||||||
If you wish to run or develop Synapse on Windows, the Windows Subsystem For
|
If you wish to run or develop Synapse on Windows, the Windows Subsystem For
|
||||||
Linux provides a Linux environment on Windows 10 which is capable of using the
|
Linux provides a Linux environment on Windows 10 which is capable of using the
|
||||||
Debian, Fedora, or source installation methods. More information about WSL can
|
Debian, Fedora, or source installation methods. More information about WSL can
|
||||||
be found at <https://docs.microsoft.com/en-us/windows/wsl/install-win10> for
|
be found at https://docs.microsoft.com/en-us/windows/wsl/install-win10 for
|
||||||
Windows 10 and <https://docs.microsoft.com/en-us/windows/wsl/install-on-server>
|
Windows 10 and https://docs.microsoft.com/en-us/windows/wsl/install-on-server
|
||||||
for Windows Server.
|
for Windows Server.
|
||||||
|
|
||||||
### Prebuilt packages
|
### Troubleshooting Installation
|
||||||
|
|
||||||
|
XXX a bunch of this is no longer relevant.
|
||||||
|
|
||||||
|
Synapse requires pip 8 or later, so if your OS provides too old a version you
|
||||||
|
may need to manually upgrade it::
|
||||||
|
|
||||||
|
sudo pip install --upgrade pip
|
||||||
|
|
||||||
|
Installing may fail with `Could not find any downloads that satisfy the requirement pymacaroons-pynacl (from matrix-synapse==0.12.0)`.
|
||||||
|
You can fix this by manually upgrading pip and virtualenv::
|
||||||
|
|
||||||
|
sudo pip install --upgrade virtualenv
|
||||||
|
|
||||||
|
You can next rerun `virtualenv -p python3 synapse` to update the virtual env.
|
||||||
|
|
||||||
|
Installing may fail during installing virtualenv with `InsecurePlatformWarning: A true SSLContext object is not available. This prevents urllib3 from configuring SSL appropriately and may cause certain SSL connections to fail. For more information, see https://urllib3.readthedocs.org/en/latest/security.html#insecureplatformwarning.`
|
||||||
|
You can fix this by manually installing ndg-httpsclient::
|
||||||
|
|
||||||
|
pip install --upgrade ndg-httpsclient
|
||||||
|
|
||||||
|
Installing may fail with `mock requires setuptools>=17.1. Aborting installation`.
|
||||||
|
You can fix this by upgrading setuptools::
|
||||||
|
|
||||||
|
pip install --upgrade setuptools
|
||||||
|
|
||||||
|
If pip crashes mid-installation for reason (e.g. lost terminal), pip may
|
||||||
|
refuse to run until you remove the temporary installation directory it
|
||||||
|
created. To reset the installation::
|
||||||
|
|
||||||
|
rm -rf /tmp/pip_install_matrix
|
||||||
|
|
||||||
|
pip seems to leak *lots* of memory during installation. For instance, a Linux
|
||||||
|
host with 512MB of RAM may run out of memory whilst installing Twisted. If this
|
||||||
|
happens, you will have to individually install the dependencies which are
|
||||||
|
failing, e.g.::
|
||||||
|
|
||||||
|
pip install twisted
|
||||||
|
|
||||||
|
## Prebuilt packages
|
||||||
|
|
||||||
As an alternative to installing from source, prebuilt packages are available
|
As an alternative to installing from source, prebuilt packages are available
|
||||||
for a number of platforms.
|
for a number of platforms.
|
||||||
|
|
||||||
#### Docker images and Ansible playbooks
|
### Docker images and Ansible playbooks
|
||||||
|
|
||||||
There is an official synapse image available at
|
There is an offical synapse image available at
|
||||||
<https://hub.docker.com/r/matrixdotorg/synapse> which can be used with
|
https://hub.docker.com/r/matrixdotorg/synapse which can be used with
|
||||||
the docker-compose file available at [contrib/docker](contrib/docker). Further
|
the docker-compose file available at [contrib/docker](contrib/docker). Further information on
|
||||||
information on this including configuration options is available in the README
|
this including configuration options is available in the README on
|
||||||
on hub.docker.com.
|
hub.docker.com.
|
||||||
|
|
||||||
Alternatively, Andreas Peters (previously Silvio Fricke) has contributed a
|
Alternatively, Andreas Peters (previously Silvio Fricke) has contributed a
|
||||||
Dockerfile to automate a synapse server in a single Docker image, at
|
Dockerfile to automate a synapse server in a single Docker image, at
|
||||||
<https://hub.docker.com/r/avhost/docker-matrix/tags/>
|
https://hub.docker.com/r/avhost/docker-matrix/tags/
|
||||||
|
|
||||||
Slavi Pantaleev has created an Ansible playbook,
|
Slavi Pantaleev has created an Ansible playbook,
|
||||||
which installs the official Docker image of Matrix Synapse
|
which installs the official Docker image of Matrix Synapse
|
||||||
along with many other Matrix-related services (Postgres database, Element, coturn,
|
along with many other Matrix-related services (Postgres database, riot-web, coturn, mxisd, SSL support, etc.).
|
||||||
ma1sd, SSL support, etc.).
|
|
||||||
For more details, see
|
For more details, see
|
||||||
<https://github.com/spantaleev/matrix-docker-ansible-deploy>
|
https://github.com/spantaleev/matrix-docker-ansible-deploy
|
||||||
|
|
||||||
#### Debian/Ubuntu
|
|
||||||
|
|
||||||
##### Matrix.org packages
|
### Debian/Ubuntu
|
||||||
|
|
||||||
|
#### Matrix.org packages
|
||||||
|
|
||||||
Matrix.org provides Debian/Ubuntu packages of the latest stable version of
|
Matrix.org provides Debian/Ubuntu packages of the latest stable version of
|
||||||
Synapse via <https://packages.matrix.org/debian/>. They are available for Debian
|
Synapse via https://packages.matrix.org/debian/. They are available for Debian
|
||||||
9 (Stretch), Ubuntu 16.04 (Xenial), and later. To use them:
|
9 (Stretch), Ubuntu 16.04 (Xenial), and later. To use them:
|
||||||
|
|
||||||
```sh
|
```
|
||||||
sudo apt install -y lsb-release wget apt-transport-https
|
sudo apt install -y lsb-release wget apt-transport-https
|
||||||
sudo wget -O /usr/share/keyrings/matrix-org-archive-keyring.gpg https://packages.matrix.org/debian/matrix-org-archive-keyring.gpg
|
sudo wget -O /usr/share/keyrings/matrix-org-archive-keyring.gpg https://packages.matrix.org/debian/matrix-org-archive-keyring.gpg
|
||||||
echo "deb [signed-by=/usr/share/keyrings/matrix-org-archive-keyring.gpg] https://packages.matrix.org/debian/ $(lsb_release -cs) main" |
|
echo "deb [signed-by=/usr/share/keyrings/matrix-org-archive-keyring.gpg] https://packages.matrix.org/debian/ $(lsb_release -cs) main" |
|
||||||
@@ -291,61 +286,56 @@ The fingerprint of the repository signing key (as shown by `gpg
|
|||||||
/usr/share/keyrings/matrix-org-archive-keyring.gpg`) is
|
/usr/share/keyrings/matrix-org-archive-keyring.gpg`) is
|
||||||
`AAF9AE843A7584B5A3E4CD2BCF45A512DE2DA058`.
|
`AAF9AE843A7584B5A3E4CD2BCF45A512DE2DA058`.
|
||||||
|
|
||||||
##### Downstream Debian packages
|
#### Downstream Debian/Ubuntu packages
|
||||||
|
|
||||||
We do not recommend using the packages from the default Debian `buster`
|
For `buster` and `sid`, Synapse is available in the Debian repositories and
|
||||||
repository at this time, as they are old and suffer from known security
|
it should be possible to install it with simply:
|
||||||
vulnerabilities. You can install the latest version of Synapse from
|
|
||||||
[our repository](#matrixorg-packages) or from `buster-backports`. Please
|
|
||||||
see the [Debian documentation](https://backports.debian.org/Instructions/)
|
|
||||||
for information on how to use backports.
|
|
||||||
|
|
||||||
If you are using Debian `sid` or testing, Synapse is available in the default
|
```
|
||||||
repositories and it should be possible to install it simply with:
|
sudo apt install matrix-synapse
|
||||||
|
|
||||||
```sh
|
|
||||||
sudo apt install matrix-synapse
|
|
||||||
```
|
```
|
||||||
|
|
||||||
##### Downstream Ubuntu packages
|
There is also a version of `matrix-synapse` in `stretch-backports`. Please see
|
||||||
|
the [Debian documentation on
|
||||||
|
backports](https://backports.debian.org/Instructions/) for information on how
|
||||||
|
to use them.
|
||||||
|
|
||||||
We do not recommend using the packages in the default Ubuntu repository
|
We do not recommend using the packages in downstream Ubuntu at this time, as
|
||||||
at this time, as they are old and suffer from known security vulnerabilities.
|
they are old and suffer from known security vulnerabilities.
|
||||||
The latest version of Synapse can be installed from [our repository](#matrixorg-packages).
|
|
||||||
|
|
||||||
#### Fedora
|
### Fedora
|
||||||
|
|
||||||
Synapse is in the Fedora repositories as `matrix-synapse`:
|
Synapse is in the Fedora repositories as `matrix-synapse`:
|
||||||
|
|
||||||
```sh
|
```
|
||||||
sudo dnf install matrix-synapse
|
sudo dnf install matrix-synapse
|
||||||
```
|
```
|
||||||
|
|
||||||
Oleg Girko provides Fedora RPMs at
|
Oleg Girko provides Fedora RPMs at
|
||||||
<https://obs.infoserver.lv/project/monitor/matrix-synapse>
|
https://obs.infoserver.lv/project/monitor/matrix-synapse
|
||||||
|
|
||||||
#### OpenSUSE
|
### OpenSUSE
|
||||||
|
|
||||||
Synapse is in the OpenSUSE repositories as `matrix-synapse`:
|
Synapse is in the OpenSUSE repositories as `matrix-synapse`:
|
||||||
|
|
||||||
```sh
|
```
|
||||||
sudo zypper install matrix-synapse
|
sudo zypper install matrix-synapse
|
||||||
```
|
```
|
||||||
|
|
||||||
#### SUSE Linux Enterprise Server
|
### SUSE Linux Enterprise Server
|
||||||
|
|
||||||
Unofficial packages are built for SLES 15 in the openSUSE:Backports:SLE-15 repository at
|
Unofficial packages are built for SLES 15 in the openSUSE:Backports:SLE-15 repository at
|
||||||
<https://download.opensuse.org/repositories/openSUSE:/Backports:/SLE-15/standard/>
|
https://download.opensuse.org/repositories/openSUSE:/Backports:/SLE-15/standard/
|
||||||
|
|
||||||
#### ArchLinux
|
### ArchLinux
|
||||||
|
|
||||||
The quickest way to get up and running with ArchLinux is probably with the community package
|
The quickest way to get up and running with ArchLinux is probably with the community package
|
||||||
<https://www.archlinux.org/packages/community/any/matrix-synapse/>, which should pull in most of
|
https://www.archlinux.org/packages/community/any/matrix-synapse/, which should pull in most of
|
||||||
the necessary dependencies.
|
the necessary dependencies.
|
||||||
|
|
||||||
pip may be outdated (6.0.7-1 and needs to be upgraded to 6.0.8-1):
|
pip may be outdated (6.0.7-1 and needs to be upgraded to 6.0.8-1):
|
||||||
|
|
||||||
```sh
|
```
|
||||||
sudo pip install --upgrade pip
|
sudo pip install --upgrade pip
|
||||||
```
|
```
|
||||||
|
|
||||||
@@ -354,198 +344,92 @@ ELFCLASS32 (x64 Systems), you may need to reinstall py-bcrypt to correctly
|
|||||||
compile it under the right architecture. (This should not be needed if
|
compile it under the right architecture. (This should not be needed if
|
||||||
installing under virtualenv):
|
installing under virtualenv):
|
||||||
|
|
||||||
```sh
|
```
|
||||||
sudo pip uninstall py-bcrypt
|
sudo pip uninstall py-bcrypt
|
||||||
sudo pip install py-bcrypt
|
sudo pip install py-bcrypt
|
||||||
```
|
```
|
||||||
|
|
||||||
#### Void Linux
|
### FreeBSD
|
||||||
|
|
||||||
Synapse can be found in the void repositories as 'synapse':
|
|
||||||
|
|
||||||
```sh
|
|
||||||
xbps-install -Su
|
|
||||||
xbps-install -S synapse
|
|
||||||
```
|
|
||||||
|
|
||||||
#### FreeBSD
|
|
||||||
|
|
||||||
Synapse can be installed via FreeBSD Ports or Packages contributed by Brendan Molloy from:
|
Synapse can be installed via FreeBSD Ports or Packages contributed by Brendan Molloy from:
|
||||||
|
|
||||||
- Ports: `cd /usr/ports/net-im/py-matrix-synapse && make install clean`
|
- Ports: `cd /usr/ports/net-im/py-matrix-synapse && make install clean`
|
||||||
- Packages: `pkg install py37-matrix-synapse`
|
- Packages: `pkg install py27-matrix-synapse`
|
||||||
|
|
||||||
#### OpenBSD
|
|
||||||
|
|
||||||
As of OpenBSD 6.7 Synapse is available as a pre-compiled binary. The filesystem
|
### NixOS
|
||||||
underlying the homeserver directory (defaults to `/var/synapse`) has to be
|
|
||||||
mounted with `wxallowed` (cf. `mount(8)`), so creating a separate filesystem
|
|
||||||
and mounting it to `/var/synapse` should be taken into consideration.
|
|
||||||
|
|
||||||
Installing Synapse:
|
|
||||||
|
|
||||||
```sh
|
|
||||||
doas pkg_add synapse
|
|
||||||
```
|
|
||||||
|
|
||||||
#### NixOS
|
|
||||||
|
|
||||||
Robin Lambertz has packaged Synapse for NixOS at:
|
Robin Lambertz has packaged Synapse for NixOS at:
|
||||||
<https://github.com/NixOS/nixpkgs/blob/master/nixos/modules/services/misc/matrix-synapse.nix>
|
https://github.com/NixOS/nixpkgs/blob/master/nixos/modules/services/misc/matrix-synapse.nix
|
||||||
|
|
||||||
## Setting up Synapse
|
# Setting up Synapse
|
||||||
|
|
||||||
Once you have installed synapse as above, you will need to configure it.
|
Once you have installed synapse as above, you will need to configure it.
|
||||||
|
|
||||||
### Using PostgreSQL
|
## TLS certificates
|
||||||
|
|
||||||
By default Synapse uses [SQLite](https://sqlite.org/) and in doing so trades performance for convenience.
|
The default configuration exposes a single HTTP port: http://localhost:8008. It
|
||||||
SQLite is only recommended in Synapse for testing purposes or for servers with
|
is suitable for local testing, but for any practical use, you will either need
|
||||||
very light workloads.
|
to enable a reverse proxy, or configure Synapse to expose an HTTPS port.
|
||||||
|
|
||||||
Almost all installations should opt to use [PostgreSQL](https://www.postgresql.org). Advantages include:
|
For information on using a reverse proxy, see
|
||||||
|
[docs/reverse_proxy.rst](docs/reverse_proxy.rst).
|
||||||
|
|
||||||
- significant performance improvements due to the superior threading and
|
To configure Synapse to expose an HTTPS port, you will need to edit
|
||||||
caching model, smarter query optimiser
|
`homeserver.yaml`, as follows:
|
||||||
- allowing the DB to be run on separate hardware
|
|
||||||
|
|
||||||
For information on how to install and use PostgreSQL in Synapse, please see
|
* First, under the `listeners` section, uncomment the configuration for the
|
||||||
[docs/postgres.md](docs/postgres.md)
|
|
||||||
|
|
||||||
### TLS certificates
|
|
||||||
|
|
||||||
The default configuration exposes a single HTTP port on the local
|
|
||||||
interface: `http://localhost:8008`. It is suitable for local testing,
|
|
||||||
but for any practical use, you will need Synapse's APIs to be served
|
|
||||||
over HTTPS.
|
|
||||||
|
|
||||||
The recommended way to do so is to set up a reverse proxy on port
|
|
||||||
`8448`. You can find documentation on doing so in
|
|
||||||
[docs/reverse_proxy.md](docs/reverse_proxy.md).
|
|
||||||
|
|
||||||
Alternatively, you can configure Synapse to expose an HTTPS port. To do
|
|
||||||
so, you will need to edit `homeserver.yaml`, as follows:
|
|
||||||
|
|
||||||
- First, under the `listeners` section, uncomment the configuration for the
|
|
||||||
TLS-enabled listener. (Remove the hash sign (`#`) at the start of
|
TLS-enabled listener. (Remove the hash sign (`#`) at the start of
|
||||||
each line). The relevant lines are like this:
|
each line). The relevant lines are like this:
|
||||||
|
|
||||||
```yaml
|
|
||||||
- port: 8448
|
|
||||||
type: http
|
|
||||||
tls: true
|
|
||||||
resources:
|
|
||||||
- names: [client, federation]
|
|
||||||
```
|
```
|
||||||
|
- port: 8448
|
||||||
- You will also need to uncomment the `tls_certificate_path` and
|
type: http
|
||||||
`tls_private_key_path` lines under the `TLS` section. You will need to manage
|
tls: true
|
||||||
provisioning of these certificates yourself — Synapse had built-in ACME
|
resources:
|
||||||
support, but the ACMEv1 protocol Synapse implements is deprecated, not
|
- names: [client, federation]
|
||||||
allowed by LetsEncrypt for new sites, and will break for existing sites in
|
```
|
||||||
late 2020. See [ACME.md](docs/ACME.md).
|
* You will also need to uncomment the `tls_certificate_path` and
|
||||||
|
`tls_private_key_path` lines under the `TLS` section. You can either
|
||||||
If you are using your own certificate, be sure to use a `.pem` file that
|
point these settings at an existing certificate and key, or you can
|
||||||
includes the full certificate chain including any intermediate certificates
|
enable Synapse's built-in ACME (Let's Encrypt) support. Instructions
|
||||||
(for instance, if using certbot, use `fullchain.pem` as your certificate, not
|
for having Synapse automatically provision and renew federation
|
||||||
|
certificates through ACME can be found at [ACME.md](docs/ACME.md). If you
|
||||||
|
are using your own certificate, be sure to use a `.pem` file that includes
|
||||||
|
the full certificate chain including any intermediate certificates (for
|
||||||
|
instance, if using certbot, use `fullchain.pem` as your certificate, not
|
||||||
`cert.pem`).
|
`cert.pem`).
|
||||||
|
|
||||||
For a more detailed guide to configuring your server for federation, see
|
For a more detailed guide to configuring your server for federation, see
|
||||||
[federate.md](docs/federate.md).
|
[federate.md](docs/federate.md)
|
||||||
|
|
||||||
### Client Well-Known URI
|
|
||||||
|
|
||||||
Setting up the client Well-Known URI is optional but if you set it up, it will
|
## Email
|
||||||
allow users to enter their full username (e.g. `@user:<server_name>`) into clients
|
|
||||||
which support well-known lookup to automatically configure the homeserver and
|
|
||||||
identity server URLs. This is useful so that users don't have to memorize or think
|
|
||||||
about the actual homeserver URL you are using.
|
|
||||||
|
|
||||||
The URL `https://<server_name>/.well-known/matrix/client` should return JSON in
|
It is desirable for Synapse to have the capability to send email. For example,
|
||||||
the following format.
|
this is required to support the 'password reset' feature.
|
||||||
|
|
||||||
```json
|
|
||||||
{
|
|
||||||
"m.homeserver": {
|
|
||||||
"base_url": "https://<matrix.example.com>"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
It can optionally contain identity server information as well.
|
|
||||||
|
|
||||||
```json
|
|
||||||
{
|
|
||||||
"m.homeserver": {
|
|
||||||
"base_url": "https://<matrix.example.com>"
|
|
||||||
},
|
|
||||||
"m.identity_server": {
|
|
||||||
"base_url": "https://<identity.example.com>"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
To work in browser based clients, the file must be served with the appropriate
|
|
||||||
Cross-Origin Resource Sharing (CORS) headers. A recommended value would be
|
|
||||||
`Access-Control-Allow-Origin: *` which would allow all browser based clients to
|
|
||||||
view it.
|
|
||||||
|
|
||||||
In nginx this would be something like:
|
|
||||||
|
|
||||||
```nginx
|
|
||||||
location /.well-known/matrix/client {
|
|
||||||
return 200 '{"m.homeserver": {"base_url": "https://<matrix.example.com>"}}';
|
|
||||||
default_type application/json;
|
|
||||||
add_header Access-Control-Allow-Origin *;
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
You should also ensure the `public_baseurl` option in `homeserver.yaml` is set
|
|
||||||
correctly. `public_baseurl` should be set to the URL that clients will use to
|
|
||||||
connect to your server. This is the same URL you put for the `m.homeserver`
|
|
||||||
`base_url` above.
|
|
||||||
|
|
||||||
```yaml
|
|
||||||
public_baseurl: "https://<matrix.example.com>"
|
|
||||||
```
|
|
||||||
|
|
||||||
### Email
|
|
||||||
|
|
||||||
It is desirable for Synapse to have the capability to send email. This allows
|
|
||||||
Synapse to send password reset emails, send verifications when an email address
|
|
||||||
is added to a user's account, and send email notifications to users when they
|
|
||||||
receive new messages.
|
|
||||||
|
|
||||||
To configure an SMTP server for Synapse, modify the configuration section
|
To configure an SMTP server for Synapse, modify the configuration section
|
||||||
headed `email`, and be sure to have at least the `smtp_host`, `smtp_port`
|
headed ``email``, and be sure to have at least the ``smtp_host``, ``smtp_port``
|
||||||
and `notif_from` fields filled out. You may also need to set `smtp_user`,
|
and ``notif_from`` fields filled out. You may also need to set ``smtp_user``,
|
||||||
`smtp_pass`, and `require_transport_security`.
|
``smtp_pass``, and ``require_transport_security``.
|
||||||
|
|
||||||
If email is not configured, password reset, registration and notifications via
|
If Synapse is not configured with an SMTP server, password reset via email will
|
||||||
email will be disabled.
|
be disabled by default.
|
||||||
|
|
||||||
### Registering a user
|
## Registering a user
|
||||||
|
|
||||||
The easiest way to create a new user is to do so from a client like [Element](https://element.io/).
|
You will need at least one user on your server in order to use a Matrix
|
||||||
|
client. Users can be registered either via a Matrix client, or via a
|
||||||
|
commandline script.
|
||||||
|
|
||||||
Alternatively, you can do so from the command line. This can be done as follows:
|
To get started, it is easiest to use the command line to register new
|
||||||
|
users. This can be done as follows:
|
||||||
|
|
||||||
1. If synapse was installed via pip, activate the virtualenv as follows (if Synapse was
|
|
||||||
installed via a prebuilt package, `register_new_matrix_user` should already be
|
|
||||||
on the search path):
|
|
||||||
```sh
|
|
||||||
cd ~/synapse
|
|
||||||
source env/bin/activate
|
|
||||||
synctl start # if not already running
|
|
||||||
```
|
|
||||||
2. Run the following command:
|
|
||||||
```sh
|
|
||||||
register_new_matrix_user -c homeserver.yaml http://localhost:8008
|
|
||||||
```
|
|
||||||
|
|
||||||
This will prompt you to add details for the new user, and will then connect to
|
|
||||||
the running Synapse to create the new user. For example:
|
|
||||||
```
|
```
|
||||||
|
$ source ~/synapse/env/bin/activate
|
||||||
|
$ synctl start # if not already running
|
||||||
|
$ register_new_matrix_user -c homeserver.yaml http://localhost:8008
|
||||||
New user localpart: erikj
|
New user localpart: erikj
|
||||||
Password:
|
Password:
|
||||||
Confirm password:
|
Confirm password:
|
||||||
@@ -560,35 +444,22 @@ value is generated by `--generate-config`), but it should be kept secret, as
|
|||||||
anyone with knowledge of it can register users, including admin accounts,
|
anyone with knowledge of it can register users, including admin accounts,
|
||||||
on your server even if `enable_registration` is `false`.
|
on your server even if `enable_registration` is `false`.
|
||||||
|
|
||||||
### Setting up a TURN server
|
## Setting up a TURN server
|
||||||
|
|
||||||
For reliable VoIP calls to be routed via this homeserver, you MUST configure
|
For reliable VoIP calls to be routed via this homeserver, you MUST configure
|
||||||
a TURN server. See [docs/turn-howto.md](docs/turn-howto.md) for details.
|
a TURN server. See [docs/turn-howto.rst](docs/turn-howto.rst) for details.
|
||||||
|
|
||||||
### URL previews
|
## URL previews
|
||||||
|
|
||||||
Synapse includes support for previewing URLs, which is disabled by default. To
|
Synapse includes support for previewing URLs, which is disabled by default. To
|
||||||
turn it on you must enable the `url_preview_enabled: True` config parameter
|
turn it on you must enable the `url_preview_enabled: True` config parameter
|
||||||
and explicitly specify the IP ranges that Synapse is not allowed to spider for
|
and explicitly specify the IP ranges that Synapse is not allowed to spider for
|
||||||
previewing in the `url_preview_ip_range_blacklist` configuration parameter.
|
previewing in the `url_preview_ip_range_blacklist` configuration parameter.
|
||||||
This is critical from a security perspective to stop arbitrary Matrix users
|
This is critical from a security perspective to stop arbitrary Matrix users
|
||||||
spidering 'internal' URLs on your network. At the very least we recommend that
|
spidering 'internal' URLs on your network. At the very least we recommend that
|
||||||
your loopback and RFC1918 IP addresses are blacklisted.
|
your loopback and RFC1918 IP addresses are blacklisted.
|
||||||
|
|
||||||
This also requires the optional `lxml` python dependency to be installed. This
|
This also requires the optional lxml and netaddr python dependencies to be
|
||||||
in turn requires the `libxml2` library to be available - on Debian/Ubuntu this
|
installed. This in turn requires the libxml2 library to be available - on
|
||||||
means `apt-get install libxml2-dev`, or equivalent for your OS.
|
Debian/Ubuntu this means `apt-get install libxml2-dev`, or equivalent for
|
||||||
|
your OS.
|
||||||
### Troubleshooting Installation
|
|
||||||
|
|
||||||
`pip` seems to leak *lots* of memory during installation. For instance, a Linux
|
|
||||||
host with 512MB of RAM may run out of memory whilst installing Twisted. If this
|
|
||||||
happens, you will have to individually install the dependencies which are
|
|
||||||
failing, e.g.:
|
|
||||||
|
|
||||||
```sh
|
|
||||||
pip install twisted
|
|
||||||
```
|
|
||||||
|
|
||||||
If you have any other problems, feel free to ask in
|
|
||||||
[#synapse:matrix.org](https://matrix.to/#/#synapse:matrix.org).
|
|
||||||
|
|||||||
+19
-22
@@ -7,23 +7,22 @@ include demo/README
|
|||||||
include demo/demo.tls.dh
|
include demo/demo.tls.dh
|
||||||
include demo/*.py
|
include demo/*.py
|
||||||
include demo/*.sh
|
include demo/*.sh
|
||||||
|
include sytest-blacklist
|
||||||
|
|
||||||
recursive-include synapse/storage *.sql
|
recursive-include synapse/storage/schema *.sql
|
||||||
recursive-include synapse/storage *.sql.postgres
|
recursive-include synapse/storage/schema *.sql.postgres
|
||||||
recursive-include synapse/storage *.sql.sqlite
|
recursive-include synapse/storage/schema *.sql.sqlite
|
||||||
recursive-include synapse/storage *.py
|
recursive-include synapse/storage/schema *.py
|
||||||
recursive-include synapse/storage *.txt
|
recursive-include synapse/storage/schema *.txt
|
||||||
recursive-include synapse/storage *.md
|
|
||||||
|
|
||||||
recursive-include docs *
|
recursive-include docs *
|
||||||
recursive-include scripts *
|
recursive-include scripts *
|
||||||
recursive-include scripts-dev *
|
recursive-include scripts-dev *
|
||||||
recursive-include synapse *.pyi
|
recursive-include synapse *.pyi
|
||||||
recursive-include tests *.py
|
recursive-include tests *.py
|
||||||
recursive-include tests *.pem
|
include tests/http/ca.crt
|
||||||
recursive-include tests *.p8
|
include tests/http/ca.key
|
||||||
recursive-include tests *.crt
|
include tests/http/server.key
|
||||||
recursive-include tests *.key
|
|
||||||
|
|
||||||
recursive-include synapse/res *
|
recursive-include synapse/res *
|
||||||
recursive-include synapse/static *.css
|
recursive-include synapse/static *.css
|
||||||
@@ -31,24 +30,22 @@ recursive-include synapse/static *.gif
|
|||||||
recursive-include synapse/static *.html
|
recursive-include synapse/static *.html
|
||||||
recursive-include synapse/static *.js
|
recursive-include synapse/static *.js
|
||||||
|
|
||||||
exclude .codecov.yml
|
|
||||||
exclude .coveragerc
|
|
||||||
exclude .dockerignore
|
|
||||||
exclude .editorconfig
|
|
||||||
exclude Dockerfile
|
exclude Dockerfile
|
||||||
exclude mypy.ini
|
exclude .dockerignore
|
||||||
exclude sytest-blacklist
|
|
||||||
exclude test_postgresql.sh
|
exclude test_postgresql.sh
|
||||||
|
exclude .editorconfig
|
||||||
|
|
||||||
include pyproject.toml
|
include pyproject.toml
|
||||||
recursive-include changelog.d *
|
recursive-include changelog.d *
|
||||||
|
|
||||||
prune .buildkite
|
|
||||||
prune .circleci
|
|
||||||
prune .github
|
prune .github
|
||||||
prune contrib
|
|
||||||
prune debian
|
|
||||||
prune demo/etc
|
prune demo/etc
|
||||||
prune docker
|
prune docker
|
||||||
prune snap
|
prune .circleci
|
||||||
prune stubs
|
prune .coveragerc
|
||||||
|
prune debian
|
||||||
|
prune .codecov.yml
|
||||||
|
prune .buildkite
|
||||||
|
|
||||||
|
exclude jenkins*
|
||||||
|
recursive-exclude jenkins *.sh
|
||||||
|
|||||||
+58
-109
@@ -1,7 +1,3 @@
|
|||||||
=========================================================
|
|
||||||
Synapse |support| |development| |license| |pypi| |python|
|
|
||||||
=========================================================
|
|
||||||
|
|
||||||
.. contents::
|
.. contents::
|
||||||
|
|
||||||
Introduction
|
Introduction
|
||||||
@@ -41,7 +37,7 @@ which handle:
|
|||||||
- Eventually-consistent cryptographically secure synchronisation of room
|
- Eventually-consistent cryptographically secure synchronisation of room
|
||||||
state across a global open network of federated servers and services
|
state across a global open network of federated servers and services
|
||||||
- Sending and receiving extensible messages in a room with (optional)
|
- Sending and receiving extensible messages in a room with (optional)
|
||||||
end-to-end encryption
|
end-to-end encryption[1]
|
||||||
- Inviting, joining, leaving, kicking, banning room members
|
- Inviting, joining, leaving, kicking, banning room members
|
||||||
- Managing user accounts (registration, login, logout)
|
- Managing user accounts (registration, login, logout)
|
||||||
- Using 3rd Party IDs (3PIDs) such as email addresses, phone numbers,
|
- Using 3rd Party IDs (3PIDs) such as email addresses, phone numbers,
|
||||||
@@ -78,15 +74,7 @@ at the `Matrix spec <https://matrix.org/docs/spec>`_, and experiment with the
|
|||||||
|
|
||||||
Thanks for using Matrix!
|
Thanks for using Matrix!
|
||||||
|
|
||||||
Support
|
[1] End-to-end encryption is currently in beta: `blog post <https://matrix.org/blog/2016/11/21/matrixs-olm-end-to-end-encryption-security-assessment-released-and-implemented-cross-platform-on-riot-at-last>`_.
|
||||||
=======
|
|
||||||
|
|
||||||
For support installing or managing Synapse, please join |room|_ (from a matrix.org
|
|
||||||
account if necessary) and ask questions there. We do not use GitHub issues for
|
|
||||||
support requests, only for bug reports and feature requests.
|
|
||||||
|
|
||||||
.. |room| replace:: ``#synapse:matrix.org``
|
|
||||||
.. _room: https://matrix.to/#/#synapse:matrix.org
|
|
||||||
|
|
||||||
|
|
||||||
Synapse Installation
|
Synapse Installation
|
||||||
@@ -108,11 +96,12 @@ Unless you are running a test instance of Synapse on your local machine, in
|
|||||||
general, you will need to enable TLS support before you can successfully
|
general, you will need to enable TLS support before you can successfully
|
||||||
connect from a client: see `<INSTALL.md#tls-certificates>`_.
|
connect from a client: see `<INSTALL.md#tls-certificates>`_.
|
||||||
|
|
||||||
An easy way to get started is to login or register via Element at
|
An easy way to get started is to login or register via Riot at
|
||||||
https://app.element.io/#/login or https://app.element.io/#/register respectively.
|
https://riot.im/app/#/login or https://riot.im/app/#/register respectively.
|
||||||
You will need to change the server you are logging into from ``matrix.org``
|
You will need to change the server you are logging into from ``matrix.org``
|
||||||
and instead specify a Homeserver URL of ``https://<server_name>:8448``
|
and instead specify a Homeserver URL of ``https://<server_name>:8448``
|
||||||
(or just ``https://<server_name>`` if you are using a reverse proxy).
|
(or just ``https://<server_name>`` if you are using a reverse proxy).
|
||||||
|
(Leave the identity server as the default - see `Identity servers`_.)
|
||||||
If you prefer to use another client, refer to our
|
If you prefer to use another client, refer to our
|
||||||
`client breakdown <https://matrix.org/docs/projects/clients-matrix>`_.
|
`client breakdown <https://matrix.org/docs/projects/clients-matrix>`_.
|
||||||
|
|
||||||
@@ -126,10 +115,10 @@ Registering a new user from a client
|
|||||||
|
|
||||||
By default, registration of new users via Matrix clients is disabled. To enable
|
By default, registration of new users via Matrix clients is disabled. To enable
|
||||||
it, specify ``enable_registration: true`` in ``homeserver.yaml``. (It is then
|
it, specify ``enable_registration: true`` in ``homeserver.yaml``. (It is then
|
||||||
recommended to also set up CAPTCHA - see `<docs/CAPTCHA_SETUP.md>`_.)
|
recommended to also set up CAPTCHA - see `<docs/CAPTCHA_SETUP.rst>`_.)
|
||||||
|
|
||||||
Once ``enable_registration`` is set to ``true``, it is possible to register a
|
Once ``enable_registration`` is set to ``true``, it is possible to register a
|
||||||
user via a Matrix client.
|
user via `riot.im <https://riot.im/app/#/register>`_ or other Matrix clients.
|
||||||
|
|
||||||
Your new user name will be formed partly from the ``server_name``, and partly
|
Your new user name will be formed partly from the ``server_name``, and partly
|
||||||
from a localpart you specify when you create the account. Your name will take
|
from a localpart you specify when you create the account. Your name will take
|
||||||
@@ -175,6 +164,30 @@ versions of synapse.
|
|||||||
|
|
||||||
.. _UPGRADE.rst: UPGRADE.rst
|
.. _UPGRADE.rst: UPGRADE.rst
|
||||||
|
|
||||||
|
|
||||||
|
Using PostgreSQL
|
||||||
|
================
|
||||||
|
|
||||||
|
Synapse offers two database engines:
|
||||||
|
* `SQLite <https://sqlite.org/>`_
|
||||||
|
* `PostgreSQL <https://www.postgresql.org>`_
|
||||||
|
|
||||||
|
By default Synapse uses SQLite and in doing so trades performance for convenience.
|
||||||
|
SQLite is only recommended in Synapse for testing purposes or for servers with
|
||||||
|
light workloads.
|
||||||
|
|
||||||
|
Almost all installations should opt to use PostgreSQL. Advantages include:
|
||||||
|
|
||||||
|
* significant performance improvements due to the superior threading and
|
||||||
|
caching model, smarter query optimiser
|
||||||
|
* allowing the DB to be run on separate hardware
|
||||||
|
* allowing basic active/backup high-availability with a "hot spare" synapse
|
||||||
|
pointing at the same DB master, as well as enabling DB replication in
|
||||||
|
synapse itself.
|
||||||
|
|
||||||
|
For information on how to install and use PostgreSQL, please see
|
||||||
|
`docs/postgres.rst <docs/postgres.rst>`_.
|
||||||
|
|
||||||
.. _reverse-proxy:
|
.. _reverse-proxy:
|
||||||
|
|
||||||
Using a reverse proxy with Synapse
|
Using a reverse proxy with Synapse
|
||||||
@@ -183,13 +196,12 @@ Using a reverse proxy with Synapse
|
|||||||
It is recommended to put a reverse proxy such as
|
It is recommended to put a reverse proxy such as
|
||||||
`nginx <https://nginx.org/en/docs/http/ngx_http_proxy_module.html>`_,
|
`nginx <https://nginx.org/en/docs/http/ngx_http_proxy_module.html>`_,
|
||||||
`Apache <https://httpd.apache.org/docs/current/mod/mod_proxy_http.html>`_,
|
`Apache <https://httpd.apache.org/docs/current/mod/mod_proxy_http.html>`_,
|
||||||
`Caddy <https://caddyserver.com/docs/quick-starts/reverse-proxy>`_,
|
`Caddy <https://caddyserver.com/docs/proxy>`_ or
|
||||||
`HAProxy <https://www.haproxy.org/>`_ or
|
`HAProxy <https://www.haproxy.org/>`_ in front of Synapse. One advantage of
|
||||||
`relayd <https://man.openbsd.org/relayd.8>`_ in front of Synapse. One advantage of
|
|
||||||
doing so is that it means that you can expose the default https port (443) to
|
doing so is that it means that you can expose the default https port (443) to
|
||||||
Matrix clients without needing to run Synapse with root privileges.
|
Matrix clients without needing to run Synapse with root privileges.
|
||||||
|
|
||||||
For information on configuring one, see `<docs/reverse_proxy.md>`_.
|
For information on configuring one, see `<docs/reverse_proxy.rst>`_.
|
||||||
|
|
||||||
Identity Servers
|
Identity Servers
|
||||||
================
|
================
|
||||||
@@ -224,9 +236,10 @@ email address.
|
|||||||
Password reset
|
Password reset
|
||||||
==============
|
==============
|
||||||
|
|
||||||
Users can reset their password through their client. Alternatively, a server admin
|
If a user has registered an email address to their account using an identity
|
||||||
can reset a users password using the `admin API <docs/admin_api/user_admin_api.rst#reset-password>`_
|
server, they can request a password-reset token via clients such as Riot.
|
||||||
or by directly editing the database as shown below.
|
|
||||||
|
A manual password reset can be done via direct database access as follows.
|
||||||
|
|
||||||
First calculate the hash of the new password::
|
First calculate the hash of the new password::
|
||||||
|
|
||||||
@@ -235,7 +248,7 @@ First calculate the hash of the new password::
|
|||||||
Confirm password:
|
Confirm password:
|
||||||
$2a$12$xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx
|
$2a$12$xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx
|
||||||
|
|
||||||
Then update the ``users`` table in the database::
|
Then update the `users` table in the database::
|
||||||
|
|
||||||
UPDATE users SET password_hash='$2a$12$xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx'
|
UPDATE users SET password_hash='$2a$12$xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx'
|
||||||
WHERE name='@test:test.com';
|
WHERE name='@test:test.com';
|
||||||
@@ -244,8 +257,6 @@ Then update the ``users`` table in the database::
|
|||||||
Synapse Development
|
Synapse Development
|
||||||
===================
|
===================
|
||||||
|
|
||||||
Join our developer community on Matrix: `#synapse-dev:matrix.org <https://matrix.to/#/#synapse-dev:matrix.org>`_
|
|
||||||
|
|
||||||
Before setting up a development environment for synapse, make sure you have the
|
Before setting up a development environment for synapse, make sure you have the
|
||||||
system dependencies (such as the python header files) installed - see
|
system dependencies (such as the python header files) installed - see
|
||||||
`Installing from source <INSTALL.md#installing-from-source>`_.
|
`Installing from source <INSTALL.md#installing-from-source>`_.
|
||||||
@@ -259,48 +270,23 @@ directory of your choice::
|
|||||||
Synapse has a number of external dependencies, that are easiest
|
Synapse has a number of external dependencies, that are easiest
|
||||||
to install using pip and a virtualenv::
|
to install using pip and a virtualenv::
|
||||||
|
|
||||||
python3 -m venv ./env
|
virtualenv -p python3 env
|
||||||
source ./env/bin/activate
|
source env/bin/activate
|
||||||
pip install -e ".[all,test]"
|
python -m pip install --no-use-pep517 -e .[all]
|
||||||
|
|
||||||
This will run a process of downloading and installing all the needed
|
This will run a process of downloading and installing all the needed
|
||||||
dependencies into a virtual env. If any dependencies fail to install,
|
dependencies into a virtual env.
|
||||||
try installing the failing modules individually::
|
|
||||||
|
|
||||||
pip install -e "module-name"
|
Once this is done, you may wish to run Synapse's unit tests, to
|
||||||
|
check that everything is installed as it should be::
|
||||||
Once this is done, you may wish to run Synapse's unit tests to
|
|
||||||
check that everything is installed correctly::
|
|
||||||
|
|
||||||
python -m twisted.trial tests
|
python -m twisted.trial tests
|
||||||
|
|
||||||
This should end with a 'PASSED' result (note that exact numbers will
|
This should end with a 'PASSED' result::
|
||||||
differ)::
|
|
||||||
|
|
||||||
Ran 1337 tests in 716.064s
|
|
||||||
|
|
||||||
PASSED (skips=15, successes=1322)
|
|
||||||
|
|
||||||
We recommend using the demo which starts 3 federated instances running on ports `8080` - `8082`
|
|
||||||
|
|
||||||
./demo/start.sh
|
|
||||||
|
|
||||||
(to stop, you can use `./demo/stop.sh`)
|
|
||||||
|
|
||||||
If you just want to start a single instance of the app and run it directly::
|
|
||||||
|
|
||||||
# Create the homeserver.yaml config once
|
|
||||||
python -m synapse.app.homeserver \
|
|
||||||
--server-name my.domain.name \
|
|
||||||
--config-path homeserver.yaml \
|
|
||||||
--generate-config \
|
|
||||||
--report-stats=[yes|no]
|
|
||||||
|
|
||||||
# Start the app
|
|
||||||
python -m synapse.app.homeserver --config-path homeserver.yaml
|
|
||||||
|
|
||||||
|
|
||||||
|
Ran 143 tests in 0.601s
|
||||||
|
|
||||||
|
PASSED (successes=143)
|
||||||
|
|
||||||
Running the Integration Tests
|
Running the Integration Tests
|
||||||
=============================
|
=============================
|
||||||
@@ -314,21 +300,22 @@ Testing with SyTest is recommended for verifying that changes related to the
|
|||||||
Client-Server API are functioning correctly. See the `installation instructions
|
Client-Server API are functioning correctly. See the `installation instructions
|
||||||
<https://github.com/matrix-org/sytest#installing>`_ for details.
|
<https://github.com/matrix-org/sytest#installing>`_ for details.
|
||||||
|
|
||||||
|
Building Internal API Documentation
|
||||||
|
===================================
|
||||||
|
|
||||||
Platform dependencies
|
Before building internal API documentation install sphinx and
|
||||||
=====================
|
sphinxcontrib-napoleon::
|
||||||
|
|
||||||
Synapse uses a number of platform dependencies such as Python and PostgreSQL,
|
pip install sphinx
|
||||||
and aims to follow supported upstream versions. See the
|
pip install sphinxcontrib-napoleon
|
||||||
`<docs/deprecation_policy.md>`_ document for more details.
|
|
||||||
|
|
||||||
|
Building internal API documentation::
|
||||||
|
|
||||||
|
python setup.py build_sphinx
|
||||||
|
|
||||||
Troubleshooting
|
Troubleshooting
|
||||||
===============
|
===============
|
||||||
|
|
||||||
Need help? Join our community support room on Matrix:
|
|
||||||
`#synapse:matrix.org <https://matrix.to/#/#synapse:matrix.org>`_
|
|
||||||
|
|
||||||
Running out of File Handles
|
Running out of File Handles
|
||||||
---------------------------
|
---------------------------
|
||||||
|
|
||||||
@@ -393,42 +380,4 @@ massive excess of outgoing federation requests (see `discussion
|
|||||||
indicate that your server is also issuing far more outgoing federation
|
indicate that your server is also issuing far more outgoing federation
|
||||||
requests than can be accounted for by your users' activity, this is a
|
requests than can be accounted for by your users' activity, this is a
|
||||||
likely cause. The misbehavior can be worked around by setting
|
likely cause. The misbehavior can be worked around by setting
|
||||||
the following in the Synapse config file:
|
``use_presence: false`` in the Synapse config file.
|
||||||
|
|
||||||
.. code-block:: yaml
|
|
||||||
|
|
||||||
presence:
|
|
||||||
enabled: false
|
|
||||||
|
|
||||||
People can't accept room invitations from me
|
|
||||||
--------------------------------------------
|
|
||||||
|
|
||||||
The typical failure mode here is that you send an invitation to someone
|
|
||||||
to join a room or direct chat, but when they go to accept it, they get an
|
|
||||||
error (typically along the lines of "Invalid signature"). They might see
|
|
||||||
something like the following in their logs::
|
|
||||||
|
|
||||||
2019-09-11 19:32:04,271 - synapse.federation.transport.server - 288 - WARNING - GET-11752 - authenticate_request failed: 401: Invalid signature for server <server> with key ed25519:a_EqML: Unable to verify signature for <server>
|
|
||||||
|
|
||||||
This is normally caused by a misconfiguration in your reverse-proxy. See
|
|
||||||
`<docs/reverse_proxy.md>`_ and double-check that your settings are correct.
|
|
||||||
|
|
||||||
.. |support| image:: https://img.shields.io/matrix/synapse:matrix.org?label=support&logo=matrix
|
|
||||||
:alt: (get support on #synapse:matrix.org)
|
|
||||||
:target: https://matrix.to/#/#synapse:matrix.org
|
|
||||||
|
|
||||||
.. |development| image:: https://img.shields.io/matrix/synapse-dev:matrix.org?label=development&logo=matrix
|
|
||||||
:alt: (discuss development on #synapse-dev:matrix.org)
|
|
||||||
:target: https://matrix.to/#/#synapse-dev:matrix.org
|
|
||||||
|
|
||||||
.. |license| image:: https://img.shields.io/github/license/matrix-org/synapse
|
|
||||||
:alt: (check license in LICENSE file)
|
|
||||||
:target: LICENSE
|
|
||||||
|
|
||||||
.. |pypi| image:: https://img.shields.io/pypi/v/matrix-synapse
|
|
||||||
:alt: (latest version released on PyPi)
|
|
||||||
:target: https://pypi.org/project/matrix-synapse
|
|
||||||
|
|
||||||
.. |python| image:: https://img.shields.io/pypi/pyversions/matrix-synapse
|
|
||||||
:alt: (supported python versions)
|
|
||||||
:target: https://pypi.org/project/matrix-synapse
|
|
||||||
|
|||||||
+27
-816
@@ -2,828 +2,52 @@ Upgrading Synapse
|
|||||||
=================
|
=================
|
||||||
|
|
||||||
Before upgrading check if any special steps are required to upgrade from the
|
Before upgrading check if any special steps are required to upgrade from the
|
||||||
version you currently have installed to the current version of Synapse. The extra
|
what you currently have installed to current version of synapse. The extra
|
||||||
instructions that may be required are listed later in this document.
|
instructions that may be required are listed later in this document.
|
||||||
|
|
||||||
* Check that your versions of Python and PostgreSQL are still supported.
|
1. If synapse was installed in a virtualenv then activate that virtualenv before
|
||||||
|
upgrading. If synapse is installed in a virtualenv in ``~/synapse/env`` then
|
||||||
|
run:
|
||||||
|
|
||||||
Synapse follows upstream lifecycles for `Python`_ and `PostgreSQL`_, and
|
.. code:: bash
|
||||||
removes support for versions which are no longer maintained.
|
|
||||||
|
|
||||||
The website https://endoflife.date also offers convenient summaries.
|
|
||||||
|
|
||||||
.. _Python: https://devguide.python.org/devcycle/#end-of-life-branches
|
|
||||||
.. _PostgreSQL: https://www.postgresql.org/support/versioning/
|
|
||||||
|
|
||||||
* If Synapse was installed using `prebuilt packages
|
|
||||||
<INSTALL.md#prebuilt-packages>`_, you will need to follow the normal process
|
|
||||||
for upgrading those packages.
|
|
||||||
|
|
||||||
* If Synapse was installed from source, then:
|
|
||||||
|
|
||||||
1. Activate the virtualenv before upgrading. For example, if Synapse is
|
|
||||||
installed in a virtualenv in ``~/synapse/env`` then run:
|
|
||||||
|
|
||||||
.. code:: bash
|
|
||||||
|
|
||||||
source ~/synapse/env/bin/activate
|
source ~/synapse/env/bin/activate
|
||||||
|
|
||||||
2. If Synapse was installed using pip then upgrade to the latest version by
|
2. If synapse was installed using pip then upgrade to the latest version by
|
||||||
running:
|
running:
|
||||||
|
|
||||||
.. code:: bash
|
.. code:: bash
|
||||||
|
|
||||||
pip install --upgrade matrix-synapse
|
pip install --upgrade matrix-synapse[all]
|
||||||
|
|
||||||
If Synapse was installed using git then upgrade to the latest version by
|
# restart synapse
|
||||||
running:
|
synctl restart
|
||||||
|
|
||||||
.. code:: bash
|
|
||||||
|
|
||||||
|
If synapse was installed using git then upgrade to the latest version by
|
||||||
|
running:
|
||||||
|
|
||||||
|
.. code:: bash
|
||||||
|
|
||||||
|
# Pull the latest version of the master branch.
|
||||||
git pull
|
git pull
|
||||||
pip install --upgrade .
|
|
||||||
|
|
||||||
3. Restart Synapse:
|
# Update synapse and its python dependencies.
|
||||||
|
pip install --upgrade .[all]
|
||||||
.. code:: bash
|
|
||||||
|
|
||||||
|
# restart synapse
|
||||||
./synctl restart
|
./synctl restart
|
||||||
|
|
||||||
To check whether your update was successful, you can check the running server
|
|
||||||
version with:
|
To check whether your update was successful, you can check the Server header
|
||||||
|
returned by the Client-Server API:
|
||||||
|
|
||||||
.. code:: bash
|
.. code:: bash
|
||||||
|
|
||||||
# you may need to replace 'localhost:8008' if synapse is not configured
|
# replace <host.name> with the hostname of your synapse homeserver.
|
||||||
# to listen on port 8008.
|
# You may need to specify a port (eg, :8448) if your server is not
|
||||||
|
# configured on port 443.
|
||||||
curl http://localhost:8008/_synapse/admin/v1/server_version
|
curl -kv https://<host.name>/_matrix/client/versions 2>&1 | grep "Server:"
|
||||||
|
|
||||||
Rolling back to older versions
|
|
||||||
------------------------------
|
|
||||||
|
|
||||||
Rolling back to previous releases can be difficult, due to database schema
|
|
||||||
changes between releases. Where we have been able to test the rollback process,
|
|
||||||
this will be noted below.
|
|
||||||
|
|
||||||
In general, you will need to undo any changes made during the upgrade process,
|
|
||||||
for example:
|
|
||||||
|
|
||||||
* pip:
|
|
||||||
|
|
||||||
.. code:: bash
|
|
||||||
|
|
||||||
source env/bin/activate
|
|
||||||
# replace `1.3.0` accordingly:
|
|
||||||
pip install matrix-synapse==1.3.0
|
|
||||||
|
|
||||||
* Debian:
|
|
||||||
|
|
||||||
.. code:: bash
|
|
||||||
|
|
||||||
# replace `1.3.0` and `stretch` accordingly:
|
|
||||||
wget https://packages.matrix.org/debian/pool/main/m/matrix-synapse-py3/matrix-synapse-py3_1.3.0+stretch1_amd64.deb
|
|
||||||
dpkg -i matrix-synapse-py3_1.3.0+stretch1_amd64.deb
|
|
||||||
|
|
||||||
Upgrading to v1.32.0
|
|
||||||
====================
|
|
||||||
|
|
||||||
Regression causing connected Prometheus instances to become overwhelmed
|
|
||||||
-----------------------------------------------------------------------
|
|
||||||
|
|
||||||
This release introduces `a regression <https://github.com/matrix-org/synapse/issues/9853>`_
|
|
||||||
that can overwhelm connected Prometheus instances. This issue is not present in
|
|
||||||
Synapse v1.32.0rc1.
|
|
||||||
|
|
||||||
If you have been affected, please downgrade to 1.31.0. You then may need to
|
|
||||||
remove excess writeahead logs in order for Prometheus to recover. Instructions
|
|
||||||
for doing so are provided
|
|
||||||
`here <https://github.com/matrix-org/synapse/pull/9854#issuecomment-823472183>`_.
|
|
||||||
|
|
||||||
Dropping support for old Python, Postgres and SQLite versions
|
|
||||||
-------------------------------------------------------------
|
|
||||||
|
|
||||||
In line with our `deprecation policy <https://github.com/matrix-org/synapse/blob/release-v1.32.0/docs/deprecation_policy.md>`_,
|
|
||||||
we've dropped support for Python 3.5 and PostgreSQL 9.5, as they are no longer supported upstream.
|
|
||||||
|
|
||||||
This release of Synapse requires Python 3.6+ and PostgreSQL 9.6+ or SQLite 3.22+.
|
|
||||||
|
|
||||||
Removal of old List Accounts Admin API
|
|
||||||
--------------------------------------
|
|
||||||
|
|
||||||
The deprecated v1 "list accounts" admin API (``GET /_synapse/admin/v1/users/<user_id>``) has been removed in this version.
|
|
||||||
|
|
||||||
The `v2 list accounts API <https://github.com/matrix-org/synapse/blob/master/docs/admin_api/user_admin_api.rst#list-accounts>`_
|
|
||||||
has been available since Synapse 1.7.0 (2019-12-13), and is accessible under ``GET /_synapse/admin/v2/users``.
|
|
||||||
|
|
||||||
The deprecation of the old endpoint was announced with Synapse 1.28.0 (released on 2021-02-25).
|
|
||||||
|
|
||||||
Application Services must use type ``m.login.application_service`` when registering users
|
|
||||||
-----------------------------------------------------------------------------------------
|
|
||||||
|
|
||||||
In compliance with the
|
|
||||||
`Application Service spec <https://matrix.org/docs/spec/application_service/r0.1.2#server-admin-style-permissions>`_,
|
|
||||||
Application Services are now required to use the ``m.login.application_service`` type when registering users via the
|
|
||||||
``/_matrix/client/r0/register`` endpoint. This behaviour was deprecated in Synapse v1.30.0.
|
|
||||||
|
|
||||||
Please ensure your Application Services are up to date.
|
|
||||||
|
|
||||||
Upgrading to v1.29.0
|
|
||||||
====================
|
|
||||||
|
|
||||||
Requirement for X-Forwarded-Proto header
|
|
||||||
----------------------------------------
|
|
||||||
|
|
||||||
When using Synapse with a reverse proxy (in particular, when using the
|
|
||||||
`x_forwarded` option on an HTTP listener), Synapse now expects to receive an
|
|
||||||
`X-Forwarded-Proto` header on incoming HTTP requests. If it is not set, Synapse
|
|
||||||
will log a warning on each received request.
|
|
||||||
|
|
||||||
To avoid the warning, administrators using a reverse proxy should ensure that
|
|
||||||
the reverse proxy sets `X-Forwarded-Proto` header to `https` or `http` to
|
|
||||||
indicate the protocol used by the client.
|
|
||||||
|
|
||||||
Synapse also requires the `Host` header to be preserved.
|
|
||||||
|
|
||||||
See the `reverse proxy documentation <docs/reverse_proxy.md>`_, where the
|
|
||||||
example configurations have been updated to show how to set these headers.
|
|
||||||
|
|
||||||
(Users of `Caddy <https://caddyserver.com/>`_ are unaffected, since we believe it
|
|
||||||
sets `X-Forwarded-Proto` by default.)
|
|
||||||
|
|
||||||
Upgrading to v1.27.0
|
|
||||||
====================
|
|
||||||
|
|
||||||
Changes to callback URI for OAuth2 / OpenID Connect and SAML2
|
|
||||||
-------------------------------------------------------------
|
|
||||||
|
|
||||||
This version changes the URI used for callbacks from OAuth2 and SAML2 identity providers:
|
|
||||||
|
|
||||||
* If your server is configured for single sign-on via an OpenID Connect or OAuth2 identity
|
|
||||||
provider, you will need to add ``[synapse public baseurl]/_synapse/client/oidc/callback``
|
|
||||||
to the list of permitted "redirect URIs" at the identity provider.
|
|
||||||
|
|
||||||
See `docs/openid.md <docs/openid.md>`_ for more information on setting up OpenID
|
|
||||||
Connect.
|
|
||||||
|
|
||||||
* If your server is configured for single sign-on via a SAML2 identity provider, you will
|
|
||||||
need to add ``[synapse public baseurl]/_synapse/client/saml2/authn_response`` as a permitted
|
|
||||||
"ACS location" (also known as "allowed callback URLs") at the identity provider.
|
|
||||||
|
|
||||||
The "Issuer" in the "AuthnRequest" to the SAML2 identity provider is also updated to
|
|
||||||
``[synapse public baseurl]/_synapse/client/saml2/metadata.xml``. If your SAML2 identity
|
|
||||||
provider uses this property to validate or otherwise identify Synapse, its configuration
|
|
||||||
will need to be updated to use the new URL. Alternatively you could create a new, separate
|
|
||||||
"EntityDescriptor" in your SAML2 identity provider with the new URLs and leave the URLs in
|
|
||||||
the existing "EntityDescriptor" as they were.
|
|
||||||
|
|
||||||
Changes to HTML templates
|
|
||||||
-------------------------
|
|
||||||
|
|
||||||
The HTML templates for SSO and email notifications now have `Jinja2's autoescape <https://jinja.palletsprojects.com/en/2.11.x/api/#autoescaping>`_
|
|
||||||
enabled for files ending in ``.html``, ``.htm``, and ``.xml``. If you have customised
|
|
||||||
these templates and see issues when viewing them you might need to update them.
|
|
||||||
It is expected that most configurations will need no changes.
|
|
||||||
|
|
||||||
If you have customised the *names* of these templates, it is recommended
|
|
||||||
to verify they end in ``.html`` to ensure autoescape is enabled.
|
|
||||||
|
|
||||||
The above applies to the following templates:
|
|
||||||
|
|
||||||
* ``add_threepid.html``
|
|
||||||
* ``add_threepid_failure.html``
|
|
||||||
* ``add_threepid_success.html``
|
|
||||||
* ``notice_expiry.html``
|
|
||||||
* ``notice_expiry.html``
|
|
||||||
* ``notif_mail.html`` (which, by default, includes ``room.html`` and ``notif.html``)
|
|
||||||
* ``password_reset.html``
|
|
||||||
* ``password_reset_confirmation.html``
|
|
||||||
* ``password_reset_failure.html``
|
|
||||||
* ``password_reset_success.html``
|
|
||||||
* ``registration.html``
|
|
||||||
* ``registration_failure.html``
|
|
||||||
* ``registration_success.html``
|
|
||||||
* ``sso_account_deactivated.html``
|
|
||||||
* ``sso_auth_bad_user.html``
|
|
||||||
* ``sso_auth_confirm.html``
|
|
||||||
* ``sso_auth_success.html``
|
|
||||||
* ``sso_error.html``
|
|
||||||
* ``sso_login_idp_picker.html``
|
|
||||||
* ``sso_redirect_confirm.html``
|
|
||||||
|
|
||||||
Upgrading to v1.26.0
|
|
||||||
====================
|
|
||||||
|
|
||||||
Rolling back to v1.25.0 after a failed upgrade
|
|
||||||
----------------------------------------------
|
|
||||||
|
|
||||||
v1.26.0 includes a lot of large changes. If something problematic occurs, you
|
|
||||||
may want to roll-back to a previous version of Synapse. Because v1.26.0 also
|
|
||||||
includes a new database schema version, reverting that version is also required
|
|
||||||
alongside the generic rollback instructions mentioned above. In short, to roll
|
|
||||||
back to v1.25.0 you need to:
|
|
||||||
|
|
||||||
1. Stop the server
|
|
||||||
2. Decrease the schema version in the database:
|
|
||||||
|
|
||||||
.. code:: sql
|
|
||||||
|
|
||||||
UPDATE schema_version SET version = 58;
|
|
||||||
|
|
||||||
3. Delete the ignored users & chain cover data:
|
|
||||||
|
|
||||||
.. code:: sql
|
|
||||||
|
|
||||||
DROP TABLE IF EXISTS ignored_users;
|
|
||||||
UPDATE rooms SET has_auth_chain_index = false;
|
|
||||||
|
|
||||||
For PostgreSQL run:
|
|
||||||
|
|
||||||
.. code:: sql
|
|
||||||
|
|
||||||
TRUNCATE event_auth_chain_links;
|
|
||||||
TRUNCATE event_auth_chains;
|
|
||||||
|
|
||||||
For SQLite run:
|
|
||||||
|
|
||||||
.. code:: sql
|
|
||||||
|
|
||||||
DELETE FROM event_auth_chain_links;
|
|
||||||
DELETE FROM event_auth_chains;
|
|
||||||
|
|
||||||
4. Mark the deltas as not run (so they will re-run on upgrade).
|
|
||||||
|
|
||||||
.. code:: sql
|
|
||||||
|
|
||||||
DELETE FROM applied_schema_deltas WHERE version = 59 AND file = "59/01ignored_user.py";
|
|
||||||
DELETE FROM applied_schema_deltas WHERE version = 59 AND file = "59/06chain_cover_index.sql";
|
|
||||||
|
|
||||||
5. Downgrade Synapse by following the instructions for your installation method
|
|
||||||
in the "Rolling back to older versions" section above.
|
|
||||||
|
|
||||||
Upgrading to v1.25.0
|
|
||||||
====================
|
|
||||||
|
|
||||||
Last release supporting Python 3.5
|
|
||||||
----------------------------------
|
|
||||||
|
|
||||||
This is the last release of Synapse which guarantees support with Python 3.5,
|
|
||||||
which passed its upstream End of Life date several months ago.
|
|
||||||
|
|
||||||
We will attempt to maintain support through March 2021, but without guarantees.
|
|
||||||
|
|
||||||
In the future, Synapse will follow upstream schedules for ending support of
|
|
||||||
older versions of Python and PostgreSQL. Please upgrade to at least Python 3.6
|
|
||||||
and PostgreSQL 9.6 as soon as possible.
|
|
||||||
|
|
||||||
Blacklisting IP ranges
|
|
||||||
----------------------
|
|
||||||
|
|
||||||
Synapse v1.25.0 includes new settings, ``ip_range_blacklist`` and
|
|
||||||
``ip_range_whitelist``, for controlling outgoing requests from Synapse for federation,
|
|
||||||
identity servers, push, and for checking key validity for third-party invite events.
|
|
||||||
The previous setting, ``federation_ip_range_blacklist``, is deprecated. The new
|
|
||||||
``ip_range_blacklist`` defaults to private IP ranges if it is not defined.
|
|
||||||
|
|
||||||
If you have never customised ``federation_ip_range_blacklist`` it is recommended
|
|
||||||
that you remove that setting.
|
|
||||||
|
|
||||||
If you have customised ``federation_ip_range_blacklist`` you should update the
|
|
||||||
setting name to ``ip_range_blacklist``.
|
|
||||||
|
|
||||||
If you have a custom push server that is reached via private IP space you may
|
|
||||||
need to customise ``ip_range_blacklist`` or ``ip_range_whitelist``.
|
|
||||||
|
|
||||||
Upgrading to v1.24.0
|
|
||||||
====================
|
|
||||||
|
|
||||||
Custom OpenID Connect mapping provider breaking change
|
|
||||||
------------------------------------------------------
|
|
||||||
|
|
||||||
This release allows the OpenID Connect mapping provider to perform normalisation
|
|
||||||
of the localpart of the Matrix ID. This allows for the mapping provider to
|
|
||||||
specify different algorithms, instead of the `default way <https://matrix.org/docs/spec/appendices#mapping-from-other-character-sets>`_.
|
|
||||||
|
|
||||||
If your Synapse configuration uses a custom mapping provider
|
|
||||||
(`oidc_config.user_mapping_provider.module` is specified and not equal to
|
|
||||||
`synapse.handlers.oidc_handler.JinjaOidcMappingProvider`) then you *must* ensure
|
|
||||||
that `map_user_attributes` of the mapping provider performs some normalisation
|
|
||||||
of the `localpart` returned. To match previous behaviour you can use the
|
|
||||||
`map_username_to_mxid_localpart` function provided by Synapse. An example is
|
|
||||||
shown below:
|
|
||||||
|
|
||||||
.. code-block:: python
|
|
||||||
|
|
||||||
from synapse.types import map_username_to_mxid_localpart
|
|
||||||
|
|
||||||
class MyMappingProvider:
|
|
||||||
def map_user_attributes(self, userinfo, token):
|
|
||||||
# ... your custom logic ...
|
|
||||||
sso_user_id = ...
|
|
||||||
localpart = map_username_to_mxid_localpart(sso_user_id)
|
|
||||||
|
|
||||||
return {"localpart": localpart}
|
|
||||||
|
|
||||||
Removal of historical Synapse Admin API
|
|
||||||
---------------------------------------
|
|
||||||
|
|
||||||
Historically, the Synapse Admin API has been accessible under:
|
|
||||||
|
|
||||||
* ``/_matrix/client/api/v1/admin``
|
|
||||||
* ``/_matrix/client/unstable/admin``
|
|
||||||
* ``/_matrix/client/r0/admin``
|
|
||||||
* ``/_synapse/admin/v1``
|
|
||||||
|
|
||||||
The endpoints with ``/_matrix/client/*`` prefixes have been removed as of v1.24.0.
|
|
||||||
The Admin API is now only accessible under:
|
|
||||||
|
|
||||||
* ``/_synapse/admin/v1``
|
|
||||||
|
|
||||||
The only exception is the `/admin/whois` endpoint, which is
|
|
||||||
`also available via the client-server API <https://matrix.org/docs/spec/client_server/r0.6.1#get-matrix-client-r0-admin-whois-userid>`_.
|
|
||||||
|
|
||||||
The deprecation of the old endpoints was announced with Synapse 1.20.0 (released
|
|
||||||
on 2020-09-22) and makes it easier for homeserver admins to lock down external
|
|
||||||
access to the Admin API endpoints.
|
|
||||||
|
|
||||||
Upgrading to v1.23.0
|
|
||||||
====================
|
|
||||||
|
|
||||||
Structured logging configuration breaking changes
|
|
||||||
-------------------------------------------------
|
|
||||||
|
|
||||||
This release deprecates use of the ``structured: true`` logging configuration for
|
|
||||||
structured logging. If your logging configuration contains ``structured: true``
|
|
||||||
then it should be modified based on the `structured logging documentation
|
|
||||||
<https://github.com/matrix-org/synapse/blob/master/docs/structured_logging.md>`_.
|
|
||||||
|
|
||||||
The ``structured`` and ``drains`` logging options are now deprecated and should
|
|
||||||
be replaced by standard logging configuration of ``handlers`` and ``formatters``.
|
|
||||||
|
|
||||||
A future release of Synapse will make using ``structured: true`` an error.
|
|
||||||
|
|
||||||
Upgrading to v1.22.0
|
|
||||||
====================
|
|
||||||
|
|
||||||
ThirdPartyEventRules breaking changes
|
|
||||||
-------------------------------------
|
|
||||||
|
|
||||||
This release introduces a backwards-incompatible change to modules making use of
|
|
||||||
``ThirdPartyEventRules`` in Synapse. If you make use of a module defined under the
|
|
||||||
``third_party_event_rules`` config option, please make sure it is updated to handle
|
|
||||||
the below change:
|
|
||||||
|
|
||||||
The ``http_client`` argument is no longer passed to modules as they are initialised. Instead,
|
|
||||||
modules are expected to make use of the ``http_client`` property on the ``ModuleApi`` class.
|
|
||||||
Modules are now passed a ``module_api`` argument during initialisation, which is an instance of
|
|
||||||
``ModuleApi``. ``ModuleApi`` instances have a ``http_client`` property which acts the same as
|
|
||||||
the ``http_client`` argument previously passed to ``ThirdPartyEventRules`` modules.
|
|
||||||
|
|
||||||
Upgrading to v1.21.0
|
|
||||||
====================
|
|
||||||
|
|
||||||
Forwarding ``/_synapse/client`` through your reverse proxy
|
|
||||||
----------------------------------------------------------
|
|
||||||
|
|
||||||
The `reverse proxy documentation
|
|
||||||
<https://github.com/matrix-org/synapse/blob/develop/docs/reverse_proxy.md>`_ has been updated
|
|
||||||
to include reverse proxy directives for ``/_synapse/client/*`` endpoints. As the user password
|
|
||||||
reset flow now uses endpoints under this prefix, **you must update your reverse proxy
|
|
||||||
configurations for user password reset to work**.
|
|
||||||
|
|
||||||
Additionally, note that the `Synapse worker documentation
|
|
||||||
<https://github.com/matrix-org/synapse/blob/develop/docs/workers.md>`_ has been updated to
|
|
||||||
state that the ``/_synapse/client/password_reset/email/submit_token`` endpoint can be handled
|
|
||||||
by all workers. If you make use of Synapse's worker feature, please update your reverse proxy
|
|
||||||
configuration to reflect this change.
|
|
||||||
|
|
||||||
New HTML templates
|
|
||||||
------------------
|
|
||||||
|
|
||||||
A new HTML template,
|
|
||||||
`password_reset_confirmation.html <https://github.com/matrix-org/synapse/blob/develop/synapse/res/templates/password_reset_confirmation.html>`_,
|
|
||||||
has been added to the ``synapse/res/templates`` directory. If you are using a
|
|
||||||
custom template directory, you may want to copy the template over and modify it.
|
|
||||||
|
|
||||||
Note that as of v1.20.0, templates do not need to be included in custom template
|
|
||||||
directories for Synapse to start. The default templates will be used if a custom
|
|
||||||
template cannot be found.
|
|
||||||
|
|
||||||
This page will appear to the user after clicking a password reset link that has
|
|
||||||
been emailed to them.
|
|
||||||
|
|
||||||
To complete password reset, the page must include a way to make a `POST`
|
|
||||||
request to
|
|
||||||
``/_synapse/client/password_reset/{medium}/submit_token``
|
|
||||||
with the query parameters from the original link, presented as a URL-encoded form. See the file
|
|
||||||
itself for more details.
|
|
||||||
|
|
||||||
Updated Single Sign-on HTML Templates
|
|
||||||
-------------------------------------
|
|
||||||
|
|
||||||
The ``saml_error.html`` template was removed from Synapse and replaced with the
|
|
||||||
``sso_error.html`` template. If your Synapse is configured to use SAML and a
|
|
||||||
custom ``sso_redirect_confirm_template_dir`` configuration then any customisations
|
|
||||||
of the ``saml_error.html`` template will need to be merged into the ``sso_error.html``
|
|
||||||
template. These templates are similar, but the parameters are slightly different:
|
|
||||||
|
|
||||||
* The ``msg`` parameter should be renamed to ``error_description``.
|
|
||||||
* There is no longer a ``code`` parameter for the response code.
|
|
||||||
* A string ``error`` parameter is available that includes a short hint of why a
|
|
||||||
user is seeing the error page.
|
|
||||||
|
|
||||||
Upgrading to v1.18.0
|
|
||||||
====================
|
|
||||||
|
|
||||||
Docker `-py3` suffix will be removed in future versions
|
|
||||||
-------------------------------------------------------
|
|
||||||
|
|
||||||
From 10th August 2020, we will no longer publish Docker images with the `-py3` tag suffix. The images tagged with the `-py3` suffix have been identical to the non-suffixed tags since release 0.99.0, and the suffix is obsolete.
|
|
||||||
|
|
||||||
On 10th August, we will remove the `latest-py3` tag. Existing per-release tags (such as `v1.18.0-py3`) will not be removed, but no new `-py3` tags will be added.
|
|
||||||
|
|
||||||
Scripts relying on the `-py3` suffix will need to be updated.
|
|
||||||
|
|
||||||
Redis replication is now recommended in lieu of TCP replication
|
|
||||||
---------------------------------------------------------------
|
|
||||||
|
|
||||||
When setting up worker processes, we now recommend the use of a Redis server for replication. **The old direct TCP connection method is deprecated and will be removed in a future release.**
|
|
||||||
See `docs/workers.md <docs/workers.md>`_ for more details.
|
|
||||||
|
|
||||||
Upgrading to v1.14.0
|
|
||||||
====================
|
|
||||||
|
|
||||||
This version includes a database update which is run as part of the upgrade,
|
|
||||||
and which may take a couple of minutes in the case of a large server. Synapse
|
|
||||||
will not respond to HTTP requests while this update is taking place.
|
|
||||||
|
|
||||||
Upgrading to v1.13.0
|
|
||||||
====================
|
|
||||||
|
|
||||||
Incorrect database migration in old synapse versions
|
|
||||||
----------------------------------------------------
|
|
||||||
|
|
||||||
A bug was introduced in Synapse 1.4.0 which could cause the room directory to
|
|
||||||
be incomplete or empty if Synapse was upgraded directly from v1.2.1 or
|
|
||||||
earlier, to versions between v1.4.0 and v1.12.x.
|
|
||||||
|
|
||||||
This will *not* be a problem for Synapse installations which were:
|
|
||||||
* created at v1.4.0 or later,
|
|
||||||
* upgraded via v1.3.x, or
|
|
||||||
* upgraded straight from v1.2.1 or earlier to v1.13.0 or later.
|
|
||||||
|
|
||||||
If completeness of the room directory is a concern, installations which are
|
|
||||||
affected can be repaired as follows:
|
|
||||||
|
|
||||||
1. Run the following sql from a `psql` or `sqlite3` console:
|
|
||||||
|
|
||||||
.. code:: sql
|
|
||||||
|
|
||||||
INSERT INTO background_updates (update_name, progress_json, depends_on) VALUES
|
|
||||||
('populate_stats_process_rooms', '{}', 'current_state_events_membership');
|
|
||||||
|
|
||||||
INSERT INTO background_updates (update_name, progress_json, depends_on) VALUES
|
|
||||||
('populate_stats_process_users', '{}', 'populate_stats_process_rooms');
|
|
||||||
|
|
||||||
2. Restart synapse.
|
|
||||||
|
|
||||||
New Single Sign-on HTML Templates
|
|
||||||
---------------------------------
|
|
||||||
|
|
||||||
New templates (``sso_auth_confirm.html``, ``sso_auth_success.html``, and
|
|
||||||
``sso_account_deactivated.html``) were added to Synapse. If your Synapse is
|
|
||||||
configured to use SSO and a custom ``sso_redirect_confirm_template_dir``
|
|
||||||
configuration then these templates will need to be copied from
|
|
||||||
`synapse/res/templates <synapse/res/templates>`_ into that directory.
|
|
||||||
|
|
||||||
Synapse SSO Plugins Method Deprecation
|
|
||||||
--------------------------------------
|
|
||||||
|
|
||||||
Plugins using the ``complete_sso_login`` method of
|
|
||||||
``synapse.module_api.ModuleApi`` should update to using the async/await
|
|
||||||
version ``complete_sso_login_async`` which includes additional checks. The
|
|
||||||
non-async version is considered deprecated.
|
|
||||||
|
|
||||||
Rolling back to v1.12.4 after a failed upgrade
|
|
||||||
----------------------------------------------
|
|
||||||
|
|
||||||
v1.13.0 includes a lot of large changes. If something problematic occurs, you
|
|
||||||
may want to roll-back to a previous version of Synapse. Because v1.13.0 also
|
|
||||||
includes a new database schema version, reverting that version is also required
|
|
||||||
alongside the generic rollback instructions mentioned above. In short, to roll
|
|
||||||
back to v1.12.4 you need to:
|
|
||||||
|
|
||||||
1. Stop the server
|
|
||||||
2. Decrease the schema version in the database:
|
|
||||||
|
|
||||||
.. code:: sql
|
|
||||||
|
|
||||||
UPDATE schema_version SET version = 57;
|
|
||||||
|
|
||||||
3. Downgrade Synapse by following the instructions for your installation method
|
|
||||||
in the "Rolling back to older versions" section above.
|
|
||||||
|
|
||||||
|
|
||||||
Upgrading to v1.12.0
|
|
||||||
====================
|
|
||||||
|
|
||||||
This version includes a database update which is run as part of the upgrade,
|
|
||||||
and which may take some time (several hours in the case of a large
|
|
||||||
server). Synapse will not respond to HTTP requests while this update is taking
|
|
||||||
place.
|
|
||||||
|
|
||||||
This is only likely to be a problem in the case of a server which is
|
|
||||||
participating in many rooms.
|
|
||||||
|
|
||||||
0. As with all upgrades, it is recommended that you have a recent backup of
|
|
||||||
your database which can be used for recovery in the event of any problems.
|
|
||||||
|
|
||||||
1. As an initial check to see if you will be affected, you can try running the
|
|
||||||
following query from the `psql` or `sqlite3` console. It is safe to run it
|
|
||||||
while Synapse is still running.
|
|
||||||
|
|
||||||
.. code:: sql
|
|
||||||
|
|
||||||
SELECT MAX(q.v) FROM (
|
|
||||||
SELECT (
|
|
||||||
SELECT ej.json AS v
|
|
||||||
FROM state_events se INNER JOIN event_json ej USING (event_id)
|
|
||||||
WHERE se.room_id=rooms.room_id AND se.type='m.room.create' AND se.state_key=''
|
|
||||||
LIMIT 1
|
|
||||||
) FROM rooms WHERE rooms.room_version IS NULL
|
|
||||||
) q;
|
|
||||||
|
|
||||||
This query will take about the same amount of time as the upgrade process: ie,
|
|
||||||
if it takes 5 minutes, then it is likely that Synapse will be unresponsive for
|
|
||||||
5 minutes during the upgrade.
|
|
||||||
|
|
||||||
If you consider an outage of this duration to be acceptable, no further
|
|
||||||
action is necessary and you can simply start Synapse 1.12.0.
|
|
||||||
|
|
||||||
If you would prefer to reduce the downtime, continue with the steps below.
|
|
||||||
|
|
||||||
2. The easiest workaround for this issue is to manually
|
|
||||||
create a new index before upgrading. On PostgreSQL, this can be done as follows:
|
|
||||||
|
|
||||||
.. code:: sql
|
|
||||||
|
|
||||||
CREATE INDEX CONCURRENTLY tmp_upgrade_1_12_0_index
|
|
||||||
ON state_events(room_id) WHERE type = 'm.room.create';
|
|
||||||
|
|
||||||
The above query may take some time, but is also safe to run while Synapse is
|
|
||||||
running.
|
|
||||||
|
|
||||||
We assume that no SQLite users have databases large enough to be
|
|
||||||
affected. If you *are* affected, you can run a similar query, omitting the
|
|
||||||
``CONCURRENTLY`` keyword. Note however that this operation may in itself cause
|
|
||||||
Synapse to stop running for some time. Synapse admins are reminded that
|
|
||||||
`SQLite is not recommended for use outside a test
|
|
||||||
environment <https://github.com/matrix-org/synapse/blob/master/README.rst#using-postgresql>`_.
|
|
||||||
|
|
||||||
3. Once the index has been created, the ``SELECT`` query in step 1 above should
|
|
||||||
complete quickly. It is therefore safe to upgrade to Synapse 1.12.0.
|
|
||||||
|
|
||||||
4. Once Synapse 1.12.0 has successfully started and is responding to HTTP
|
|
||||||
requests, the temporary index can be removed:
|
|
||||||
|
|
||||||
.. code:: sql
|
|
||||||
|
|
||||||
DROP INDEX tmp_upgrade_1_12_0_index;
|
|
||||||
|
|
||||||
Upgrading to v1.10.0
|
|
||||||
====================
|
|
||||||
|
|
||||||
Synapse will now log a warning on start up if used with a PostgreSQL database
|
|
||||||
that has a non-recommended locale set.
|
|
||||||
|
|
||||||
See `docs/postgres.md <docs/postgres.md>`_ for details.
|
|
||||||
|
|
||||||
|
|
||||||
Upgrading to v1.8.0
|
|
||||||
===================
|
|
||||||
|
|
||||||
Specifying a ``log_file`` config option will now cause Synapse to refuse to
|
|
||||||
start, and should be replaced with the ``log_config`` option. Support for
|
|
||||||
the ``log_file`` option was removed in v1.3.0 and has since had no effect.
|
|
||||||
|
|
||||||
|
|
||||||
Upgrading to v1.7.0
|
|
||||||
===================
|
|
||||||
|
|
||||||
In an attempt to configure Synapse in a privacy preserving way, the default
|
|
||||||
behaviours of ``allow_public_rooms_without_auth`` and
|
|
||||||
``allow_public_rooms_over_federation`` have been inverted. This means that by
|
|
||||||
default, only authenticated users querying the Client/Server API will be able
|
|
||||||
to query the room directory, and relatedly that the server will not share
|
|
||||||
room directory information with other servers over federation.
|
|
||||||
|
|
||||||
If your installation does not explicitly set these settings one way or the other
|
|
||||||
and you want either setting to be ``true`` then it will be necessary to update
|
|
||||||
your homeserver configuration file accordingly.
|
|
||||||
|
|
||||||
For more details on the surrounding context see our `explainer
|
|
||||||
<https://matrix.org/blog/2019/11/09/avoiding-unwelcome-visitors-on-private-matrix-servers>`_.
|
|
||||||
|
|
||||||
|
|
||||||
Upgrading to v1.5.0
|
|
||||||
===================
|
|
||||||
|
|
||||||
This release includes a database migration which may take several minutes to
|
|
||||||
complete if there are a large number (more than a million or so) of entries in
|
|
||||||
the ``devices`` table. This is only likely to be a problem on very large
|
|
||||||
installations.
|
|
||||||
|
|
||||||
|
|
||||||
Upgrading to v1.4.0
|
|
||||||
===================
|
|
||||||
|
|
||||||
New custom templates
|
|
||||||
--------------------
|
|
||||||
|
|
||||||
If you have configured a custom template directory with the
|
|
||||||
``email.template_dir`` option, be aware that there are new templates regarding
|
|
||||||
registration and threepid management (see below) that must be included.
|
|
||||||
|
|
||||||
* ``registration.html`` and ``registration.txt``
|
|
||||||
* ``registration_success.html`` and ``registration_failure.html``
|
|
||||||
* ``add_threepid.html`` and ``add_threepid.txt``
|
|
||||||
* ``add_threepid_failure.html`` and ``add_threepid_success.html``
|
|
||||||
|
|
||||||
Synapse will expect these files to exist inside the configured template
|
|
||||||
directory, and **will fail to start** if they are absent.
|
|
||||||
To view the default templates, see `synapse/res/templates
|
|
||||||
<https://github.com/matrix-org/synapse/tree/master/synapse/res/templates>`_.
|
|
||||||
|
|
||||||
3pid verification changes
|
|
||||||
-------------------------
|
|
||||||
|
|
||||||
**Note: As of this release, users will be unable to add phone numbers or email
|
|
||||||
addresses to their accounts, without changes to the Synapse configuration. This
|
|
||||||
includes adding an email address during registration.**
|
|
||||||
|
|
||||||
It is possible for a user to associate an email address or phone number
|
|
||||||
with their account, for a number of reasons:
|
|
||||||
|
|
||||||
* for use when logging in, as an alternative to the user id.
|
|
||||||
* in the case of email, as an alternative contact to help with account recovery.
|
|
||||||
* in the case of email, to receive notifications of missed messages.
|
|
||||||
|
|
||||||
Before an email address or phone number can be added to a user's account,
|
|
||||||
or before such an address is used to carry out a password-reset, Synapse must
|
|
||||||
confirm the operation with the owner of the email address or phone number.
|
|
||||||
It does this by sending an email or text giving the user a link or token to confirm
|
|
||||||
receipt. This process is known as '3pid verification'. ('3pid', or 'threepid',
|
|
||||||
stands for third-party identifier, and we use it to refer to external
|
|
||||||
identifiers such as email addresses and phone numbers.)
|
|
||||||
|
|
||||||
Previous versions of Synapse delegated the task of 3pid verification to an
|
|
||||||
identity server by default. In most cases this server is ``vector.im`` or
|
|
||||||
``matrix.org``.
|
|
||||||
|
|
||||||
In Synapse 1.4.0, for security and privacy reasons, the homeserver will no
|
|
||||||
longer delegate this task to an identity server by default. Instead,
|
|
||||||
the server administrator will need to explicitly decide how they would like the
|
|
||||||
verification messages to be sent.
|
|
||||||
|
|
||||||
In the medium term, the ``vector.im`` and ``matrix.org`` identity servers will
|
|
||||||
disable support for delegated 3pid verification entirely. However, in order to
|
|
||||||
ease the transition, they will retain the capability for a limited
|
|
||||||
period. Delegated email verification will be disabled on Monday 2nd December
|
|
||||||
2019 (giving roughly 2 months notice). Disabling delegated SMS verification
|
|
||||||
will follow some time after that once SMS verification support lands in
|
|
||||||
Synapse.
|
|
||||||
|
|
||||||
Once delegated 3pid verification support has been disabled in the ``vector.im`` and
|
|
||||||
``matrix.org`` identity servers, all Synapse versions that depend on those
|
|
||||||
instances will be unable to verify email and phone numbers through them. There
|
|
||||||
are no imminent plans to remove delegated 3pid verification from Sydent
|
|
||||||
generally. (Sydent is the identity server project that backs the ``vector.im`` and
|
|
||||||
``matrix.org`` instances).
|
|
||||||
|
|
||||||
Email
|
|
||||||
~~~~~
|
|
||||||
Following upgrade, to continue verifying email (e.g. as part of the
|
|
||||||
registration process), admins can either:-
|
|
||||||
|
|
||||||
* Configure Synapse to use an email server.
|
|
||||||
* Run or choose an identity server which allows delegated email verification
|
|
||||||
and delegate to it.
|
|
||||||
|
|
||||||
Configure SMTP in Synapse
|
|
||||||
+++++++++++++++++++++++++
|
|
||||||
|
|
||||||
To configure an SMTP server for Synapse, modify the configuration section
|
|
||||||
headed ``email``, and be sure to have at least the ``smtp_host, smtp_port``
|
|
||||||
and ``notif_from`` fields filled out.
|
|
||||||
|
|
||||||
You may also need to set ``smtp_user``, ``smtp_pass``, and
|
|
||||||
``require_transport_security``.
|
|
||||||
|
|
||||||
See the `sample configuration file <docs/sample_config.yaml>`_ for more details
|
|
||||||
on these settings.
|
|
||||||
|
|
||||||
Delegate email to an identity server
|
|
||||||
++++++++++++++++++++++++++++++++++++
|
|
||||||
|
|
||||||
Some admins will wish to continue using email verification as part of the
|
|
||||||
registration process, but will not immediately have an appropriate SMTP server
|
|
||||||
at hand.
|
|
||||||
|
|
||||||
To this end, we will continue to support email verification delegation via the
|
|
||||||
``vector.im`` and ``matrix.org`` identity servers for two months. Support for
|
|
||||||
delegated email verification will be disabled on Monday 2nd December.
|
|
||||||
|
|
||||||
The ``account_threepid_delegates`` dictionary defines whether the homeserver
|
|
||||||
should delegate an external server (typically an `identity server
|
|
||||||
<https://matrix.org/docs/spec/identity_service/r0.2.1>`_) to handle sending
|
|
||||||
confirmation messages via email and SMS.
|
|
||||||
|
|
||||||
So to delegate email verification, in ``homeserver.yaml``, set
|
|
||||||
``account_threepid_delegates.email`` to the base URL of an identity server. For
|
|
||||||
example:
|
|
||||||
|
|
||||||
.. code:: yaml
|
|
||||||
|
|
||||||
account_threepid_delegates:
|
|
||||||
email: https://example.com # Delegate email sending to example.com
|
|
||||||
|
|
||||||
Note that ``account_threepid_delegates.email`` replaces the deprecated
|
|
||||||
``email.trust_identity_server_for_password_resets``: if
|
|
||||||
``email.trust_identity_server_for_password_resets`` is set to ``true``, and
|
|
||||||
``account_threepid_delegates.email`` is not set, then the first entry in
|
|
||||||
``trusted_third_party_id_servers`` will be used as the
|
|
||||||
``account_threepid_delegate`` for email. This is to ensure compatibility with
|
|
||||||
existing Synapse installs that set up external server handling for these tasks
|
|
||||||
before v1.4.0. If ``email.trust_identity_server_for_password_resets`` is
|
|
||||||
``true`` and no trusted identity server domains are configured, Synapse will
|
|
||||||
report an error and refuse to start.
|
|
||||||
|
|
||||||
If ``email.trust_identity_server_for_password_resets`` is ``false`` or absent
|
|
||||||
and no ``email`` delegate is configured in ``account_threepid_delegates``,
|
|
||||||
then Synapse will send email verification messages itself, using the configured
|
|
||||||
SMTP server (see above).
|
|
||||||
that type.
|
|
||||||
|
|
||||||
Phone numbers
|
|
||||||
~~~~~~~~~~~~~
|
|
||||||
|
|
||||||
Synapse does not support phone-number verification itself, so the only way to
|
|
||||||
maintain the ability for users to add phone numbers to their accounts will be
|
|
||||||
by continuing to delegate phone number verification to the ``matrix.org`` and
|
|
||||||
``vector.im`` identity servers (or another identity server that supports SMS
|
|
||||||
sending).
|
|
||||||
|
|
||||||
The ``account_threepid_delegates`` dictionary defines whether the homeserver
|
|
||||||
should delegate an external server (typically an `identity server
|
|
||||||
<https://matrix.org/docs/spec/identity_service/r0.2.1>`_) to handle sending
|
|
||||||
confirmation messages via email and SMS.
|
|
||||||
|
|
||||||
So to delegate phone number verification, in ``homeserver.yaml``, set
|
|
||||||
``account_threepid_delegates.msisdn`` to the base URL of an identity
|
|
||||||
server. For example:
|
|
||||||
|
|
||||||
.. code:: yaml
|
|
||||||
|
|
||||||
account_threepid_delegates:
|
|
||||||
msisdn: https://example.com # Delegate sms sending to example.com
|
|
||||||
|
|
||||||
The ``matrix.org`` and ``vector.im`` identity servers will continue to support
|
|
||||||
delegated phone number verification via SMS until such time as it is possible
|
|
||||||
for admins to configure their servers to perform phone number verification
|
|
||||||
directly. More details will follow in a future release.
|
|
||||||
|
|
||||||
Rolling back to v1.3.1
|
|
||||||
----------------------
|
|
||||||
|
|
||||||
If you encounter problems with v1.4.0, it should be possible to roll back to
|
|
||||||
v1.3.1, subject to the following:
|
|
||||||
|
|
||||||
* The 'room statistics' engine was heavily reworked in this release (see
|
|
||||||
`#5971 <https://github.com/matrix-org/synapse/pull/5971>`_), including
|
|
||||||
significant changes to the database schema, which are not easily
|
|
||||||
reverted. This will cause the room statistics engine to stop updating when
|
|
||||||
you downgrade.
|
|
||||||
|
|
||||||
The room statistics are essentially unused in v1.3.1 (in future versions of
|
|
||||||
Synapse, they will be used to populate the room directory), so there should
|
|
||||||
be no loss of functionality. However, the statistics engine will write errors
|
|
||||||
to the logs, which can be avoided by setting the following in
|
|
||||||
`homeserver.yaml`:
|
|
||||||
|
|
||||||
.. code:: yaml
|
|
||||||
|
|
||||||
stats:
|
|
||||||
enabled: false
|
|
||||||
|
|
||||||
Don't forget to re-enable it when you upgrade again, in preparation for its
|
|
||||||
use in the room directory!
|
|
||||||
|
|
||||||
Upgrading to v1.2.0
|
|
||||||
===================
|
|
||||||
|
|
||||||
Some counter metrics have been renamed, with the old names deprecated. See
|
|
||||||
`the metrics documentation <docs/metrics-howto.md#renaming-of-metrics--deprecation-of-old-names-in-12>`_
|
|
||||||
for details.
|
|
||||||
|
|
||||||
Upgrading to v1.1.0
|
Upgrading to v1.1.0
|
||||||
===================
|
===================
|
||||||
@@ -901,19 +125,6 @@ server for password resets, set ``trust_identity_server_for_password_resets`` to
|
|||||||
See the `sample configuration file <docs/sample_config.yaml>`_
|
See the `sample configuration file <docs/sample_config.yaml>`_
|
||||||
for more details on these settings.
|
for more details on these settings.
|
||||||
|
|
||||||
New email templates
|
|
||||||
-------------------
|
|
||||||
Some new templates have been added to the default template directory for the purpose of the
|
|
||||||
homeserver sending its own password reset emails. If you have configured a custom
|
|
||||||
``template_dir`` in your Synapse config, these files will need to be added.
|
|
||||||
|
|
||||||
``password_reset.html`` and ``password_reset.txt`` are HTML and plain text templates
|
|
||||||
respectively that contain the contents of what will be emailed to the user upon attempting to
|
|
||||||
reset their password via email. ``password_reset_success.html`` and
|
|
||||||
``password_reset_failure.html`` are HTML files, the content of which (assuming no redirect
|
|
||||||
URL is set) will be shown to the user after they attempt to click the link in the email sent
|
|
||||||
to them.
|
|
||||||
|
|
||||||
Upgrading to v0.99.0
|
Upgrading to v0.99.0
|
||||||
====================
|
====================
|
||||||
|
|
||||||
|
|||||||
@@ -0,0 +1 @@
|
|||||||
|
Add information about nginx normalisation to reverse_proxy.rst. Contributed by @skalarproduktraum - thanks!
|
||||||
@@ -0,0 +1 @@
|
|||||||
|
Added opentracing and configuration options.
|
||||||
@@ -0,0 +1 @@
|
|||||||
|
Add ability to pull all locally stored events out of synapse that a particular user can see.
|
||||||
@@ -0,0 +1 @@
|
|||||||
|
Move logging code out of `synapse.util` and into `synapse.logging`.
|
||||||
@@ -0,0 +1 @@
|
|||||||
|
Fix 'utime went backwards' errors on daemonization.
|
||||||
@@ -0,0 +1 @@
|
|||||||
|
Add a blacklist file to the repo to blacklist certain sytests from failing CI.
|
||||||
@@ -0,0 +1 @@
|
|||||||
|
Add `sender` and `origin_server_ts` fields to `m.replace`.
|
||||||
@@ -0,0 +1 @@
|
|||||||
|
Make runtime errors surrounding password reset emails much clearer.
|
||||||
@@ -0,0 +1 @@
|
|||||||
|
Move logging code out of `synapse.util` and into `synapse.logging`.
|
||||||
@@ -0,0 +1 @@
|
|||||||
|
Base Docker image on a newer Alpine Linux version (3.8 -> 3.10).
|
||||||
@@ -0,0 +1 @@
|
|||||||
|
Add missing space in default logging file format generated by the Docker image.
|
||||||
@@ -0,0 +1 @@
|
|||||||
|
Various minor fixes to the federation request rate limiter.
|
||||||
@@ -0,0 +1 @@
|
|||||||
|
Remove dead code for persisting outgoing federation transactions.
|
||||||
@@ -0,0 +1 @@
|
|||||||
|
Add default push rule to ignore reactions.
|
||||||
@@ -0,0 +1 @@
|
|||||||
|
Remove support for the `invite_3pid_guest` configuration setting.
|
||||||
@@ -0,0 +1 @@
|
|||||||
|
Include the original event when asking for its relations.
|
||||||
@@ -0,0 +1 @@
|
|||||||
|
Add `lint.sh` to the scripts-dev folder which will run all linting steps required by CI.
|
||||||
@@ -0,0 +1 @@
|
|||||||
|
Move RegistrationHandler.get_or_create_user to test code.
|
||||||
@@ -0,0 +1 @@
|
|||||||
|
Add some more common python virtual-environment paths to the black exclusion list.
|
||||||
@@ -0,0 +1 @@
|
|||||||
|
Unblacklist some user_directory sytests.
|
||||||
@@ -0,0 +1 @@
|
|||||||
|
Fix requests to the `/store_invite` endpoint of identity servers being sent in the wrong format.
|
||||||
@@ -0,0 +1 @@
|
|||||||
|
Factor out some redundant code in the login implementation.
|
||||||
@@ -0,0 +1 @@
|
|||||||
|
Update ModuleApi to avoid register(generate_token=True).
|
||||||
@@ -0,0 +1 @@
|
|||||||
|
Remove access-token support from RegistrationHandler.register, and rename it.
|
||||||
@@ -0,0 +1 @@
|
|||||||
|
Remove access-token support from `RegistrationStore.register`, and rename it.
|
||||||
@@ -0,0 +1 @@
|
|||||||
|
Improve logging for auto-join when a new user is created.
|
||||||
@@ -0,0 +1 @@
|
|||||||
|
Fix newly-registered users not being able to lookup their own profile without joining a room.
|
||||||
@@ -0,0 +1 @@
|
|||||||
|
Remove unused and unnecessary check for FederationDeniedError in _exception_to_failure.
|
||||||
@@ -0,0 +1 @@
|
|||||||
|
--no-pep517 should be --no-use-pep517 in the documentation to setup the development environment.
|
||||||
@@ -0,0 +1 @@
|
|||||||
|
Fix bug in #5626 that prevented the original_event field from actually having the contents of the original event in a call to `/relations`.
|
||||||
@@ -0,0 +1 @@
|
|||||||
|
Fix a small typo in a code comment.
|
||||||
@@ -0,0 +1 @@
|
|||||||
|
Clean up exception handling around client access tokens.
|
||||||
@@ -0,0 +1 @@
|
|||||||
|
Add a mechanism for per-test homeserver configuration in the unit tests.
|
||||||
@@ -0,0 +1 @@
|
|||||||
|
Fix 3PID bind requests being sent to identity servers as `application/x-form-www-urlencoded` data, which is deprecated.
|
||||||
@@ -0,0 +1 @@
|
|||||||
|
Inline issue_access_token.
|
||||||
@@ -0,0 +1 @@
|
|||||||
|
Implement `session_lifetime` configuration option, after which access tokens will expire.
|
||||||
@@ -0,0 +1 @@
|
|||||||
|
Improvements to Postgres setup instructions. Contributed by @Lrizika - thanks!
|
||||||
@@ -0,0 +1 @@
|
|||||||
|
Update the sytest BuildKite configuration to checkout Synapse in `/src`.
|
||||||
@@ -0,0 +1 @@
|
|||||||
|
Add a `docker` type to the towncrier configuration.
|
||||||
@@ -0,0 +1 @@
|
|||||||
|
Return "This account has been deactivated" when a deactivated user tries to login.
|
||||||
@@ -0,0 +1 @@
|
|||||||
|
Use `M_USER_DEACTIVATED` instead of `M_UNKNOWN` for errcode when a deactivated user attempts to login.
|
||||||
@@ -15,6 +15,11 @@
|
|||||||
# limitations under the License.
|
# limitations under the License.
|
||||||
|
|
||||||
""" Starts a synapse client console. """
|
""" Starts a synapse client console. """
|
||||||
|
from __future__ import print_function
|
||||||
|
|
||||||
|
from twisted.internet import reactor, defer, threads
|
||||||
|
from http import TwistedHttpClient
|
||||||
|
|
||||||
import argparse
|
import argparse
|
||||||
import cmd
|
import cmd
|
||||||
import getpass
|
import getpass
|
||||||
@@ -23,20 +28,15 @@ import shlex
|
|||||||
import sys
|
import sys
|
||||||
import time
|
import time
|
||||||
import urllib
|
import urllib
|
||||||
from http import TwistedHttpClient
|
|
||||||
from typing import Optional
|
|
||||||
|
|
||||||
import nacl.encoding
|
|
||||||
import nacl.signing
|
|
||||||
import urlparse
|
import urlparse
|
||||||
from signedjson.sign import SignatureVerifyException, verify_signed_json
|
|
||||||
|
|
||||||
from twisted.internet import defer, reactor, threads
|
import nacl.signing
|
||||||
|
import nacl.encoding
|
||||||
|
|
||||||
|
from signedjson.sign import verify_signed_json, SignatureVerifyException
|
||||||
|
|
||||||
CONFIG_JSON = "cmdclient_config.json"
|
CONFIG_JSON = "cmdclient_config.json"
|
||||||
|
|
||||||
# TODO: The concept of trusted identity servers has been deprecated. This option and checks
|
|
||||||
# should be removed
|
|
||||||
TRUSTED_ID_SERVERS = ["localhost:8001"]
|
TRUSTED_ID_SERVERS = ["localhost:8001"]
|
||||||
|
|
||||||
|
|
||||||
@@ -93,7 +93,7 @@ class SynapseCmd(cmd.Cmd):
|
|||||||
return self.config["user"].split(":")[1]
|
return self.config["user"].split(":")[1]
|
||||||
|
|
||||||
def do_config(self, line):
|
def do_config(self, line):
|
||||||
"""Show the config for this client: "config"
|
""" Show the config for this client: "config"
|
||||||
Edit a key value mapping: "config key value" e.g. "config token 1234"
|
Edit a key value mapping: "config key value" e.g. "config token 1234"
|
||||||
Config variables:
|
Config variables:
|
||||||
user: The username to auth with.
|
user: The username to auth with.
|
||||||
@@ -268,7 +268,6 @@ class SynapseCmd(cmd.Cmd):
|
|||||||
|
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
def _do_emailrequest(self, args):
|
def _do_emailrequest(self, args):
|
||||||
# TODO: Update to use v2 Identity Service API endpoint
|
|
||||||
url = (
|
url = (
|
||||||
self._identityServerUrl()
|
self._identityServerUrl()
|
||||||
+ "/_matrix/identity/api/v1/validate/email/requestToken"
|
+ "/_matrix/identity/api/v1/validate/email/requestToken"
|
||||||
@@ -303,7 +302,6 @@ class SynapseCmd(cmd.Cmd):
|
|||||||
|
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
def _do_emailvalidate(self, args):
|
def _do_emailvalidate(self, args):
|
||||||
# TODO: Update to use v2 Identity Service API endpoint
|
|
||||||
url = (
|
url = (
|
||||||
self._identityServerUrl()
|
self._identityServerUrl()
|
||||||
+ "/_matrix/identity/api/v1/validate/email/submitToken"
|
+ "/_matrix/identity/api/v1/validate/email/submitToken"
|
||||||
@@ -332,7 +330,6 @@ class SynapseCmd(cmd.Cmd):
|
|||||||
|
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
def _do_3pidbind(self, args):
|
def _do_3pidbind(self, args):
|
||||||
# TODO: Update to use v2 Identity Service API endpoint
|
|
||||||
url = self._identityServerUrl() + "/_matrix/identity/api/v1/3pid/bind"
|
url = self._identityServerUrl() + "/_matrix/identity/api/v1/3pid/bind"
|
||||||
|
|
||||||
json_res = yield self.http_client.do_request(
|
json_res = yield self.http_client.do_request(
|
||||||
@@ -361,7 +358,7 @@ class SynapseCmd(cmd.Cmd):
|
|||||||
print(e)
|
print(e)
|
||||||
|
|
||||||
def do_topic(self, line):
|
def do_topic(self, line):
|
||||||
""" "topic [set|get] <roomid> [<newtopic>]"
|
""""topic [set|get] <roomid> [<newtopic>]"
|
||||||
Set the topic for a room: topic set <roomid> <newtopic>
|
Set the topic for a room: topic set <roomid> <newtopic>
|
||||||
Get the topic for a room: topic get <roomid>
|
Get the topic for a room: topic get <roomid>
|
||||||
"""
|
"""
|
||||||
@@ -401,7 +398,6 @@ class SynapseCmd(cmd.Cmd):
|
|||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
def _do_invite(self, roomid, userstring):
|
def _do_invite(self, roomid, userstring):
|
||||||
if not userstring.startswith("@") and self._is_on("complete_usernames"):
|
if not userstring.startswith("@") and self._is_on("complete_usernames"):
|
||||||
# TODO: Update to use v2 Identity Service API endpoint
|
|
||||||
url = self._identityServerUrl() + "/_matrix/identity/api/v1/lookup"
|
url = self._identityServerUrl() + "/_matrix/identity/api/v1/lookup"
|
||||||
|
|
||||||
json_res = yield self.http_client.do_request(
|
json_res = yield self.http_client.do_request(
|
||||||
@@ -411,7 +407,6 @@ class SynapseCmd(cmd.Cmd):
|
|||||||
mxid = None
|
mxid = None
|
||||||
|
|
||||||
if "mxid" in json_res and "signatures" in json_res:
|
if "mxid" in json_res and "signatures" in json_res:
|
||||||
# TODO: Update to use v2 Identity Service API endpoint
|
|
||||||
url = (
|
url = (
|
||||||
self._identityServerUrl()
|
self._identityServerUrl()
|
||||||
+ "/_matrix/identity/api/v1/pubkey/ed25519"
|
+ "/_matrix/identity/api/v1/pubkey/ed25519"
|
||||||
@@ -491,7 +486,7 @@ class SynapseCmd(cmd.Cmd):
|
|||||||
"list messages <roomid> from=END&to=START&limit=3"
|
"list messages <roomid> from=END&to=START&limit=3"
|
||||||
"""
|
"""
|
||||||
args = self._parse(line, ["type", "roomid", "qp"])
|
args = self._parse(line, ["type", "roomid", "qp"])
|
||||||
if "type" not in args or "roomid" not in args:
|
if not "type" in args or not "roomid" in args:
|
||||||
print("Must specify type and room ID.")
|
print("Must specify type and room ID.")
|
||||||
return
|
return
|
||||||
if args["type"] not in ["members", "messages"]:
|
if args["type"] not in ["members", "messages"]:
|
||||||
@@ -506,7 +501,7 @@ class SynapseCmd(cmd.Cmd):
|
|||||||
try:
|
try:
|
||||||
key_value = key_value_str.split("=")
|
key_value = key_value_str.split("=")
|
||||||
qp[key_value[0]] = key_value[1]
|
qp[key_value[0]] = key_value[1]
|
||||||
except Exception:
|
except:
|
||||||
print("Bad query param: %s" % key_value)
|
print("Bad query param: %s" % key_value)
|
||||||
return
|
return
|
||||||
|
|
||||||
@@ -583,7 +578,7 @@ class SynapseCmd(cmd.Cmd):
|
|||||||
parsed_url = urlparse.urlparse(args["path"])
|
parsed_url = urlparse.urlparse(args["path"])
|
||||||
qp.update(urlparse.parse_qs(parsed_url.query))
|
qp.update(urlparse.parse_qs(parsed_url.query))
|
||||||
args["path"] = parsed_url.path
|
args["path"] = parsed_url.path
|
||||||
except Exception:
|
except:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
reactor.callFromThread(
|
reactor.callFromThread(
|
||||||
@@ -608,15 +603,13 @@ class SynapseCmd(cmd.Cmd):
|
|||||||
|
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
def _do_event_stream(self, timeout):
|
def _do_event_stream(self, timeout):
|
||||||
res = yield defer.ensureDeferred(
|
res = yield self.http_client.get_json(
|
||||||
self.http_client.get_json(
|
self._url() + "/events",
|
||||||
self._url() + "/events",
|
{
|
||||||
{
|
"access_token": self._tok(),
|
||||||
"access_token": self._tok(),
|
"timeout": str(timeout),
|
||||||
"timeout": str(timeout),
|
"from": self.event_stream_token,
|
||||||
"from": self.event_stream_token,
|
},
|
||||||
},
|
|
||||||
)
|
|
||||||
)
|
)
|
||||||
print(json.dumps(res, indent=4))
|
print(json.dumps(res, indent=4))
|
||||||
|
|
||||||
@@ -691,7 +684,7 @@ class SynapseCmd(cmd.Cmd):
|
|||||||
self._do_presence_state(2, line)
|
self._do_presence_state(2, line)
|
||||||
|
|
||||||
def _parse(self, line, keys, force_keys=False):
|
def _parse(self, line, keys, force_keys=False):
|
||||||
"""Parses the given line.
|
""" Parses the given line.
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
line : The line to parse
|
line : The line to parse
|
||||||
@@ -719,10 +712,10 @@ class SynapseCmd(cmd.Cmd):
|
|||||||
method,
|
method,
|
||||||
path,
|
path,
|
||||||
data=None,
|
data=None,
|
||||||
query_params: Optional[dict] = None,
|
query_params={"access_token": None},
|
||||||
alt_text=None,
|
alt_text=None,
|
||||||
):
|
):
|
||||||
"""Runs an HTTP request and pretty prints the output.
|
""" Runs an HTTP request and pretty prints the output.
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
method: HTTP method
|
method: HTTP method
|
||||||
@@ -730,8 +723,6 @@ class SynapseCmd(cmd.Cmd):
|
|||||||
data: Raw JSON data if any
|
data: Raw JSON data if any
|
||||||
query_params: dict of query parameters to add to the url
|
query_params: dict of query parameters to add to the url
|
||||||
"""
|
"""
|
||||||
query_params = query_params or {"access_token": None}
|
|
||||||
|
|
||||||
url = self._url() + path
|
url = self._url() + path
|
||||||
if "access_token" in query_params:
|
if "access_token" in query_params:
|
||||||
query_params["access_token"] = self._tok()
|
query_params["access_token"] = self._tok()
|
||||||
@@ -774,10 +765,10 @@ def main(server_url, identity_server_url, username, token, config_path):
|
|||||||
syn_cmd.config = json.load(config)
|
syn_cmd.config = json.load(config)
|
||||||
try:
|
try:
|
||||||
http_client.verbose = "on" == syn_cmd.config["verbose"]
|
http_client.verbose = "on" == syn_cmd.config["verbose"]
|
||||||
except Exception:
|
except:
|
||||||
pass
|
pass
|
||||||
print("Loaded config from %s" % config_path)
|
print("Loaded config from %s" % config_path)
|
||||||
except Exception:
|
except:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
# Twisted-specific: Runs the command processor in Twisted's event loop
|
# Twisted-specific: Runs the command processor in Twisted's event loop
|
||||||
|
|||||||
+28
-35
@@ -13,21 +13,23 @@
|
|||||||
# See the License for the specific language governing permissions and
|
# See the License for the specific language governing permissions and
|
||||||
# limitations under the License.
|
# limitations under the License.
|
||||||
|
|
||||||
import json
|
from __future__ import print_function
|
||||||
import urllib
|
|
||||||
from pprint import pformat
|
|
||||||
from typing import Optional
|
|
||||||
|
|
||||||
from twisted.internet import defer, reactor
|
|
||||||
from twisted.web.client import Agent, readBody
|
from twisted.web.client import Agent, readBody
|
||||||
from twisted.web.http_headers import Headers
|
from twisted.web.http_headers import Headers
|
||||||
|
from twisted.internet import defer, reactor
|
||||||
|
|
||||||
|
from pprint import pformat
|
||||||
|
|
||||||
|
import json
|
||||||
|
import urllib
|
||||||
|
|
||||||
|
|
||||||
class HttpClient:
|
class HttpClient(object):
|
||||||
"""Interface for talking json over http"""
|
""" Interface for talking json over http
|
||||||
|
"""
|
||||||
|
|
||||||
def put_json(self, url, data):
|
def put_json(self, url, data):
|
||||||
"""Sends the specifed json data using PUT
|
""" Sends the specifed json data using PUT
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
url (str): The URL to PUT data to.
|
url (str): The URL to PUT data to.
|
||||||
@@ -41,7 +43,7 @@ class HttpClient:
|
|||||||
pass
|
pass
|
||||||
|
|
||||||
def get_json(self, url, args=None):
|
def get_json(self, url, args=None):
|
||||||
"""Gets some json from the given host homeserver and path
|
""" Gets some json from the given host homeserver and path
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
url (str): The URL to GET data from.
|
url (str): The URL to GET data from.
|
||||||
@@ -58,7 +60,7 @@ class HttpClient:
|
|||||||
|
|
||||||
|
|
||||||
class TwistedHttpClient(HttpClient):
|
class TwistedHttpClient(HttpClient):
|
||||||
"""Wrapper around the twisted HTTP client api.
|
""" Wrapper around the twisted HTTP client api.
|
||||||
|
|
||||||
Attributes:
|
Attributes:
|
||||||
agent (twisted.web.client.Agent): The twisted Agent used to send the
|
agent (twisted.web.client.Agent): The twisted Agent used to send the
|
||||||
@@ -86,9 +88,9 @@ class TwistedHttpClient(HttpClient):
|
|||||||
body = yield readBody(response)
|
body = yield readBody(response)
|
||||||
defer.returnValue(json.loads(body))
|
defer.returnValue(json.loads(body))
|
||||||
|
|
||||||
def _create_put_request(self, url, json_data, headers_dict: Optional[dict] = None):
|
def _create_put_request(self, url, json_data, headers_dict={}):
|
||||||
"""Wrapper of _create_request to issue a PUT request"""
|
""" Wrapper of _create_request to issue a PUT request
|
||||||
headers_dict = headers_dict or {}
|
"""
|
||||||
|
|
||||||
if "Content-Type" not in headers_dict:
|
if "Content-Type" not in headers_dict:
|
||||||
raise defer.error(RuntimeError("Must include Content-Type header for PUTs"))
|
raise defer.error(RuntimeError("Must include Content-Type header for PUTs"))
|
||||||
@@ -97,22 +99,15 @@ class TwistedHttpClient(HttpClient):
|
|||||||
"PUT", url, producer=_JsonProducer(json_data), headers_dict=headers_dict
|
"PUT", url, producer=_JsonProducer(json_data), headers_dict=headers_dict
|
||||||
)
|
)
|
||||||
|
|
||||||
def _create_get_request(self, url, headers_dict: Optional[dict] = None):
|
def _create_get_request(self, url, headers_dict={}):
|
||||||
"""Wrapper of _create_request to issue a GET request"""
|
""" Wrapper of _create_request to issue a GET request
|
||||||
return self._create_request("GET", url, headers_dict=headers_dict or {})
|
"""
|
||||||
|
return self._create_request("GET", url, headers_dict=headers_dict)
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
def do_request(
|
def do_request(
|
||||||
self,
|
self, method, url, data=None, qparams=None, jsonreq=True, headers={}
|
||||||
method,
|
|
||||||
url,
|
|
||||||
data=None,
|
|
||||||
qparams=None,
|
|
||||||
jsonreq=True,
|
|
||||||
headers: Optional[dict] = None,
|
|
||||||
):
|
):
|
||||||
headers = headers or {}
|
|
||||||
|
|
||||||
if qparams:
|
if qparams:
|
||||||
url = "%s?%s" % (url, urllib.urlencode(qparams, True))
|
url = "%s?%s" % (url, urllib.urlencode(qparams, True))
|
||||||
|
|
||||||
@@ -133,12 +128,9 @@ class TwistedHttpClient(HttpClient):
|
|||||||
defer.returnValue(json.loads(body))
|
defer.returnValue(json.loads(body))
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
def _create_request(
|
def _create_request(self, method, url, producer=None, headers_dict={}):
|
||||||
self, method, url, producer=None, headers_dict: Optional[dict] = None
|
""" Creates and sends a request to the given url
|
||||||
):
|
"""
|
||||||
"""Creates and sends a request to the given url"""
|
|
||||||
headers_dict = headers_dict or {}
|
|
||||||
|
|
||||||
headers_dict["User-Agent"] = ["Synapse Cmd Client"]
|
headers_dict["User-Agent"] = ["Synapse Cmd Client"]
|
||||||
|
|
||||||
retries_left = 5
|
retries_left = 5
|
||||||
@@ -177,7 +169,7 @@ class TwistedHttpClient(HttpClient):
|
|||||||
return d
|
return d
|
||||||
|
|
||||||
|
|
||||||
class _RawProducer:
|
class _RawProducer(object):
|
||||||
def __init__(self, data):
|
def __init__(self, data):
|
||||||
self.data = data
|
self.data = data
|
||||||
self.body = data
|
self.body = data
|
||||||
@@ -194,8 +186,9 @@ class _RawProducer:
|
|||||||
pass
|
pass
|
||||||
|
|
||||||
|
|
||||||
class _JsonProducer:
|
class _JsonProducer(object):
|
||||||
"""Used by the twisted http client to create the HTTP body from json"""
|
""" Used by the twisted http client to create the HTTP body from json
|
||||||
|
"""
|
||||||
|
|
||||||
def __init__(self, jsn):
|
def __init__(self, jsn):
|
||||||
self.data = jsn
|
self.data = jsn
|
||||||
|
|||||||
+24
-11
@@ -1,26 +1,39 @@
|
|||||||
|
|
||||||
# Synapse Docker
|
# Synapse Docker
|
||||||
|
|
||||||
### Configuration
|
FIXME: this is out-of-date as of
|
||||||
|
https://github.com/matrix-org/synapse/issues/5518. Contributions to bring it up
|
||||||
|
to date would be welcome.
|
||||||
|
|
||||||
|
### Automated configuration
|
||||||
|
|
||||||
|
It is recommended that you use Docker Compose to run your containers, including
|
||||||
|
this image and a Postgres server. A sample ``docker-compose.yml`` is provided,
|
||||||
|
including example labels for reverse proxying and other artifacts.
|
||||||
|
|
||||||
|
Read the section about environment variables and set at least mandatory variables,
|
||||||
|
then run the server:
|
||||||
|
|
||||||
|
```
|
||||||
|
docker-compose up -d
|
||||||
|
```
|
||||||
|
|
||||||
|
If secrets are not specified in the environment variables, they will be generated
|
||||||
|
as part of the startup. Please ensure these secrets are kept between launches of the
|
||||||
|
Docker container, as their loss may require users to log in again.
|
||||||
|
|
||||||
|
### Manual configuration
|
||||||
|
|
||||||
A sample ``docker-compose.yml`` is provided, including example labels for
|
A sample ``docker-compose.yml`` is provided, including example labels for
|
||||||
reverse proxying and other artifacts. The docker-compose file is an example,
|
reverse proxying and other artifacts. The docker-compose file is an example,
|
||||||
please comment/uncomment sections that are not suitable for your usecase.
|
please comment/uncomment sections that are not suitable for your usecase.
|
||||||
|
|
||||||
Specify a ``SYNAPSE_CONFIG_PATH``, preferably to a persistent path,
|
Specify a ``SYNAPSE_CONFIG_PATH``, preferably to a persistent path,
|
||||||
to use manual configuration.
|
to use manual configuration. To generate a fresh ``homeserver.yaml``, simply run:
|
||||||
|
|
||||||
To generate a fresh `homeserver.yaml`, you can use the `generate` command.
|
|
||||||
(See the [documentation](../../docker/README.md#generating-a-configuration-file)
|
|
||||||
for more information.) You will need to specify appropriate values for at least the
|
|
||||||
`SYNAPSE_SERVER_NAME` and `SYNAPSE_REPORT_STATS` environment variables. For example:
|
|
||||||
|
|
||||||
```
|
```
|
||||||
docker-compose run --rm -e SYNAPSE_SERVER_NAME=my.matrix.host -e SYNAPSE_REPORT_STATS=yes synapse generate
|
docker-compose run --rm -e SYNAPSE_SERVER_NAME=my.matrix.host synapse generate
|
||||||
```
|
```
|
||||||
|
|
||||||
(This will also generate necessary signing keys.)
|
|
||||||
|
|
||||||
Then, customize your configuration and run the server:
|
Then, customize your configuration and run the server:
|
||||||
|
|
||||||
```
|
```
|
||||||
|
|||||||
@@ -15,7 +15,11 @@ services:
|
|||||||
restart: unless-stopped
|
restart: unless-stopped
|
||||||
# See the readme for a full documentation of the environment settings
|
# See the readme for a full documentation of the environment settings
|
||||||
environment:
|
environment:
|
||||||
- SYNAPSE_CONFIG_PATH=/data/homeserver.yaml
|
- SYNAPSE_SERVER_NAME=my.matrix.host
|
||||||
|
- SYNAPSE_REPORT_STATS=no
|
||||||
|
- SYNAPSE_ENABLE_REGISTRATION=yes
|
||||||
|
- SYNAPSE_LOG_LEVEL=INFO
|
||||||
|
- POSTGRES_PASSWORD=changeme
|
||||||
volumes:
|
volumes:
|
||||||
# You may either store all the files in a local folder
|
# You may either store all the files in a local folder
|
||||||
- ./files:/data
|
- ./files:/data
|
||||||
@@ -31,33 +35,16 @@ services:
|
|||||||
- 8448:8448/tcp
|
- 8448:8448/tcp
|
||||||
# ... or use a reverse proxy, here is an example for traefik:
|
# ... or use a reverse proxy, here is an example for traefik:
|
||||||
labels:
|
labels:
|
||||||
# The following lines are valid for Traefik version 1.x:
|
|
||||||
- traefik.enable=true
|
- traefik.enable=true
|
||||||
- traefik.frontend.rule=Host:my.matrix.Host
|
- traefik.frontend.rule=Host:my.matrix.Host
|
||||||
- traefik.port=8008
|
- traefik.port=8008
|
||||||
# Alternatively, for Traefik version 2.0:
|
|
||||||
- traefik.enable=true
|
|
||||||
- traefik.http.routers.http-synapse.entryPoints=http
|
|
||||||
- traefik.http.routers.http-synapse.rule=Host(`my.matrix.host`)
|
|
||||||
- traefik.http.middlewares.https_redirect.redirectscheme.scheme=https
|
|
||||||
- traefik.http.middlewares.https_redirect.redirectscheme.permanent=true
|
|
||||||
- traefik.http.routers.http-synapse.middlewares=https_redirect
|
|
||||||
- traefik.http.routers.https-synapse.entryPoints=https
|
|
||||||
- traefik.http.routers.https-synapse.rule=Host(`my.matrix.host`)
|
|
||||||
- traefik.http.routers.https-synapse.service=synapse
|
|
||||||
- traefik.http.routers.https-synapse.tls=true
|
|
||||||
- traefik.http.services.synapse.loadbalancer.server.port=8008
|
|
||||||
- traefik.http.routers.https-synapse.tls.certResolver=le-ssl
|
|
||||||
|
|
||||||
db:
|
db:
|
||||||
image: docker.io/postgres:12-alpine
|
image: docker.io/postgres:10-alpine
|
||||||
# Change that password, of course!
|
# Change that password, of course!
|
||||||
environment:
|
environment:
|
||||||
- POSTGRES_USER=synapse
|
- POSTGRES_USER=synapse
|
||||||
- POSTGRES_PASSWORD=changeme
|
- POSTGRES_PASSWORD=changeme
|
||||||
# ensure the database gets created correctly
|
|
||||||
# https://github.com/matrix-org/synapse/blob/master/docs/postgres.md#set-up-database
|
|
||||||
- POSTGRES_INITDB_ARGS=--encoding=UTF-8 --lc-collate=C --lc-ctype=C
|
|
||||||
volumes:
|
volumes:
|
||||||
# You may store the database tables in a local folder..
|
# You may store the database tables in a local folder..
|
||||||
- ./schemas:/var/lib/postgresql/data
|
- ./schemas:/var/lib/postgresql/data
|
||||||
|
|||||||
@@ -63,7 +63,8 @@ class CursesStdIO:
|
|||||||
self.redraw()
|
self.redraw()
|
||||||
|
|
||||||
def redraw(self):
|
def redraw(self):
|
||||||
"""method for redisplaying lines based on internal list of lines"""
|
""" method for redisplaying lines
|
||||||
|
based on internal list of lines """
|
||||||
|
|
||||||
self.stdscr.clear()
|
self.stdscr.clear()
|
||||||
self.paintStatus(self.statusText)
|
self.paintStatus(self.statusText)
|
||||||
@@ -140,7 +141,7 @@ class CursesStdIO:
|
|||||||
curses.endwin()
|
curses.endwin()
|
||||||
|
|
||||||
|
|
||||||
class Callback:
|
class Callback(object):
|
||||||
def __init__(self, stdio):
|
def __init__(self, stdio):
|
||||||
self.stdio = stdio
|
self.stdio = stdio
|
||||||
|
|
||||||
|
|||||||
@@ -28,24 +28,27 @@ Currently assumes the local address is localhost:<port>
|
|||||||
"""
|
"""
|
||||||
|
|
||||||
|
|
||||||
|
from synapse.federation import ReplicationHandler
|
||||||
|
|
||||||
|
from synapse.federation.units import Pdu
|
||||||
|
|
||||||
|
from synapse.util import origin_from_ucid
|
||||||
|
|
||||||
|
from synapse.app.homeserver import SynapseHomeServer
|
||||||
|
|
||||||
|
# from synapse.logging.utils import log_function
|
||||||
|
|
||||||
|
from twisted.internet import reactor, defer
|
||||||
|
from twisted.python import log
|
||||||
|
|
||||||
import argparse
|
import argparse
|
||||||
import curses.wrapper
|
|
||||||
import json
|
import json
|
||||||
import logging
|
import logging
|
||||||
import os
|
import os
|
||||||
import re
|
import re
|
||||||
|
|
||||||
import cursesio
|
import cursesio
|
||||||
|
import curses.wrapper
|
||||||
from twisted.internet import defer, reactor
|
|
||||||
from twisted.python import log
|
|
||||||
|
|
||||||
from synapse.app.homeserver import SynapseHomeServer
|
|
||||||
from synapse.federation import ReplicationHandler
|
|
||||||
from synapse.federation.units import Pdu
|
|
||||||
from synapse.util import origin_from_ucid
|
|
||||||
|
|
||||||
# from synapse.logging.utils import log_function
|
|
||||||
|
|
||||||
|
|
||||||
logger = logging.getLogger("example")
|
logger = logging.getLogger("example")
|
||||||
@@ -55,8 +58,8 @@ def excpetion_errback(failure):
|
|||||||
logging.exception(failure)
|
logging.exception(failure)
|
||||||
|
|
||||||
|
|
||||||
class InputOutput:
|
class InputOutput(object):
|
||||||
"""This is responsible for basic I/O so that a user can interact with
|
""" This is responsible for basic I/O so that a user can interact with
|
||||||
the example app.
|
the example app.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
@@ -68,19 +71,20 @@ class InputOutput:
|
|||||||
self.server = server
|
self.server = server
|
||||||
|
|
||||||
def on_line(self, line):
|
def on_line(self, line):
|
||||||
"""This is where we process commands."""
|
""" This is where we process commands.
|
||||||
|
"""
|
||||||
|
|
||||||
try:
|
try:
|
||||||
m = re.match(r"^join (\S+)$", line)
|
m = re.match("^join (\S+)$", line)
|
||||||
if m:
|
if m:
|
||||||
# The `sender` wants to join a room.
|
# The `sender` wants to join a room.
|
||||||
(room_name,) = m.groups()
|
room_name, = m.groups()
|
||||||
self.print_line("%s joining %s" % (self.user, room_name))
|
self.print_line("%s joining %s" % (self.user, room_name))
|
||||||
self.server.join_room(room_name, self.user, self.user)
|
self.server.join_room(room_name, self.user, self.user)
|
||||||
# self.print_line("OK.")
|
# self.print_line("OK.")
|
||||||
return
|
return
|
||||||
|
|
||||||
m = re.match(r"^invite (\S+) (\S+)$", line)
|
m = re.match("^invite (\S+) (\S+)$", line)
|
||||||
if m:
|
if m:
|
||||||
# `sender` wants to invite someone to a room
|
# `sender` wants to invite someone to a room
|
||||||
room_name, invitee = m.groups()
|
room_name, invitee = m.groups()
|
||||||
@@ -89,7 +93,7 @@ class InputOutput:
|
|||||||
# self.print_line("OK.")
|
# self.print_line("OK.")
|
||||||
return
|
return
|
||||||
|
|
||||||
m = re.match(r"^send (\S+) (.*)$", line)
|
m = re.match("^send (\S+) (.*)$", line)
|
||||||
if m:
|
if m:
|
||||||
# `sender` wants to message a room
|
# `sender` wants to message a room
|
||||||
room_name, body = m.groups()
|
room_name, body = m.groups()
|
||||||
@@ -98,10 +102,10 @@ class InputOutput:
|
|||||||
# self.print_line("OK.")
|
# self.print_line("OK.")
|
||||||
return
|
return
|
||||||
|
|
||||||
m = re.match(r"^backfill (\S+)$", line)
|
m = re.match("^backfill (\S+)$", line)
|
||||||
if m:
|
if m:
|
||||||
# we want to backfill a room
|
# we want to backfill a room
|
||||||
(room_name,) = m.groups()
|
room_name, = m.groups()
|
||||||
self.print_line("backfill %s" % room_name)
|
self.print_line("backfill %s" % room_name)
|
||||||
self.server.backfill(room_name)
|
self.server.backfill(room_name)
|
||||||
return
|
return
|
||||||
@@ -131,8 +135,8 @@ class IOLoggerHandler(logging.Handler):
|
|||||||
self.io.print_log(msg)
|
self.io.print_log(msg)
|
||||||
|
|
||||||
|
|
||||||
class Room:
|
class Room(object):
|
||||||
"""Used to store (in memory) the current membership state of a room, and
|
""" Used to store (in memory) the current membership state of a room, and
|
||||||
which home servers we should send PDUs associated with the room to.
|
which home servers we should send PDUs associated with the room to.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
@@ -147,7 +151,8 @@ class Room:
|
|||||||
self.have_got_metadata = False
|
self.have_got_metadata = False
|
||||||
|
|
||||||
def add_participant(self, participant):
|
def add_participant(self, participant):
|
||||||
"""Someone has joined the room"""
|
""" Someone has joined the room
|
||||||
|
"""
|
||||||
self.participants.add(participant)
|
self.participants.add(participant)
|
||||||
self.invited.discard(participant)
|
self.invited.discard(participant)
|
||||||
|
|
||||||
@@ -158,13 +163,14 @@ class Room:
|
|||||||
self.oldest_server = server
|
self.oldest_server = server
|
||||||
|
|
||||||
def add_invited(self, invitee):
|
def add_invited(self, invitee):
|
||||||
"""Someone has been invited to the room"""
|
""" Someone has been invited to the room
|
||||||
|
"""
|
||||||
self.invited.add(invitee)
|
self.invited.add(invitee)
|
||||||
self.servers.add(origin_from_ucid(invitee))
|
self.servers.add(origin_from_ucid(invitee))
|
||||||
|
|
||||||
|
|
||||||
class HomeServer(ReplicationHandler):
|
class HomeServer(ReplicationHandler):
|
||||||
"""A very basic home server implentation that allows people to join a
|
""" A very basic home server implentation that allows people to join a
|
||||||
room and then invite other people.
|
room and then invite other people.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
@@ -178,7 +184,8 @@ class HomeServer(ReplicationHandler):
|
|||||||
self.output = output
|
self.output = output
|
||||||
|
|
||||||
def on_receive_pdu(self, pdu):
|
def on_receive_pdu(self, pdu):
|
||||||
"""We just received a PDU"""
|
""" We just received a PDU
|
||||||
|
"""
|
||||||
pdu_type = pdu.pdu_type
|
pdu_type = pdu.pdu_type
|
||||||
|
|
||||||
if pdu_type == "sy.room.message":
|
if pdu_type == "sy.room.message":
|
||||||
@@ -194,21 +201,34 @@ class HomeServer(ReplicationHandler):
|
|||||||
% (pdu.context, pdu.pdu_type, json.dumps(pdu.content))
|
% (pdu.context, pdu.pdu_type, json.dumps(pdu.content))
|
||||||
)
|
)
|
||||||
|
|
||||||
|
# def on_state_change(self, pdu):
|
||||||
|
##self.output.print_line("#%s (state) %s *** %s" %
|
||||||
|
##(pdu.context, pdu.state_key, pdu.pdu_type)
|
||||||
|
##)
|
||||||
|
|
||||||
|
# if "joinee" in pdu.content:
|
||||||
|
# self._on_join(pdu.context, pdu.content["joinee"])
|
||||||
|
# elif "invitee" in pdu.content:
|
||||||
|
# self._on_invite(pdu.origin, pdu.context, pdu.content["invitee"])
|
||||||
|
|
||||||
def _on_message(self, pdu):
|
def _on_message(self, pdu):
|
||||||
"""We received a message"""
|
""" We received a message
|
||||||
|
"""
|
||||||
self.output.print_line(
|
self.output.print_line(
|
||||||
"#%s %s %s" % (pdu.context, pdu.content["sender"], pdu.content["body"])
|
"#%s %s %s" % (pdu.context, pdu.content["sender"], pdu.content["body"])
|
||||||
)
|
)
|
||||||
|
|
||||||
def _on_join(self, context, joinee):
|
def _on_join(self, context, joinee):
|
||||||
"""Someone has joined a room, either a remote user or a local user"""
|
""" Someone has joined a room, either a remote user or a local user
|
||||||
|
"""
|
||||||
room = self._get_or_create_room(context)
|
room = self._get_or_create_room(context)
|
||||||
room.add_participant(joinee)
|
room.add_participant(joinee)
|
||||||
|
|
||||||
self.output.print_line("#%s %s %s" % (context, joinee, "*** JOINED"))
|
self.output.print_line("#%s %s %s" % (context, joinee, "*** JOINED"))
|
||||||
|
|
||||||
def _on_invite(self, origin, context, invitee):
|
def _on_invite(self, origin, context, invitee):
|
||||||
"""Someone has been invited"""
|
""" Someone has been invited
|
||||||
|
"""
|
||||||
room = self._get_or_create_room(context)
|
room = self._get_or_create_room(context)
|
||||||
room.add_invited(invitee)
|
room.add_invited(invitee)
|
||||||
|
|
||||||
@@ -221,7 +241,8 @@ class HomeServer(ReplicationHandler):
|
|||||||
|
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
def send_message(self, room_name, sender, body):
|
def send_message(self, room_name, sender, body):
|
||||||
"""Send a message to a room!"""
|
""" Send a message to a room!
|
||||||
|
"""
|
||||||
destinations = yield self.get_servers_for_context(room_name)
|
destinations = yield self.get_servers_for_context(room_name)
|
||||||
|
|
||||||
try:
|
try:
|
||||||
@@ -239,7 +260,8 @@ class HomeServer(ReplicationHandler):
|
|||||||
|
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
def join_room(self, room_name, sender, joinee):
|
def join_room(self, room_name, sender, joinee):
|
||||||
"""Join a room!"""
|
""" Join a room!
|
||||||
|
"""
|
||||||
self._on_join(room_name, joinee)
|
self._on_join(room_name, joinee)
|
||||||
|
|
||||||
destinations = yield self.get_servers_for_context(room_name)
|
destinations = yield self.get_servers_for_context(room_name)
|
||||||
@@ -260,7 +282,8 @@ class HomeServer(ReplicationHandler):
|
|||||||
|
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
def invite_to_room(self, room_name, sender, invitee):
|
def invite_to_room(self, room_name, sender, invitee):
|
||||||
"""Invite someone to a room!"""
|
""" Invite someone to a room!
|
||||||
|
"""
|
||||||
self._on_invite(self.server_name, room_name, invitee)
|
self._on_invite(self.server_name, room_name, invitee)
|
||||||
|
|
||||||
destinations = yield self.get_servers_for_context(room_name)
|
destinations = yield self.get_servers_for_context(room_name)
|
||||||
@@ -291,7 +314,7 @@ class HomeServer(ReplicationHandler):
|
|||||||
return self.replication_layer.backfill(dest, room_name, limit)
|
return self.replication_layer.backfill(dest, room_name, limit)
|
||||||
|
|
||||||
def _get_room_remote_servers(self, room_name):
|
def _get_room_remote_servers(self, room_name):
|
||||||
return list(self.joined_rooms.setdefault(room_name).servers)
|
return [i for i in self.joined_rooms.setdefault(room_name).servers]
|
||||||
|
|
||||||
def _get_or_create_room(self, room_name):
|
def _get_or_create_room(self, room_name):
|
||||||
return self.joined_rooms.setdefault(room_name, Room(room_name))
|
return self.joined_rooms.setdefault(room_name, Room(room_name))
|
||||||
@@ -311,12 +334,12 @@ def main(stdscr):
|
|||||||
user = args.user
|
user = args.user
|
||||||
server_name = origin_from_ucid(user)
|
server_name = origin_from_ucid(user)
|
||||||
|
|
||||||
# Set up logging
|
## Set up logging ##
|
||||||
|
|
||||||
root_logger = logging.getLogger()
|
root_logger = logging.getLogger()
|
||||||
|
|
||||||
formatter = logging.Formatter(
|
formatter = logging.Formatter(
|
||||||
"%(asctime)s - %(name)s - %(lineno)d - %(levelname)s - %(message)s"
|
"%(asctime)s - %(name)s - %(lineno)d - " "%(levelname)s - %(message)s"
|
||||||
)
|
)
|
||||||
if not os.path.exists("logs"):
|
if not os.path.exists("logs"):
|
||||||
os.makedirs("logs")
|
os.makedirs("logs")
|
||||||
@@ -331,7 +354,7 @@ def main(stdscr):
|
|||||||
observer = log.PythonLoggingObserver()
|
observer = log.PythonLoggingObserver()
|
||||||
observer.start()
|
observer.start()
|
||||||
|
|
||||||
# Set up synapse server
|
## Set up synapse server
|
||||||
|
|
||||||
curses_stdio = cursesio.CursesStdIO(stdscr)
|
curses_stdio = cursesio.CursesStdIO(stdscr)
|
||||||
input_output = InputOutput(curses_stdio, user)
|
input_output = InputOutput(curses_stdio, user)
|
||||||
@@ -345,16 +368,16 @@ def main(stdscr):
|
|||||||
|
|
||||||
input_output.set_home_server(hs)
|
input_output.set_home_server(hs)
|
||||||
|
|
||||||
# Add input_output logger
|
## Add input_output logger
|
||||||
io_logger = IOLoggerHandler(input_output)
|
io_logger = IOLoggerHandler(input_output)
|
||||||
io_logger.setFormatter(formatter)
|
io_logger.setFormatter(formatter)
|
||||||
root_logger.addHandler(io_logger)
|
root_logger.addHandler(io_logger)
|
||||||
|
|
||||||
# Start!
|
## Start! ##
|
||||||
|
|
||||||
try:
|
try:
|
||||||
port = int(server_name.split(":")[1])
|
port = int(server_name.split(":")[1])
|
||||||
except Exception:
|
except:
|
||||||
port = 12345
|
port = 12345
|
||||||
|
|
||||||
app_hs.get_http_server().start_listening(port)
|
app_hs.get_http_server().start_listening(port)
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
# Using the Synapse Grafana dashboard
|
# Using the Synapse Grafana dashboard
|
||||||
|
|
||||||
0. Set up Prometheus and Grafana. Out of scope for this readme. Useful documentation about using Grafana with Prometheus: http://docs.grafana.org/features/datasources/prometheus/
|
0. Set up Prometheus and Grafana. Out of scope for this readme. Useful documentation about using Grafana with Prometheus: http://docs.grafana.org/features/datasources/prometheus/
|
||||||
1. Have your Prometheus scrape your Synapse. https://github.com/matrix-org/synapse/blob/master/docs/metrics-howto.md
|
1. Have your Prometheus scrape your Synapse. https://github.com/matrix-org/synapse/blob/master/docs/metrics-howto.rst
|
||||||
2. Import dashboard into Grafana. Download `synapse.json`. Import it to Grafana and select the correct Prometheus datasource. http://docs.grafana.org/reference/export_import/
|
2. Import dashboard into Grafana. Download `synapse.json`. Import it to Grafana and select the correct Prometheus datasource. http://docs.grafana.org/reference/export_import/
|
||||||
3. Set up required recording rules. https://github.com/matrix-org/synapse/tree/master/contrib/prometheus
|
3. Set up additional recording rules
|
||||||
|
|||||||
+134
-881
File diff suppressed because it is too large
Load Diff
+12
-9
@@ -1,10 +1,4 @@
|
|||||||
import argparse
|
from __future__ import print_function
|
||||||
import cgi
|
|
||||||
import datetime
|
|
||||||
import json
|
|
||||||
|
|
||||||
import pydot
|
|
||||||
import urllib2
|
|
||||||
|
|
||||||
# Copyright 2014-2016 OpenMarket Ltd
|
# Copyright 2014-2016 OpenMarket Ltd
|
||||||
#
|
#
|
||||||
@@ -21,6 +15,15 @@ import urllib2
|
|||||||
# limitations under the License.
|
# limitations under the License.
|
||||||
|
|
||||||
|
|
||||||
|
import sqlite3
|
||||||
|
import pydot
|
||||||
|
import cgi
|
||||||
|
import json
|
||||||
|
import datetime
|
||||||
|
import argparse
|
||||||
|
import urllib2
|
||||||
|
|
||||||
|
|
||||||
def make_name(pdu_id, origin):
|
def make_name(pdu_id, origin):
|
||||||
return "%s@%s" % (pdu_id, origin)
|
return "%s@%s" % (pdu_id, origin)
|
||||||
|
|
||||||
@@ -30,7 +33,7 @@ def make_graph(pdus, room, filename_prefix):
|
|||||||
node_map = {}
|
node_map = {}
|
||||||
|
|
||||||
origins = set()
|
origins = set()
|
||||||
colors = {"red", "green", "blue", "yellow", "purple"}
|
colors = set(("red", "green", "blue", "yellow", "purple"))
|
||||||
|
|
||||||
for pdu in pdus:
|
for pdu in pdus:
|
||||||
origins.add(pdu.get("origin"))
|
origins.add(pdu.get("origin"))
|
||||||
@@ -46,7 +49,7 @@ def make_graph(pdus, room, filename_prefix):
|
|||||||
try:
|
try:
|
||||||
c = colors.pop()
|
c = colors.pop()
|
||||||
color_map[o] = c
|
color_map[o] = c
|
||||||
except Exception:
|
except:
|
||||||
print("Run out of colours!")
|
print("Run out of colours!")
|
||||||
color_map[o] = "black"
|
color_map[o] = "black"
|
||||||
|
|
||||||
|
|||||||
@@ -13,13 +13,12 @@
|
|||||||
# limitations under the License.
|
# limitations under the License.
|
||||||
|
|
||||||
|
|
||||||
import argparse
|
|
||||||
import cgi
|
|
||||||
import datetime
|
|
||||||
import json
|
|
||||||
import sqlite3
|
import sqlite3
|
||||||
|
|
||||||
import pydot
|
import pydot
|
||||||
|
import cgi
|
||||||
|
import json
|
||||||
|
import datetime
|
||||||
|
import argparse
|
||||||
|
|
||||||
from synapse.events import FrozenEvent
|
from synapse.events import FrozenEvent
|
||||||
from synapse.util.frozenutils import unfreeze
|
from synapse.util.frozenutils import unfreeze
|
||||||
@@ -37,7 +36,7 @@ def make_graph(db_name, room_id, file_prefix, limit):
|
|||||||
args = [room_id]
|
args = [room_id]
|
||||||
|
|
||||||
if limit:
|
if limit:
|
||||||
sql += " ORDER BY topological_ordering DESC, stream_ordering DESC LIMIT ?"
|
sql += " ORDER BY topological_ordering DESC, stream_ordering DESC " "LIMIT ?"
|
||||||
|
|
||||||
args.append(limit)
|
args.append(limit)
|
||||||
|
|
||||||
@@ -54,7 +53,7 @@ def make_graph(db_name, room_id, file_prefix, limit):
|
|||||||
|
|
||||||
for event in events:
|
for event in events:
|
||||||
c = conn.execute(
|
c = conn.execute(
|
||||||
"SELECT state_group FROM event_to_state_groups WHERE event_id = ?",
|
"SELECT state_group FROM event_to_state_groups " "WHERE event_id = ?",
|
||||||
(event.event_id,),
|
(event.event_id,),
|
||||||
)
|
)
|
||||||
|
|
||||||
@@ -99,7 +98,7 @@ def make_graph(db_name, room_id, file_prefix, limit):
|
|||||||
for prev_id, _ in event.prev_events:
|
for prev_id, _ in event.prev_events:
|
||||||
try:
|
try:
|
||||||
end_node = node_map[prev_id]
|
end_node = node_map[prev_id]
|
||||||
except Exception:
|
except:
|
||||||
end_node = pydot.Node(name=prev_id, label="<<b>%s</b>>" % (prev_id,))
|
end_node = pydot.Node(name=prev_id, label="<<b>%s</b>>" % (prev_id,))
|
||||||
|
|
||||||
node_map[prev_id] = end_node
|
node_map[prev_id] = end_node
|
||||||
|
|||||||
+15
-11
@@ -1,12 +1,4 @@
|
|||||||
import argparse
|
from __future__ import print_function
|
||||||
import cgi
|
|
||||||
import datetime
|
|
||||||
|
|
||||||
import pydot
|
|
||||||
import simplejson as json
|
|
||||||
|
|
||||||
from synapse.events import FrozenEvent
|
|
||||||
from synapse.util.frozenutils import unfreeze
|
|
||||||
|
|
||||||
# Copyright 2016 OpenMarket Ltd
|
# Copyright 2016 OpenMarket Ltd
|
||||||
#
|
#
|
||||||
@@ -23,6 +15,18 @@ from synapse.util.frozenutils import unfreeze
|
|||||||
# limitations under the License.
|
# limitations under the License.
|
||||||
|
|
||||||
|
|
||||||
|
import pydot
|
||||||
|
import cgi
|
||||||
|
import simplejson as json
|
||||||
|
import datetime
|
||||||
|
import argparse
|
||||||
|
|
||||||
|
from synapse.events import FrozenEvent
|
||||||
|
from synapse.util.frozenutils import unfreeze
|
||||||
|
|
||||||
|
from six import string_types
|
||||||
|
|
||||||
|
|
||||||
def make_graph(file_name, room_id, file_prefix, limit):
|
def make_graph(file_name, room_id, file_prefix, limit):
|
||||||
print("Reading lines")
|
print("Reading lines")
|
||||||
with open(file_name) as f:
|
with open(file_name) as f:
|
||||||
@@ -58,7 +62,7 @@ def make_graph(file_name, room_id, file_prefix, limit):
|
|||||||
for key, value in unfreeze(event.get_dict()["content"]).items():
|
for key, value in unfreeze(event.get_dict()["content"]).items():
|
||||||
if value is None:
|
if value is None:
|
||||||
value = "<null>"
|
value = "<null>"
|
||||||
elif isinstance(value, str):
|
elif isinstance(value, string_types):
|
||||||
pass
|
pass
|
||||||
else:
|
else:
|
||||||
value = json.dumps(value)
|
value = json.dumps(value)
|
||||||
@@ -104,7 +108,7 @@ def make_graph(file_name, room_id, file_prefix, limit):
|
|||||||
for prev_id, _ in event.prev_events:
|
for prev_id, _ in event.prev_events:
|
||||||
try:
|
try:
|
||||||
end_node = node_map[prev_id]
|
end_node = node_map[prev_id]
|
||||||
except Exception:
|
except:
|
||||||
end_node = pydot.Node(name=prev_id, label="<<b>%s</b>>" % (prev_id,))
|
end_node = pydot.Node(name=prev_id, label="<<b>%s</b>>" % (prev_id,))
|
||||||
|
|
||||||
node_map[prev_id] = end_node
|
node_map[prev_id] = end_node
|
||||||
|
|||||||
@@ -10,15 +10,17 @@ the bridge.
|
|||||||
Requires:
|
Requires:
|
||||||
npm install jquery jsdom
|
npm install jquery jsdom
|
||||||
"""
|
"""
|
||||||
import json
|
from __future__ import print_function
|
||||||
import subprocess
|
|
||||||
import time
|
|
||||||
|
|
||||||
import gevent
|
import gevent
|
||||||
import grequests
|
import grequests
|
||||||
from BeautifulSoup import BeautifulSoup
|
from BeautifulSoup import BeautifulSoup
|
||||||
|
import json
|
||||||
|
import urllib
|
||||||
|
import subprocess
|
||||||
|
import time
|
||||||
|
|
||||||
ACCESS_TOKEN = ""
|
# ACCESS_TOKEN="" #
|
||||||
|
|
||||||
MATRIXBASE = "https://matrix.org/_matrix/client/api/v1/"
|
MATRIXBASE = "https://matrix.org/_matrix/client/api/v1/"
|
||||||
MYUSERNAME = "@davetest:matrix.org"
|
MYUSERNAME = "@davetest:matrix.org"
|
||||||
@@ -193,12 +195,15 @@ class TrivialXmppClient:
|
|||||||
time.sleep(7)
|
time.sleep(7)
|
||||||
print("SSRC spammer started")
|
print("SSRC spammer started")
|
||||||
while self.running:
|
while self.running:
|
||||||
ssrcMsg = "<presence to='%(tojid)s' xmlns='jabber:client'><x xmlns='http://jabber.org/protocol/muc'/><c xmlns='http://jabber.org/protocol/caps' hash='sha-1' node='http://jitsi.org/jitsimeet' ver='0WkSdhFnAUxrz4ImQQLdB80GFlE='/><nick xmlns='http://jabber.org/protocol/nick'>%(nick)s</nick><stats xmlns='http://jitsi.org/jitmeet/stats'><stat name='bitrate_download' value='175'/><stat name='bitrate_upload' value='176'/><stat name='packetLoss_total' value='0'/><stat name='packetLoss_download' value='0'/><stat name='packetLoss_upload' value='0'/></stats><media xmlns='http://estos.de/ns/mjs'><source type='audio' ssrc='%(assrc)s' direction='sendre'/><source type='video' ssrc='%(vssrc)s' direction='sendre'/></media></presence>" % {
|
ssrcMsg = (
|
||||||
"tojid": "%s@%s/%s" % (ROOMNAME, ROOMDOMAIN, self.shortJid),
|
"<presence to='%(tojid)s' xmlns='jabber:client'><x xmlns='http://jabber.org/protocol/muc'/><c xmlns='http://jabber.org/protocol/caps' hash='sha-1' node='http://jitsi.org/jitsimeet' ver='0WkSdhFnAUxrz4ImQQLdB80GFlE='/><nick xmlns='http://jabber.org/protocol/nick'>%(nick)s</nick><stats xmlns='http://jitsi.org/jitmeet/stats'><stat name='bitrate_download' value='175'/><stat name='bitrate_upload' value='176'/><stat name='packetLoss_total' value='0'/><stat name='packetLoss_download' value='0'/><stat name='packetLoss_upload' value='0'/></stats><media xmlns='http://estos.de/ns/mjs'><source type='audio' ssrc='%(assrc)s' direction='sendre'/><source type='video' ssrc='%(vssrc)s' direction='sendre'/></media></presence>"
|
||||||
"nick": self.userId,
|
% {
|
||||||
"assrc": self.ssrcs["audio"],
|
"tojid": "%s@%s/%s" % (ROOMNAME, ROOMDOMAIN, self.shortJid),
|
||||||
"vssrc": self.ssrcs["video"],
|
"nick": self.userId,
|
||||||
}
|
"assrc": self.ssrcs["audio"],
|
||||||
|
"vssrc": self.ssrcs["video"],
|
||||||
|
}
|
||||||
|
)
|
||||||
res = self.sendIq(ssrcMsg)
|
res = self.sendIq(ssrcMsg)
|
||||||
print("reply from ssrc announce: ", res)
|
print("reply from ssrc announce: ", res)
|
||||||
time.sleep(10)
|
time.sleep(10)
|
||||||
|
|||||||
@@ -20,7 +20,6 @@ Add a new job to the main prometheus.conf file:
|
|||||||
```
|
```
|
||||||
|
|
||||||
### for Prometheus v2
|
### for Prometheus v2
|
||||||
|
|
||||||
Add a new job to the main prometheus.yml file:
|
Add a new job to the main prometheus.yml file:
|
||||||
|
|
||||||
```yaml
|
```yaml
|
||||||
@@ -30,17 +29,14 @@ Add a new job to the main prometheus.yml file:
|
|||||||
scheme: "https"
|
scheme: "https"
|
||||||
|
|
||||||
static_configs:
|
static_configs:
|
||||||
- targets: ["my.server.here:port"]
|
- targets: ['SERVER.LOCATION:PORT']
|
||||||
```
|
```
|
||||||
|
|
||||||
An example of a Prometheus configuration with workers can be found in
|
|
||||||
[metrics-howto.md](https://github.com/matrix-org/synapse/blob/master/docs/metrics-howto.md).
|
|
||||||
|
|
||||||
To use `synapse.rules` add
|
To use `synapse.rules` add
|
||||||
|
|
||||||
```yaml
|
```yaml
|
||||||
rule_files:
|
rule_files:
|
||||||
- "/PATH/TO/synapse-v2.rules"
|
- "/PATH/TO/synapse-v2.rules"
|
||||||
```
|
```
|
||||||
|
|
||||||
Metrics are disabled by default when running synapse; they must be enabled
|
Metrics are disabled by default when running synapse; they must be enabled
|
||||||
|
|||||||
@@ -9,7 +9,7 @@
|
|||||||
new PromConsole.Graph({
|
new PromConsole.Graph({
|
||||||
node: document.querySelector("#process_resource_utime"),
|
node: document.querySelector("#process_resource_utime"),
|
||||||
expr: "rate(process_cpu_seconds_total[2m]) * 100",
|
expr: "rate(process_cpu_seconds_total[2m]) * 100",
|
||||||
name: "[[job]]-[[index]]",
|
name: "[[job]]",
|
||||||
min: 0,
|
min: 0,
|
||||||
max: 100,
|
max: 100,
|
||||||
renderer: "line",
|
renderer: "line",
|
||||||
@@ -22,12 +22,12 @@ new PromConsole.Graph({
|
|||||||
</script>
|
</script>
|
||||||
|
|
||||||
<h3>Memory</h3>
|
<h3>Memory</h3>
|
||||||
<div id="process_resident_memory_bytes"></div>
|
<div id="process_resource_maxrss"></div>
|
||||||
<script>
|
<script>
|
||||||
new PromConsole.Graph({
|
new PromConsole.Graph({
|
||||||
node: document.querySelector("#process_resident_memory_bytes"),
|
node: document.querySelector("#process_resource_maxrss"),
|
||||||
expr: "process_resident_memory_bytes",
|
expr: "process_psutil_rss:max",
|
||||||
name: "[[job]]-[[index]]",
|
name: "Maxrss",
|
||||||
min: 0,
|
min: 0,
|
||||||
renderer: "line",
|
renderer: "line",
|
||||||
height: 150,
|
height: 150,
|
||||||
@@ -43,8 +43,8 @@ new PromConsole.Graph({
|
|||||||
<script>
|
<script>
|
||||||
new PromConsole.Graph({
|
new PromConsole.Graph({
|
||||||
node: document.querySelector("#process_fds"),
|
node: document.querySelector("#process_fds"),
|
||||||
expr: "process_open_fds",
|
expr: "process_open_fds{job='synapse'}",
|
||||||
name: "[[job]]-[[index]]",
|
name: "FDs",
|
||||||
min: 0,
|
min: 0,
|
||||||
renderer: "line",
|
renderer: "line",
|
||||||
height: 150,
|
height: 150,
|
||||||
@@ -62,8 +62,8 @@ new PromConsole.Graph({
|
|||||||
<script>
|
<script>
|
||||||
new PromConsole.Graph({
|
new PromConsole.Graph({
|
||||||
node: document.querySelector("#reactor_total_time"),
|
node: document.querySelector("#reactor_total_time"),
|
||||||
expr: "rate(python_twisted_reactor_tick_time_sum[2m])",
|
expr: "rate(python_twisted_reactor_tick_time:total[2m]) / 1000",
|
||||||
name: "[[job]]-[[index]]",
|
name: "time",
|
||||||
max: 1,
|
max: 1,
|
||||||
min: 0,
|
min: 0,
|
||||||
renderer: "area",
|
renderer: "area",
|
||||||
@@ -80,8 +80,8 @@ new PromConsole.Graph({
|
|||||||
<script>
|
<script>
|
||||||
new PromConsole.Graph({
|
new PromConsole.Graph({
|
||||||
node: document.querySelector("#reactor_average_time"),
|
node: document.querySelector("#reactor_average_time"),
|
||||||
expr: "rate(python_twisted_reactor_tick_time_sum[2m]) / rate(python_twisted_reactor_tick_time_count[2m])",
|
expr: "rate(python_twisted_reactor_tick_time:total[2m]) / rate(python_twisted_reactor_tick_time:count[2m]) / 1000",
|
||||||
name: "[[job]]-[[index]]",
|
name: "time",
|
||||||
min: 0,
|
min: 0,
|
||||||
renderer: "line",
|
renderer: "line",
|
||||||
height: 150,
|
height: 150,
|
||||||
@@ -97,14 +97,14 @@ new PromConsole.Graph({
|
|||||||
<script>
|
<script>
|
||||||
new PromConsole.Graph({
|
new PromConsole.Graph({
|
||||||
node: document.querySelector("#reactor_pending_calls"),
|
node: document.querySelector("#reactor_pending_calls"),
|
||||||
expr: "rate(python_twisted_reactor_pending_calls_sum[30s]) / rate(python_twisted_reactor_pending_calls_count[30s])",
|
expr: "rate(python_twisted_reactor_pending_calls:total[30s])/rate(python_twisted_reactor_pending_calls:count[30s])",
|
||||||
name: "[[job]]-[[index]]",
|
name: "calls",
|
||||||
min: 0,
|
min: 0,
|
||||||
renderer: "line",
|
renderer: "line",
|
||||||
height: 150,
|
height: 150,
|
||||||
yAxisFormatter: PromConsole.NumberFormatter.humanize,
|
yAxisFormatter: PromConsole.NumberFormatter.humanize,
|
||||||
yHoverFormatter: PromConsole.NumberFormatter.humanize,
|
yHoverFormatter: PromConsole.NumberFormatter.humanize,
|
||||||
yTitle: "Pending Calls"
|
yTitle: "Pending Cals"
|
||||||
})
|
})
|
||||||
</script>
|
</script>
|
||||||
|
|
||||||
@@ -115,7 +115,7 @@ new PromConsole.Graph({
|
|||||||
<script>
|
<script>
|
||||||
new PromConsole.Graph({
|
new PromConsole.Graph({
|
||||||
node: document.querySelector("#synapse_storage_query_time"),
|
node: document.querySelector("#synapse_storage_query_time"),
|
||||||
expr: "sum(rate(synapse_storage_query_time_count[2m])) by (verb)",
|
expr: "rate(synapse_storage_query_time:count[2m])",
|
||||||
name: "[[verb]]",
|
name: "[[verb]]",
|
||||||
yAxisFormatter: PromConsole.NumberFormatter.humanizeNoSmallPrefix,
|
yAxisFormatter: PromConsole.NumberFormatter.humanizeNoSmallPrefix,
|
||||||
yHoverFormatter: PromConsole.NumberFormatter.humanizeNoSmallPrefix,
|
yHoverFormatter: PromConsole.NumberFormatter.humanizeNoSmallPrefix,
|
||||||
@@ -129,8 +129,8 @@ new PromConsole.Graph({
|
|||||||
<script>
|
<script>
|
||||||
new PromConsole.Graph({
|
new PromConsole.Graph({
|
||||||
node: document.querySelector("#synapse_storage_transaction_time"),
|
node: document.querySelector("#synapse_storage_transaction_time"),
|
||||||
expr: "topk(10, rate(synapse_storage_transaction_time_count[2m]))",
|
expr: "rate(synapse_storage_transaction_time:count[2m])",
|
||||||
name: "[[job]]-[[index]] [[desc]]",
|
name: "[[desc]]",
|
||||||
min: 0,
|
min: 0,
|
||||||
yAxisFormatter: PromConsole.NumberFormatter.humanizeNoSmallPrefix,
|
yAxisFormatter: PromConsole.NumberFormatter.humanizeNoSmallPrefix,
|
||||||
yHoverFormatter: PromConsole.NumberFormatter.humanizeNoSmallPrefix,
|
yHoverFormatter: PromConsole.NumberFormatter.humanizeNoSmallPrefix,
|
||||||
@@ -140,12 +140,12 @@ new PromConsole.Graph({
|
|||||||
</script>
|
</script>
|
||||||
|
|
||||||
<h3>Transaction execution time</h3>
|
<h3>Transaction execution time</h3>
|
||||||
<div id="synapse_storage_transactions_time_sec"></div>
|
<div id="synapse_storage_transactions_time_msec"></div>
|
||||||
<script>
|
<script>
|
||||||
new PromConsole.Graph({
|
new PromConsole.Graph({
|
||||||
node: document.querySelector("#synapse_storage_transactions_time_sec"),
|
node: document.querySelector("#synapse_storage_transactions_time_msec"),
|
||||||
expr: "rate(synapse_storage_transaction_time_sum[2m])",
|
expr: "rate(synapse_storage_transaction_time:total[2m]) / 1000",
|
||||||
name: "[[job]]-[[index]] [[desc]]",
|
name: "[[desc]]",
|
||||||
min: 0,
|
min: 0,
|
||||||
yAxisFormatter: PromConsole.NumberFormatter.humanize,
|
yAxisFormatter: PromConsole.NumberFormatter.humanize,
|
||||||
yHoverFormatter: PromConsole.NumberFormatter.humanize,
|
yHoverFormatter: PromConsole.NumberFormatter.humanize,
|
||||||
@@ -154,33 +154,34 @@ new PromConsole.Graph({
|
|||||||
})
|
})
|
||||||
</script>
|
</script>
|
||||||
|
|
||||||
<h3>Average time waiting for database connection</h3>
|
<h3>Database scheduling latency</h3>
|
||||||
<div id="synapse_storage_avg_waiting_time"></div>
|
<div id="synapse_storage_schedule_time"></div>
|
||||||
<script>
|
<script>
|
||||||
new PromConsole.Graph({
|
new PromConsole.Graph({
|
||||||
node: document.querySelector("#synapse_storage_avg_waiting_time"),
|
node: document.querySelector("#synapse_storage_schedule_time"),
|
||||||
expr: "rate(synapse_storage_schedule_time_sum[2m]) / rate(synapse_storage_schedule_time_count[2m])",
|
expr: "rate(synapse_storage_schedule_time:total[2m]) / 1000",
|
||||||
name: "[[job]]-[[index]]",
|
name: "Total latency",
|
||||||
min: 0,
|
min: 0,
|
||||||
yAxisFormatter: PromConsole.NumberFormatter.humanize,
|
yAxisFormatter: PromConsole.NumberFormatter.humanize,
|
||||||
yHoverFormatter: PromConsole.NumberFormatter.humanize,
|
yHoverFormatter: PromConsole.NumberFormatter.humanize,
|
||||||
yUnits: "s",
|
yUnits: "s/s",
|
||||||
yTitle: "Time"
|
yTitle: "Usage"
|
||||||
})
|
})
|
||||||
</script>
|
</script>
|
||||||
|
|
||||||
<h3>Cache request rate</h3>
|
<h3>Cache hit ratio</h3>
|
||||||
<div id="synapse_cache_request_rate"></div>
|
<div id="synapse_cache_ratio"></div>
|
||||||
<script>
|
<script>
|
||||||
new PromConsole.Graph({
|
new PromConsole.Graph({
|
||||||
node: document.querySelector("#synapse_cache_request_rate"),
|
node: document.querySelector("#synapse_cache_ratio"),
|
||||||
expr: "rate(synapse_util_caches_cache:total[2m])",
|
expr: "rate(synapse_util_caches_cache:total[2m]) * 100",
|
||||||
name: "[[job]]-[[index]] [[name]]",
|
name: "[[name]]",
|
||||||
min: 0,
|
min: 0,
|
||||||
|
max: 100,
|
||||||
yAxisFormatter: PromConsole.NumberFormatter.humanizeNoSmallPrefix,
|
yAxisFormatter: PromConsole.NumberFormatter.humanizeNoSmallPrefix,
|
||||||
yHoverFormatter: PromConsole.NumberFormatter.humanizeNoSmallPrefix,
|
yHoverFormatter: PromConsole.NumberFormatter.humanizeNoSmallPrefix,
|
||||||
yUnits: "rps",
|
yUnits: "%",
|
||||||
yTitle: "Cache request rate"
|
yTitle: "Percentage"
|
||||||
})
|
})
|
||||||
</script>
|
</script>
|
||||||
|
|
||||||
@@ -190,7 +191,7 @@ new PromConsole.Graph({
|
|||||||
new PromConsole.Graph({
|
new PromConsole.Graph({
|
||||||
node: document.querySelector("#synapse_cache_size"),
|
node: document.querySelector("#synapse_cache_size"),
|
||||||
expr: "synapse_util_caches_cache:size",
|
expr: "synapse_util_caches_cache:size",
|
||||||
name: "[[job]]-[[index]] [[name]]",
|
name: "[[name]]",
|
||||||
yAxisFormatter: PromConsole.NumberFormatter.humanizeNoSmallPrefix,
|
yAxisFormatter: PromConsole.NumberFormatter.humanizeNoSmallPrefix,
|
||||||
yHoverFormatter: PromConsole.NumberFormatter.humanizeNoSmallPrefix,
|
yHoverFormatter: PromConsole.NumberFormatter.humanizeNoSmallPrefix,
|
||||||
yUnits: "",
|
yUnits: "",
|
||||||
@@ -205,8 +206,8 @@ new PromConsole.Graph({
|
|||||||
<script>
|
<script>
|
||||||
new PromConsole.Graph({
|
new PromConsole.Graph({
|
||||||
node: document.querySelector("#synapse_http_server_request_count_servlet"),
|
node: document.querySelector("#synapse_http_server_request_count_servlet"),
|
||||||
expr: "rate(synapse_http_server_in_flight_requests_count[2m])",
|
expr: "rate(synapse_http_server_request_count:servlet[2m])",
|
||||||
name: "[[job]]-[[index]] [[method]] [[servlet]]",
|
name: "[[servlet]]",
|
||||||
yAxisFormatter: PromConsole.NumberFormatter.humanize,
|
yAxisFormatter: PromConsole.NumberFormatter.humanize,
|
||||||
yHoverFormatter: PromConsole.NumberFormatter.humanize,
|
yHoverFormatter: PromConsole.NumberFormatter.humanize,
|
||||||
yUnits: "req/s",
|
yUnits: "req/s",
|
||||||
@@ -218,8 +219,8 @@ new PromConsole.Graph({
|
|||||||
<script>
|
<script>
|
||||||
new PromConsole.Graph({
|
new PromConsole.Graph({
|
||||||
node: document.querySelector("#synapse_http_server_request_count_servlet_minus_events"),
|
node: document.querySelector("#synapse_http_server_request_count_servlet_minus_events"),
|
||||||
expr: "rate(synapse_http_server_in_flight_requests_count{servlet!=\"EventStreamRestServlet\", servlet!=\"SyncRestServlet\"}[2m])",
|
expr: "rate(synapse_http_server_request_count:servlet{servlet!=\"EventStreamRestServlet\", servlet!=\"SyncRestServlet\"}[2m])",
|
||||||
name: "[[job]]-[[index]] [[method]] [[servlet]]",
|
name: "[[servlet]]",
|
||||||
yAxisFormatter: PromConsole.NumberFormatter.humanize,
|
yAxisFormatter: PromConsole.NumberFormatter.humanize,
|
||||||
yHoverFormatter: PromConsole.NumberFormatter.humanize,
|
yHoverFormatter: PromConsole.NumberFormatter.humanize,
|
||||||
yUnits: "req/s",
|
yUnits: "req/s",
|
||||||
@@ -232,8 +233,8 @@ new PromConsole.Graph({
|
|||||||
<script>
|
<script>
|
||||||
new PromConsole.Graph({
|
new PromConsole.Graph({
|
||||||
node: document.querySelector("#synapse_http_server_response_time_avg"),
|
node: document.querySelector("#synapse_http_server_response_time_avg"),
|
||||||
expr: "rate(synapse_http_server_response_time_seconds_sum[2m]) / rate(synapse_http_server_response_count[2m])",
|
expr: "rate(synapse_http_server_response_time_seconds[2m]) / rate(synapse_http_server_response_count[2m]) / 1000",
|
||||||
name: "[[job]]-[[index]] [[servlet]]",
|
name: "[[servlet]]",
|
||||||
yAxisFormatter: PromConsole.NumberFormatter.humanize,
|
yAxisFormatter: PromConsole.NumberFormatter.humanize,
|
||||||
yHoverFormatter: PromConsole.NumberFormatter.humanize,
|
yHoverFormatter: PromConsole.NumberFormatter.humanize,
|
||||||
yUnits: "s/req",
|
yUnits: "s/req",
|
||||||
@@ -276,7 +277,7 @@ new PromConsole.Graph({
|
|||||||
new PromConsole.Graph({
|
new PromConsole.Graph({
|
||||||
node: document.querySelector("#synapse_http_server_response_ru_utime"),
|
node: document.querySelector("#synapse_http_server_response_ru_utime"),
|
||||||
expr: "rate(synapse_http_server_response_ru_utime_seconds[2m])",
|
expr: "rate(synapse_http_server_response_ru_utime_seconds[2m])",
|
||||||
name: "[[job]]-[[index]] [[servlet]]",
|
name: "[[servlet]]",
|
||||||
yAxisFormatter: PromConsole.NumberFormatter.humanize,
|
yAxisFormatter: PromConsole.NumberFormatter.humanize,
|
||||||
yHoverFormatter: PromConsole.NumberFormatter.humanize,
|
yHoverFormatter: PromConsole.NumberFormatter.humanize,
|
||||||
yUnits: "s/s",
|
yUnits: "s/s",
|
||||||
@@ -291,7 +292,7 @@ new PromConsole.Graph({
|
|||||||
new PromConsole.Graph({
|
new PromConsole.Graph({
|
||||||
node: document.querySelector("#synapse_http_server_response_db_txn_duration"),
|
node: document.querySelector("#synapse_http_server_response_db_txn_duration"),
|
||||||
expr: "rate(synapse_http_server_response_db_txn_duration_seconds[2m])",
|
expr: "rate(synapse_http_server_response_db_txn_duration_seconds[2m])",
|
||||||
name: "[[job]]-[[index]] [[servlet]]",
|
name: "[[servlet]]",
|
||||||
yAxisFormatter: PromConsole.NumberFormatter.humanize,
|
yAxisFormatter: PromConsole.NumberFormatter.humanize,
|
||||||
yHoverFormatter: PromConsole.NumberFormatter.humanize,
|
yHoverFormatter: PromConsole.NumberFormatter.humanize,
|
||||||
yUnits: "s/s",
|
yUnits: "s/s",
|
||||||
@@ -305,8 +306,8 @@ new PromConsole.Graph({
|
|||||||
<script>
|
<script>
|
||||||
new PromConsole.Graph({
|
new PromConsole.Graph({
|
||||||
node: document.querySelector("#synapse_http_server_send_time_avg"),
|
node: document.querySelector("#synapse_http_server_send_time_avg"),
|
||||||
expr: "rate(synapse_http_server_response_time_seconds_sum{servlet='RoomSendEventRestServlet'}[2m]) / rate(synapse_http_server_response_count{servlet='RoomSendEventRestServlet'}[2m])",
|
expr: "rate(synapse_http_server_response_time_second{servlet='RoomSendEventRestServlet'}[2m]) / rate(synapse_http_server_response_count{servlet='RoomSendEventRestServlet'}[2m]) / 1000",
|
||||||
name: "[[job]]-[[index]] [[servlet]]",
|
name: "[[servlet]]",
|
||||||
yAxisFormatter: PromConsole.NumberFormatter.humanize,
|
yAxisFormatter: PromConsole.NumberFormatter.humanize,
|
||||||
yHoverFormatter: PromConsole.NumberFormatter.humanize,
|
yHoverFormatter: PromConsole.NumberFormatter.humanize,
|
||||||
yUnits: "s/req",
|
yUnits: "s/req",
|
||||||
@@ -322,7 +323,7 @@ new PromConsole.Graph({
|
|||||||
new PromConsole.Graph({
|
new PromConsole.Graph({
|
||||||
node: document.querySelector("#synapse_federation_client_sent"),
|
node: document.querySelector("#synapse_federation_client_sent"),
|
||||||
expr: "rate(synapse_federation_client_sent[2m])",
|
expr: "rate(synapse_federation_client_sent[2m])",
|
||||||
name: "[[job]]-[[index]] [[type]]",
|
name: "[[type]]",
|
||||||
yAxisFormatter: PromConsole.NumberFormatter.humanize,
|
yAxisFormatter: PromConsole.NumberFormatter.humanize,
|
||||||
yHoverFormatter: PromConsole.NumberFormatter.humanize,
|
yHoverFormatter: PromConsole.NumberFormatter.humanize,
|
||||||
yUnits: "req/s",
|
yUnits: "req/s",
|
||||||
@@ -336,7 +337,7 @@ new PromConsole.Graph({
|
|||||||
new PromConsole.Graph({
|
new PromConsole.Graph({
|
||||||
node: document.querySelector("#synapse_federation_server_received"),
|
node: document.querySelector("#synapse_federation_server_received"),
|
||||||
expr: "rate(synapse_federation_server_received[2m])",
|
expr: "rate(synapse_federation_server_received[2m])",
|
||||||
name: "[[job]]-[[index]] [[type]]",
|
name: "[[type]]",
|
||||||
yAxisFormatter: PromConsole.NumberFormatter.humanize,
|
yAxisFormatter: PromConsole.NumberFormatter.humanize,
|
||||||
yHoverFormatter: PromConsole.NumberFormatter.humanize,
|
yHoverFormatter: PromConsole.NumberFormatter.humanize,
|
||||||
yUnits: "req/s",
|
yUnits: "req/s",
|
||||||
@@ -366,7 +367,7 @@ new PromConsole.Graph({
|
|||||||
new PromConsole.Graph({
|
new PromConsole.Graph({
|
||||||
node: document.querySelector("#synapse_notifier_listeners"),
|
node: document.querySelector("#synapse_notifier_listeners"),
|
||||||
expr: "synapse_notifier_listeners",
|
expr: "synapse_notifier_listeners",
|
||||||
name: "[[job]]-[[index]]",
|
name: "listeners",
|
||||||
min: 0,
|
min: 0,
|
||||||
yAxisFormatter: PromConsole.NumberFormatter.humanizeNoSmallPrefix,
|
yAxisFormatter: PromConsole.NumberFormatter.humanizeNoSmallPrefix,
|
||||||
yHoverFormatter: PromConsole.NumberFormatter.humanizeNoSmallPrefix,
|
yHoverFormatter: PromConsole.NumberFormatter.humanizeNoSmallPrefix,
|
||||||
@@ -381,7 +382,7 @@ new PromConsole.Graph({
|
|||||||
new PromConsole.Graph({
|
new PromConsole.Graph({
|
||||||
node: document.querySelector("#synapse_notifier_notified_events"),
|
node: document.querySelector("#synapse_notifier_notified_events"),
|
||||||
expr: "rate(synapse_notifier_notified_events[2m])",
|
expr: "rate(synapse_notifier_notified_events[2m])",
|
||||||
name: "[[job]]-[[index]]",
|
name: "events",
|
||||||
yAxisFormatter: PromConsole.NumberFormatter.humanize,
|
yAxisFormatter: PromConsole.NumberFormatter.humanize,
|
||||||
yHoverFormatter: PromConsole.NumberFormatter.humanize,
|
yHoverFormatter: PromConsole.NumberFormatter.humanize,
|
||||||
yUnits: "events/s",
|
yUnits: "events/s",
|
||||||
|
|||||||
@@ -58,21 +58,3 @@ groups:
|
|||||||
labels:
|
labels:
|
||||||
type: "PDU"
|
type: "PDU"
|
||||||
expr: 'synapse_federation_transaction_queue_pending_pdus + 0'
|
expr: 'synapse_federation_transaction_queue_pending_pdus + 0'
|
||||||
|
|
||||||
- record: synapse_storage_events_persisted_by_source_type
|
|
||||||
expr: sum without(type, origin_type, origin_entity) (synapse_storage_events_persisted_events_sep{origin_type="remote"})
|
|
||||||
labels:
|
|
||||||
type: remote
|
|
||||||
- record: synapse_storage_events_persisted_by_source_type
|
|
||||||
expr: sum without(type, origin_type, origin_entity) (synapse_storage_events_persisted_events_sep{origin_entity="*client*",origin_type="local"})
|
|
||||||
labels:
|
|
||||||
type: local
|
|
||||||
- record: synapse_storage_events_persisted_by_source_type
|
|
||||||
expr: sum without(type, origin_type, origin_entity) (synapse_storage_events_persisted_events_sep{origin_entity!="*client*",origin_type="local"})
|
|
||||||
labels:
|
|
||||||
type: bridges
|
|
||||||
- record: synapse_storage_events_persisted_by_event_type
|
|
||||||
expr: sum without(origin_entity, origin_type) (synapse_storage_events_persisted_events_sep)
|
|
||||||
- record: synapse_storage_events_persisted_by_origin
|
|
||||||
expr: sum without(type) (synapse_storage_events_persisted_events_sep)
|
|
||||||
|
|
||||||
|
|||||||
@@ -1,4 +1,4 @@
|
|||||||
#!/usr/bin/env bash
|
#!/bin/bash
|
||||||
|
|
||||||
# this script will use the api:
|
# this script will use the api:
|
||||||
# https://github.com/matrix-org/synapse/blob/master/docs/admin_api/purge_history_api.rst
|
# https://github.com/matrix-org/synapse/blob/master/docs/admin_api/purge_history_api.rst
|
||||||
|
|||||||
@@ -1,4 +1,4 @@
|
|||||||
#!/usr/bin/env bash
|
#!/bin/bash
|
||||||
|
|
||||||
DOMAIN=yourserver.tld
|
DOMAIN=yourserver.tld
|
||||||
# add this user as admin in your home server:
|
# add this user as admin in your home server:
|
||||||
@@ -51,4 +51,4 @@ TOKEN=$(sql "SELECT token FROM access_tokens WHERE user_id='$ADMIN' ORDER BY id
|
|||||||
# finally start pruning media:
|
# finally start pruning media:
|
||||||
###############################################################################
|
###############################################################################
|
||||||
set -x # for debugging the generated string
|
set -x # for debugging the generated string
|
||||||
curl --header "Authorization: Bearer $TOKEN" -X POST "$API_URL/admin/purge_media_cache/?before_ts=$UNIX_TIMESTAMP"
|
curl --header "Authorization: Bearer $TOKEN" -v POST "$API_URL/admin/purge_media_cache/?before_ts=$UNIX_TIMESTAMP"
|
||||||
|
|||||||
@@ -1,11 +1,15 @@
|
|||||||
#!/usr/bin/env python
|
#!/usr/bin/env python
|
||||||
|
from __future__ import print_function
|
||||||
|
from argparse import ArgumentParser
|
||||||
import json
|
import json
|
||||||
|
import requests
|
||||||
import sys
|
import sys
|
||||||
import urllib
|
import urllib
|
||||||
from argparse import ArgumentParser
|
|
||||||
|
|
||||||
import requests
|
try:
|
||||||
|
raw_input
|
||||||
|
except NameError: # Python 3
|
||||||
|
raw_input = input
|
||||||
|
|
||||||
|
|
||||||
def _mkurl(template, kws):
|
def _mkurl(template, kws):
|
||||||
@@ -52,7 +56,7 @@ def main(hs, room_id, access_token, user_id_prefix, why):
|
|||||||
print("The following user IDs will be kicked from %s" % room_name)
|
print("The following user IDs will be kicked from %s" % room_name)
|
||||||
for uid in kick_list:
|
for uid in kick_list:
|
||||||
print(uid)
|
print(uid)
|
||||||
doit = input("Continue? [Y]es\n")
|
doit = raw_input("Continue? [Y]es\n")
|
||||||
if len(doit) > 0 and doit.lower() == "y":
|
if len(doit) > 0 and doit.lower() == "y":
|
||||||
print("Kicking members...")
|
print("Kicking members...")
|
||||||
# encode them all
|
# encode them all
|
||||||
|
|||||||
@@ -1,2 +1,150 @@
|
|||||||
The documentation for using systemd to manage synapse workers is now part of
|
# Setup Synapse with Workers and Systemd
|
||||||
the main synapse distribution. See [docs/systemd-with-workers](../../docs/systemd-with-workers).
|
|
||||||
|
This is a setup for managing synapse with systemd including support for
|
||||||
|
managing workers. It provides a `matrix-synapse`, as well as a
|
||||||
|
`matrix-synapse-worker@` service for any workers you require. Additionally to
|
||||||
|
group the required services it sets up a `matrix.target`. You can use this to
|
||||||
|
automatically start any bot- or bridge-services. More on this in
|
||||||
|
[Bots and Bridges](#bots-and-bridges).
|
||||||
|
|
||||||
|
See the folder [system](system) for any service and target files.
|
||||||
|
|
||||||
|
The folder [workers](workers) contains an example configuration for the
|
||||||
|
`federation_reader` worker. Pay special attention to the name of the
|
||||||
|
configuration file. In order to work with the `matrix-synapse-worker@.service`
|
||||||
|
service, it needs to have the exact same name as the worker app.
|
||||||
|
|
||||||
|
This setup expects neither the homeserver nor any workers to fork. Forking is
|
||||||
|
handled by systemd.
|
||||||
|
|
||||||
|
## Setup
|
||||||
|
|
||||||
|
1. Adjust your matrix configs. Make sure that the worker config files have the
|
||||||
|
exact same name as the worker app. Compare `matrix-synapse-worker@.service` for
|
||||||
|
why. You can find an example worker config in the [workers](workers) folder. See
|
||||||
|
below for relevant settings in the `homeserver.yaml`.
|
||||||
|
2. Copy the `*.service` and `*.target` files in [system](system) to
|
||||||
|
`/etc/systemd/system`.
|
||||||
|
3. `systemctl enable matrix-synapse.service` this adds the homeserver
|
||||||
|
app to the `matrix.target`
|
||||||
|
4. *Optional.* `systemctl enable
|
||||||
|
matrix-synapse-worker@federation_reader.service` this adds the federation_reader
|
||||||
|
app to the `matrix-synapse.service`
|
||||||
|
5. *Optional.* Repeat step 4 for any additional workers you require.
|
||||||
|
6. *Optional.* Add any bots or bridges by enabling them.
|
||||||
|
7. Start all matrix related services via `systemctl start matrix.target`
|
||||||
|
8. *Optional.* Enable autostart of all matrix related services on system boot
|
||||||
|
via `systemctl enable matrix.target`
|
||||||
|
|
||||||
|
## Usage
|
||||||
|
|
||||||
|
After you have setup you can use the following commands to manage your synapse
|
||||||
|
installation:
|
||||||
|
|
||||||
|
```
|
||||||
|
# Start matrix-synapse, all workers and any enabled bots or bridges.
|
||||||
|
systemctl start matrix.target
|
||||||
|
|
||||||
|
# Restart matrix-synapse and all workers (not necessarily restarting bots
|
||||||
|
# or bridges, see "Bots and Bridges")
|
||||||
|
systemctl restart matrix-synapse.service
|
||||||
|
|
||||||
|
# Stop matrix-synapse and all workers (not necessarily restarting bots
|
||||||
|
# or bridges, see "Bots and Bridges")
|
||||||
|
systemctl stop matrix-synapse.service
|
||||||
|
|
||||||
|
# Restart a specific worker (i. e. federation_reader), the homeserver is
|
||||||
|
# unaffected by this.
|
||||||
|
systemctl restart matrix-synapse-worker@federation_reader.service
|
||||||
|
|
||||||
|
# Add a new worker (assuming all configs are setup already)
|
||||||
|
systemctl enable matrix-synapse-worker@federation_writer.service
|
||||||
|
systemctl restart matrix-synapse.service
|
||||||
|
```
|
||||||
|
|
||||||
|
## The Configs
|
||||||
|
|
||||||
|
Make sure the `worker_app` is set in the `homeserver.yaml` and it does not fork.
|
||||||
|
|
||||||
|
```
|
||||||
|
worker_app: synapse.app.homeserver
|
||||||
|
daemonize: false
|
||||||
|
```
|
||||||
|
|
||||||
|
None of the workers should fork, as forking is handled by systemd. Hence make
|
||||||
|
sure this is present in all worker config files.
|
||||||
|
|
||||||
|
```
|
||||||
|
worker_daemonize: false
|
||||||
|
```
|
||||||
|
|
||||||
|
The config files of all workers are expected to be located in
|
||||||
|
`/etc/matrix-synapse/workers`. If you want to use a different location you have
|
||||||
|
to edit the provided `*.service` files accordingly.
|
||||||
|
|
||||||
|
## Bots and Bridges
|
||||||
|
|
||||||
|
Most bots and bridges do not care if the homeserver goes down or is restarted.
|
||||||
|
Depending on the implementation this may crash them though. So look up the docs
|
||||||
|
or ask the community of the specific bridge or bot you want to run to make sure
|
||||||
|
you choose the correct setup.
|
||||||
|
|
||||||
|
Whichever configuration you choose, after the setup the following will enable
|
||||||
|
automatically starting (and potentially restarting) your bot/bridge with the
|
||||||
|
`matrix.target`.
|
||||||
|
|
||||||
|
```
|
||||||
|
systemctl enable <yourBotOrBridgeName>.service
|
||||||
|
```
|
||||||
|
|
||||||
|
**Note** that from an inactive synapse the bots/bridges will only be started with
|
||||||
|
synapse if you start the `matrix.target`, not if you start the
|
||||||
|
`matrix-synapse.service`. This is on purpose. Think of `matrix-synapse.service`
|
||||||
|
as *just* synapse, but `matrix.target` being anything matrix related, including
|
||||||
|
synapse and any and all enabled bots and bridges.
|
||||||
|
|
||||||
|
### Start with synapse but ignore synapse going down
|
||||||
|
|
||||||
|
If the bridge can handle shutdowns of the homeserver you'll want to install the
|
||||||
|
service in the `matrix.target` and optionally add a
|
||||||
|
`After=matrix-synapse.service` dependency to have the bot/bridge start after
|
||||||
|
synapse on starting everything.
|
||||||
|
|
||||||
|
In this case the service file should look like this.
|
||||||
|
|
||||||
|
```
|
||||||
|
[Unit]
|
||||||
|
# ...
|
||||||
|
# Optional, this will only ensure that if you start everything, synapse will
|
||||||
|
# be started before the bot/bridge will be started.
|
||||||
|
After=matrix-synapse.service
|
||||||
|
|
||||||
|
[Service]
|
||||||
|
# ...
|
||||||
|
|
||||||
|
[Install]
|
||||||
|
WantedBy=matrix.target
|
||||||
|
```
|
||||||
|
|
||||||
|
### Stop/restart when synapse stops/restarts
|
||||||
|
|
||||||
|
If the bridge can't handle shutdowns of the homeserver you'll still want to
|
||||||
|
install the service in the `matrix.target` but also have to specify the
|
||||||
|
`After=matrix-synapse.service` *and* `BindsTo=matrix-synapse.service`
|
||||||
|
dependencies to have the bot/bridge stop/restart with synapse.
|
||||||
|
|
||||||
|
In this case the service file should look like this.
|
||||||
|
|
||||||
|
```
|
||||||
|
[Unit]
|
||||||
|
# ...
|
||||||
|
# Mandatory
|
||||||
|
After=matrix-synapse.service
|
||||||
|
BindsTo=matrix-synapse.service
|
||||||
|
|
||||||
|
[Service]
|
||||||
|
# ...
|
||||||
|
|
||||||
|
[Install]
|
||||||
|
WantedBy=matrix.target
|
||||||
|
```
|
||||||
|
|||||||
@@ -0,0 +1,18 @@
|
|||||||
|
[Unit]
|
||||||
|
Description=Synapse Matrix Worker
|
||||||
|
After=matrix-synapse.service
|
||||||
|
BindsTo=matrix-synapse.service
|
||||||
|
|
||||||
|
[Service]
|
||||||
|
Type=simple
|
||||||
|
User=matrix-synapse
|
||||||
|
WorkingDirectory=/var/lib/matrix-synapse
|
||||||
|
EnvironmentFile=/etc/default/matrix-synapse
|
||||||
|
ExecStart=/opt/venvs/matrix-synapse/bin/python -m synapse.app.%i --config-path=/etc/matrix-synapse/homeserver.yaml --config-path=/etc/matrix-synapse/conf.d/ --config-path=/etc/matrix-synapse/workers/%i.yaml
|
||||||
|
ExecReload=/bin/kill -HUP $MAINPID
|
||||||
|
Restart=always
|
||||||
|
RestartSec=3
|
||||||
|
SyslogIdentifier=matrix-synapse-%i
|
||||||
|
|
||||||
|
[Install]
|
||||||
|
WantedBy=matrix-synapse.service
|
||||||
+3
-8
@@ -1,13 +1,8 @@
|
|||||||
[Unit]
|
[Unit]
|
||||||
Description=Synapse master
|
Description=Synapse Matrix Homeserver
|
||||||
|
|
||||||
# This service should be restarted when the synapse target is restarted.
|
|
||||||
PartOf=matrix-synapse.target
|
|
||||||
ReloadPropagatedFrom=matrix-synapse.target
|
|
||||||
|
|
||||||
[Service]
|
[Service]
|
||||||
Type=notify
|
Type=simple
|
||||||
NotifyAccess=main
|
|
||||||
User=matrix-synapse
|
User=matrix-synapse
|
||||||
WorkingDirectory=/var/lib/matrix-synapse
|
WorkingDirectory=/var/lib/matrix-synapse
|
||||||
EnvironmentFile=/etc/default/matrix-synapse
|
EnvironmentFile=/etc/default/matrix-synapse
|
||||||
@@ -19,4 +14,4 @@ RestartSec=3
|
|||||||
SyslogIdentifier=matrix-synapse
|
SyslogIdentifier=matrix-synapse
|
||||||
|
|
||||||
[Install]
|
[Install]
|
||||||
WantedBy=matrix-synapse.target
|
WantedBy=matrix.target
|
||||||
@@ -0,0 +1,7 @@
|
|||||||
|
[Unit]
|
||||||
|
Description=Contains matrix services like synapse, bridges and bots
|
||||||
|
After=network.target
|
||||||
|
AllowIsolate=no
|
||||||
|
|
||||||
|
[Install]
|
||||||
|
WantedBy=multi-user.target
|
||||||
+2
-1
@@ -1,7 +1,7 @@
|
|||||||
worker_app: synapse.app.federation_reader
|
worker_app: synapse.app.federation_reader
|
||||||
worker_name: federation_reader1
|
|
||||||
|
|
||||||
worker_replication_host: 127.0.0.1
|
worker_replication_host: 127.0.0.1
|
||||||
|
worker_replication_port: 9092
|
||||||
worker_replication_http_port: 9093
|
worker_replication_http_port: 9093
|
||||||
|
|
||||||
worker_listeners:
|
worker_listeners:
|
||||||
@@ -10,4 +10,5 @@ worker_listeners:
|
|||||||
resources:
|
resources:
|
||||||
- names: [federation]
|
- names: [federation]
|
||||||
|
|
||||||
|
worker_daemonize: false
|
||||||
worker_log_config: /etc/matrix-synapse/federation-reader-log.yaml
|
worker_log_config: /etc/matrix-synapse/federation-reader-log.yaml
|
||||||
@@ -1,17 +0,0 @@
|
|||||||
# Setup Synapse with Systemd
|
|
||||||
This is a setup for managing synapse with a user contributed systemd unit
|
|
||||||
file. It provides a `matrix-synapse` systemd unit file that should be tailored
|
|
||||||
to accommodate your installation in accordance with the installation
|
|
||||||
instructions provided in [installation instructions](../../INSTALL.md).
|
|
||||||
|
|
||||||
## Setup
|
|
||||||
1. Under the service section, ensure the `User` variable matches which user
|
|
||||||
you installed synapse under and wish to run it as.
|
|
||||||
2. Under the service section, ensure the `WorkingDirectory` variable matches
|
|
||||||
where you have installed synapse.
|
|
||||||
3. Under the service section, ensure the `ExecStart` variable matches the
|
|
||||||
appropriate locations of your installation.
|
|
||||||
4. Copy the `matrix-synapse.service` to `/etc/systemd/system/`
|
|
||||||
5. Start Synapse: `sudo systemctl start matrix-synapse`
|
|
||||||
6. Verify Synapse is running: `sudo systemctl status matrix-synapse`
|
|
||||||
7. *optional* Enable Synapse to start at system boot: `sudo systemctl enable matrix-synapse`
|
|
||||||
@@ -4,32 +4,24 @@
|
|||||||
# systemctl enable matrix-synapse
|
# systemctl enable matrix-synapse
|
||||||
# systemctl start matrix-synapse
|
# systemctl start matrix-synapse
|
||||||
#
|
#
|
||||||
# This assumes that Synapse has been installed by a user named
|
|
||||||
# synapse.
|
|
||||||
#
|
|
||||||
# This assumes that Synapse has been installed in a virtualenv in
|
# This assumes that Synapse has been installed in a virtualenv in
|
||||||
# the user's home directory: `/home/synapse/synapse/env`.
|
# /opt/synapse/env.
|
||||||
#
|
#
|
||||||
# **NOTE:** This is an example service file that may change in the future. If you
|
# **NOTE:** This is an example service file that may change in the future. If you
|
||||||
# wish to use this please copy rather than symlink it.
|
# wish to use this please copy rather than symlink it.
|
||||||
|
|
||||||
[Unit]
|
[Unit]
|
||||||
Description=Synapse Matrix homeserver
|
Description=Synapse Matrix homeserver
|
||||||
# If you are using postgresql to persist data, uncomment this line to make sure
|
|
||||||
# synapse starts after the postgresql service.
|
|
||||||
# After=postgresql.service
|
|
||||||
|
|
||||||
[Service]
|
[Service]
|
||||||
Type=notify
|
Type=simple
|
||||||
NotifyAccess=main
|
|
||||||
ExecReload=/bin/kill -HUP $MAINPID
|
|
||||||
Restart=on-abort
|
Restart=on-abort
|
||||||
|
|
||||||
User=synapse
|
User=synapse
|
||||||
Group=nogroup
|
Group=nogroup
|
||||||
|
|
||||||
WorkingDirectory=/home/synapse/synapse
|
WorkingDirectory=/opt/synapse
|
||||||
ExecStart=/home/synapse/synapse/env/bin/python -m synapse.app.homeserver --config-path=/home/synapse/synapse/homeserver.yaml
|
ExecStart=/opt/synapse/env/bin/python -m synapse.app.homeserver --config-path=/opt/synapse/homeserver.yaml
|
||||||
SyslogIdentifier=matrix-synapse
|
SyslogIdentifier=matrix-synapse
|
||||||
|
|
||||||
# adjust the cache factor if necessary
|
# adjust the cache factor if necessary
|
||||||
|
|||||||
Vendored
+11
-24
@@ -33,44 +33,34 @@ esac
|
|||||||
# Use --builtin-venv to use the better `venv` module from CPython 3.4+ rather
|
# Use --builtin-venv to use the better `venv` module from CPython 3.4+ rather
|
||||||
# than the 2/3 compatible `virtualenv`.
|
# than the 2/3 compatible `virtualenv`.
|
||||||
|
|
||||||
# Pin pip to 20.3.4 to fix breakage in 21.0 on py3.5 (xenial)
|
|
||||||
|
|
||||||
dh_virtualenv \
|
dh_virtualenv \
|
||||||
--install-suffix "matrix-synapse" \
|
--install-suffix "matrix-synapse" \
|
||||||
--builtin-venv \
|
--builtin-venv \
|
||||||
|
--setuptools \
|
||||||
--python "$SNAKE" \
|
--python "$SNAKE" \
|
||||||
--upgrade-pip-to="20.3.4" \
|
--upgrade-pip \
|
||||||
--preinstall="lxml" \
|
--preinstall="lxml" \
|
||||||
--preinstall="mock" \
|
--preinstall="mock" \
|
||||||
--extra-pip-arg="--no-cache-dir" \
|
--extra-pip-arg="--no-cache-dir" \
|
||||||
--extra-pip-arg="--compile" \
|
--extra-pip-arg="--compile" \
|
||||||
--extras="all,systemd,test"
|
--extras="all,systemd"
|
||||||
|
|
||||||
PACKAGE_BUILD_DIR="debian/matrix-synapse-py3"
|
PACKAGE_BUILD_DIR="debian/matrix-synapse-py3"
|
||||||
VIRTUALENV_DIR="${PACKAGE_BUILD_DIR}${DH_VIRTUALENV_INSTALL_ROOT}/matrix-synapse"
|
VIRTUALENV_DIR="${PACKAGE_BUILD_DIR}${DH_VIRTUALENV_INSTALL_ROOT}/matrix-synapse"
|
||||||
TARGET_PYTHON="${VIRTUALENV_DIR}/bin/python"
|
TARGET_PYTHON="${VIRTUALENV_DIR}/bin/python"
|
||||||
|
|
||||||
case "$DEB_BUILD_OPTIONS" in
|
# we copy the tests to a temporary directory so that we can put them on the
|
||||||
*nocheck*)
|
# PYTHONPATH without putting the uninstalled synapse on the pythonpath.
|
||||||
# Skip running tests if "nocheck" present in $DEB_BUILD_OPTIONS
|
tmpdir=`mktemp -d`
|
||||||
;;
|
trap "rm -r $tmpdir" EXIT
|
||||||
|
|
||||||
*)
|
cp -r tests "$tmpdir"
|
||||||
# Copy tests to a temporary directory so that we can put them on the
|
|
||||||
# PYTHONPATH without putting the uninstalled synapse on the pythonpath.
|
|
||||||
tmpdir=`mktemp -d`
|
|
||||||
trap "rm -r $tmpdir" EXIT
|
|
||||||
|
|
||||||
cp -r tests "$tmpdir"
|
PYTHONPATH="$tmpdir" \
|
||||||
|
"${TARGET_PYTHON}" -B -m twisted.trial --reporter=text -j2 tests
|
||||||
PYTHONPATH="$tmpdir" \
|
|
||||||
"${TARGET_PYTHON}" -m twisted.trial --reporter=text -j2 tests
|
|
||||||
|
|
||||||
;;
|
|
||||||
esac
|
|
||||||
|
|
||||||
# build the config file
|
# build the config file
|
||||||
"${TARGET_PYTHON}" "${VIRTUALENV_DIR}/bin/generate_config" \
|
"${TARGET_PYTHON}" -B "${VIRTUALENV_DIR}/bin/generate_config" \
|
||||||
--config-dir="/etc/matrix-synapse" \
|
--config-dir="/etc/matrix-synapse" \
|
||||||
--data-dir="/var/lib/matrix-synapse" |
|
--data-dir="/var/lib/matrix-synapse" |
|
||||||
perl -pe '
|
perl -pe '
|
||||||
@@ -95,9 +85,6 @@ esac
|
|||||||
|
|
||||||
' > "${PACKAGE_BUILD_DIR}/etc/matrix-synapse/homeserver.yaml"
|
' > "${PACKAGE_BUILD_DIR}/etc/matrix-synapse/homeserver.yaml"
|
||||||
|
|
||||||
# build the log config file
|
|
||||||
"${TARGET_PYTHON}" "${VIRTUALENV_DIR}/bin/generate_log_config" \
|
|
||||||
--output-file="${PACKAGE_BUILD_DIR}/etc/matrix-synapse/log.yaml"
|
|
||||||
|
|
||||||
# add a dependency on the right version of python to substvars.
|
# add a dependency on the right version of python to substvars.
|
||||||
PYPKG=`basename $SNAKE`
|
PYPKG=`basename $SNAKE`
|
||||||
|
|||||||
Vendored
+2
-413
@@ -1,420 +1,9 @@
|
|||||||
matrix-synapse-py3 (1.32.2) stable; urgency=medium
|
matrix-synapse-py3 (1.1.0-1) UNRELEASED; urgency=medium
|
||||||
|
|
||||||
* New synapse release 1.32.2.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Wed, 22 Apr 2021 12:43:52 +0100
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.32.1) stable; urgency=medium
|
|
||||||
|
|
||||||
* New synapse release 1.32.1.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Wed, 21 Apr 2021 14:00:55 +0100
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.32.0) stable; urgency=medium
|
|
||||||
|
|
||||||
[ Dan Callahan ]
|
|
||||||
* Skip tests when DEB_BUILD_OPTIONS contains "nocheck".
|
|
||||||
|
|
||||||
[ Synapse Packaging team ]
|
|
||||||
* New synapse release 1.32.0.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 20 Apr 2021 14:28:39 +0100
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.31.0) stable; urgency=medium
|
|
||||||
|
|
||||||
* New synapse release 1.31.0.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 06 Apr 2021 13:08:29 +0100
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.30.1) stable; urgency=medium
|
|
||||||
|
|
||||||
* New synapse release 1.30.1.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Fri, 26 Mar 2021 12:01:28 +0000
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.30.0) stable; urgency=medium
|
|
||||||
|
|
||||||
* New synapse release 1.30.0.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Mon, 22 Mar 2021 13:15:34 +0000
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.29.0) stable; urgency=medium
|
|
||||||
|
|
||||||
[ Jonathan de Jong ]
|
|
||||||
* Remove the python -B flag (don't generate bytecode) in scripts and documentation.
|
|
||||||
|
|
||||||
[ Synapse Packaging team ]
|
|
||||||
* New synapse release 1.29.0.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Mon, 08 Mar 2021 13:51:50 +0000
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.28.0) stable; urgency=medium
|
|
||||||
|
|
||||||
* New synapse release 1.28.0.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Thu, 25 Feb 2021 10:21:57 +0000
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.27.0) stable; urgency=medium
|
|
||||||
|
|
||||||
[ Dan Callahan ]
|
|
||||||
* Fix build on Ubuntu 16.04 LTS (Xenial).
|
|
||||||
|
|
||||||
[ Synapse Packaging team ]
|
|
||||||
* New synapse release 1.27.0.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 16 Feb 2021 13:11:28 +0000
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.26.0) stable; urgency=medium
|
|
||||||
|
|
||||||
[ Richard van der Hoff ]
|
|
||||||
* Remove dependency on `python3-distutils`.
|
|
||||||
|
|
||||||
[ Synapse Packaging team ]
|
|
||||||
* New synapse release 1.26.0.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Wed, 27 Jan 2021 12:43:35 -0500
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.25.0) stable; urgency=medium
|
|
||||||
|
|
||||||
[ Dan Callahan ]
|
|
||||||
* Update dependencies to account for the removal of the transitional
|
|
||||||
dh-systemd package from Debian Bullseye.
|
|
||||||
|
|
||||||
[ Synapse Packaging team ]
|
|
||||||
* New synapse release 1.25.0.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Wed, 13 Jan 2021 10:14:55 +0000
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.24.0) stable; urgency=medium
|
|
||||||
|
|
||||||
* New synapse release 1.24.0.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Wed, 09 Dec 2020 10:14:30 +0000
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.23.1) stable; urgency=medium
|
|
||||||
|
|
||||||
* New synapse release 1.23.1.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Wed, 09 Dec 2020 10:40:39 +0000
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.23.0) stable; urgency=medium
|
|
||||||
|
|
||||||
* New synapse release 1.23.0.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Wed, 18 Nov 2020 11:41:28 +0000
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.22.1) stable; urgency=medium
|
|
||||||
|
|
||||||
* New synapse release 1.22.1.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Fri, 30 Oct 2020 15:25:37 +0000
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.22.0) stable; urgency=medium
|
|
||||||
|
|
||||||
* New synapse release 1.22.0.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 27 Oct 2020 12:07:12 +0000
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.21.2) stable; urgency=medium
|
|
||||||
|
|
||||||
[ Synapse Packaging team ]
|
|
||||||
* New synapse release 1.21.2.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Thu, 15 Oct 2020 09:23:27 -0400
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.21.1) stable; urgency=medium
|
|
||||||
|
|
||||||
[ Synapse Packaging team ]
|
|
||||||
* New synapse release 1.21.1.
|
|
||||||
|
|
||||||
[ Andrew Morgan ]
|
|
||||||
* Explicitly install "test" python dependencies.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 13 Oct 2020 10:24:13 +0100
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.21.0) stable; urgency=medium
|
|
||||||
|
|
||||||
* New synapse release 1.21.0.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Mon, 12 Oct 2020 15:47:44 +0100
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.20.1) stable; urgency=medium
|
|
||||||
|
|
||||||
* New synapse release 1.20.1.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Thu, 24 Sep 2020 16:25:22 +0100
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.20.0) stable; urgency=medium
|
|
||||||
|
|
||||||
[ Synapse Packaging team ]
|
|
||||||
* New synapse release 1.20.0.
|
|
||||||
|
|
||||||
[ Dexter Chua ]
|
|
||||||
* Use Type=notify in systemd service
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 22 Sep 2020 15:19:32 +0100
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.19.3) stable; urgency=medium
|
|
||||||
|
|
||||||
* New synapse release 1.19.3.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Fri, 18 Sep 2020 14:59:30 +0100
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.19.2) stable; urgency=medium
|
|
||||||
|
|
||||||
* New synapse release 1.19.2.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Wed, 16 Sep 2020 12:50:30 +0100
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.19.1) stable; urgency=medium
|
|
||||||
|
|
||||||
* New synapse release 1.19.1.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Thu, 27 Aug 2020 10:50:19 +0100
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.19.0) stable; urgency=medium
|
|
||||||
|
|
||||||
[ Synapse Packaging team ]
|
|
||||||
* New synapse release 1.19.0.
|
|
||||||
|
|
||||||
[ Aaron Raimist ]
|
|
||||||
* Fix outdated documentation for SYNAPSE_CACHE_FACTOR
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Mon, 17 Aug 2020 14:06:42 +0100
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.18.0) stable; urgency=medium
|
|
||||||
|
|
||||||
* New synapse release 1.18.0.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Thu, 30 Jul 2020 10:55:53 +0100
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.17.0) stable; urgency=medium
|
|
||||||
|
|
||||||
* New synapse release 1.17.0.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Mon, 13 Jul 2020 10:20:31 +0100
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.16.1) stable; urgency=medium
|
|
||||||
|
|
||||||
* New synapse release 1.16.1.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Fri, 10 Jul 2020 12:09:24 +0100
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.17.0rc1) stable; urgency=medium
|
|
||||||
|
|
||||||
* New synapse release 1.17.0rc1.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Thu, 09 Jul 2020 16:53:12 +0100
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.16.0) stable; urgency=medium
|
|
||||||
|
|
||||||
* New synapse release 1.16.0.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Wed, 08 Jul 2020 11:03:48 +0100
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.15.2) stable; urgency=medium
|
|
||||||
|
|
||||||
* New synapse release 1.15.2.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Thu, 02 Jul 2020 10:34:00 -0400
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.15.1) stable; urgency=medium
|
|
||||||
|
|
||||||
* New synapse release 1.15.1.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 16 Jun 2020 10:27:50 +0100
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.15.0) stable; urgency=medium
|
|
||||||
|
|
||||||
* New synapse release 1.15.0.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Thu, 11 Jun 2020 13:27:06 +0100
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.14.0) stable; urgency=medium
|
|
||||||
|
|
||||||
* New synapse release 1.14.0.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Thu, 28 May 2020 10:37:27 +0000
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.13.0) stable; urgency=medium
|
|
||||||
|
|
||||||
[ Patrick Cloke ]
|
|
||||||
* Add information about .well-known files to Debian installation scripts.
|
|
||||||
|
|
||||||
[ Synapse Packaging team ]
|
|
||||||
* New synapse release 1.13.0.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 19 May 2020 09:16:56 -0400
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.12.4) stable; urgency=medium
|
|
||||||
|
|
||||||
* New synapse release 1.12.4.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Thu, 23 Apr 2020 10:58:14 -0400
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.12.3) stable; urgency=medium
|
|
||||||
|
|
||||||
[ Richard van der Hoff ]
|
|
||||||
* Update the Debian build scripts to handle the new installation paths
|
|
||||||
for the support libraries introduced by Pillow 7.1.1.
|
|
||||||
|
|
||||||
[ Synapse Packaging team ]
|
|
||||||
* New synapse release 1.12.3.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Fri, 03 Apr 2020 10:55:03 +0100
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.12.2) stable; urgency=medium
|
|
||||||
|
|
||||||
* New synapse release 1.12.2.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Mon, 02 Apr 2020 19:02:17 +0000
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.12.1) stable; urgency=medium
|
|
||||||
|
|
||||||
* New synapse release 1.12.1.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Mon, 02 Apr 2020 11:30:47 +0000
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.12.0) stable; urgency=medium
|
|
||||||
|
|
||||||
* New synapse release 1.12.0.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Mon, 23 Mar 2020 12:13:03 +0000
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.11.1) stable; urgency=medium
|
|
||||||
|
|
||||||
* New synapse release 1.11.1.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 03 Mar 2020 15:01:22 +0000
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.11.0) stable; urgency=medium
|
|
||||||
|
|
||||||
* New synapse release 1.11.0.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Fri, 21 Feb 2020 08:54:34 +0000
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.10.1) stable; urgency=medium
|
|
||||||
|
|
||||||
* New synapse release 1.10.1.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Mon, 17 Feb 2020 16:27:28 +0000
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.10.0) stable; urgency=medium
|
|
||||||
|
|
||||||
* New synapse release 1.10.0.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Wed, 12 Feb 2020 12:18:54 +0000
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.9.1) stable; urgency=medium
|
|
||||||
|
|
||||||
* New synapse release 1.9.1.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 28 Jan 2020 13:09:23 +0000
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.9.0) stable; urgency=medium
|
|
||||||
|
|
||||||
* New synapse release 1.9.0.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Thu, 23 Jan 2020 12:56:31 +0000
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.8.0) stable; urgency=medium
|
|
||||||
|
|
||||||
[ Richard van der Hoff ]
|
|
||||||
* Automate generation of the default log configuration file.
|
|
||||||
|
|
||||||
[ Synapse Packaging team ]
|
|
||||||
* New synapse release 1.8.0.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Thu, 09 Jan 2020 11:39:27 +0000
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.7.3) stable; urgency=medium
|
|
||||||
|
|
||||||
* New synapse release 1.7.3.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 31 Dec 2019 10:45:04 +0000
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.7.2) stable; urgency=medium
|
|
||||||
|
|
||||||
* New synapse release 1.7.2.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Fri, 20 Dec 2019 10:56:50 +0000
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.7.1) stable; urgency=medium
|
|
||||||
|
|
||||||
* New synapse release 1.7.1.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Wed, 18 Dec 2019 09:37:59 +0000
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.7.0) stable; urgency=medium
|
|
||||||
|
|
||||||
* New synapse release 1.7.0.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Fri, 13 Dec 2019 10:19:38 +0000
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.6.1) stable; urgency=medium
|
|
||||||
|
|
||||||
* New synapse release 1.6.1.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Thu, 28 Nov 2019 11:10:40 +0000
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.6.0) stable; urgency=medium
|
|
||||||
|
|
||||||
* New synapse release 1.6.0.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 26 Nov 2019 12:15:40 +0000
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.5.1) stable; urgency=medium
|
|
||||||
|
|
||||||
* New synapse release 1.5.1.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Wed, 06 Nov 2019 10:02:14 +0000
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.5.0) stable; urgency=medium
|
|
||||||
|
|
||||||
* New synapse release 1.5.0.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 29 Oct 2019 14:28:41 +0000
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.4.1) stable; urgency=medium
|
|
||||||
|
|
||||||
* New synapse release 1.4.1.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Fri, 18 Oct 2019 10:13:27 +0100
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.4.0) stable; urgency=medium
|
|
||||||
|
|
||||||
* New synapse release 1.4.0.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Thu, 03 Oct 2019 13:22:25 +0100
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.3.1) stable; urgency=medium
|
|
||||||
|
|
||||||
* New synapse release 1.3.1.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Sat, 17 Aug 2019 09:15:49 +0100
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.3.0) stable; urgency=medium
|
|
||||||
|
|
||||||
[ Andrew Morgan ]
|
|
||||||
* Remove libsqlite3-dev from required build dependencies.
|
|
||||||
|
|
||||||
[ Synapse Packaging team ]
|
|
||||||
* New synapse release 1.3.0.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Thu, 15 Aug 2019 12:04:23 +0100
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.2.0) stable; urgency=medium
|
|
||||||
|
|
||||||
[ Amber Brown ]
|
[ Amber Brown ]
|
||||||
* Update logging config defaults to match API changes in Synapse.
|
* Update logging config defaults to match API changes in Synapse.
|
||||||
|
|
||||||
[ Richard van der Hoff ]
|
-- Erik Johnston <erikj@rae> Thu, 04 Jul 2019 13:59:02 +0100
|
||||||
* Add Recommends and Depends for some libraries which you probably want.
|
|
||||||
|
|
||||||
[ Synapse Packaging team ]
|
|
||||||
* New synapse release 1.2.0.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Thu, 25 Jul 2019 14:10:07 +0100
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.1.0) stable; urgency=medium
|
matrix-synapse-py3 (1.1.0) stable; urgency=medium
|
||||||
|
|
||||||
|
|||||||
Vendored
+3
-10
@@ -2,15 +2,10 @@ Source: matrix-synapse-py3
|
|||||||
Section: contrib/python
|
Section: contrib/python
|
||||||
Priority: extra
|
Priority: extra
|
||||||
Maintainer: Synapse Packaging team <packages@matrix.org>
|
Maintainer: Synapse Packaging team <packages@matrix.org>
|
||||||
# keep this list in sync with the build dependencies in docker/Dockerfile-dhvirtualenv.
|
|
||||||
# TODO: Remove the dependency on dh-systemd after dropping support for Ubuntu xenial
|
|
||||||
# On all other supported releases, it's merely a transitional package which
|
|
||||||
# does nothing but depends on debhelper (> 9.20160709)
|
|
||||||
Build-Depends:
|
Build-Depends:
|
||||||
debhelper (>= 9.20160709) | dh-systemd,
|
debhelper (>= 9),
|
||||||
|
dh-systemd,
|
||||||
dh-virtualenv (>= 1.1),
|
dh-virtualenv (>= 1.1),
|
||||||
libsystemd-dev,
|
|
||||||
libpq-dev,
|
|
||||||
lsb-release,
|
lsb-release,
|
||||||
python3-dev,
|
python3-dev,
|
||||||
python3,
|
python3,
|
||||||
@@ -31,13 +26,11 @@ Pre-Depends: dpkg (>= 1.16.1)
|
|||||||
Depends:
|
Depends:
|
||||||
adduser,
|
adduser,
|
||||||
debconf,
|
debconf,
|
||||||
|
python3-distutils|libpython3-stdlib (<< 3.6),
|
||||||
${misc:Depends},
|
${misc:Depends},
|
||||||
${shlibs:Depends},
|
|
||||||
${synapse:pydepends},
|
${synapse:pydepends},
|
||||||
# some of our scripts use perl, but none of them are important,
|
# some of our scripts use perl, but none of them are important,
|
||||||
# so we put perl:Depends in Suggests rather than Depends.
|
# so we put perl:Depends in Suggests rather than Depends.
|
||||||
Recommends:
|
|
||||||
${shlibs1:Recommends},
|
|
||||||
Suggests:
|
Suggests:
|
||||||
sqlite3,
|
sqlite3,
|
||||||
${perl:Depends},
|
${perl:Depends},
|
||||||
|
|||||||
Vendored
+1
@@ -1 +1,2 @@
|
|||||||
|
debian/log.yaml etc/matrix-synapse
|
||||||
debian/manage_debconf.pl /opt/venvs/matrix-synapse/lib/
|
debian/manage_debconf.pl /opt/venvs/matrix-synapse/lib/
|
||||||
|
|||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user