Compare commits
2 Commits: dmr/typing ... anoa/docs_

| Author | SHA1 | Date |
|---|---|---|
| | 3360be1829 | |
| | 19ca533bcc | |
@@ -1,4 +0,0 @@
---
title: CI run against latest deps is failing
---
See https://github.com/{{env.GITHUB_REPOSITORY}}/actions/runs/{{env.GITHUB_RUN_ID}}
.ci/patch_for_twisted_trunk.sh (new executable file, 8 lines)
@@ -0,0 +1,8 @@
#!/bin/sh

# replaces the dependency on Twisted in `python_dependencies` with trunk.

set -e
cd "$(dirname "$0")"/..

sed -i -e 's#"Twisted.*"#"Twisted @ git+https://github.com/twisted/twisted"#' synapse/python_dependencies.py
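For reference, a minimal sketch of what that substitution does, assuming a pin such as `"Twisted>=18.9.0"` appears in `synapse/python_dependencies.py` (the exact pin there may differ):

```sh
# Illustrative only: feed a hypothetical dependency line through the same sed
# expression used by the script above.
echo '    "Twisted>=18.9.0",' \
  | sed -e 's#"Twisted.*"#"Twisted @ git+https://github.com/twisted/twisted"#'
# ->    "Twisted @ git+https://github.com/twisted/twisted",
```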
@@ -1,25 +0,0 @@
#!/bin/bash
#
# Fetches a version of complement which best matches the current build.
#
# The tarball is unpacked into `./complement`.

set -e
mkdir -p complement

# Pick an appropriate version of complement. Depending on whether this is a PR or release,
# etc. we need to use different fallbacks:
#
# 1. First check if there's a similarly named branch (GITHUB_HEAD_REF
#    for pull requests, otherwise GITHUB_REF).
# 2. Attempt to use the base branch, e.g. when merging into release-vX.Y
#    (GITHUB_BASE_REF for pull requests).
# 3. Use the default complement branch ("HEAD").
for BRANCH_NAME in "$GITHUB_HEAD_REF" "$GITHUB_BASE_REF" "${GITHUB_REF#refs/heads/}" "HEAD"; do
  # Skip empty branch names and merge commits.
  if [[ -z "$BRANCH_NAME" || $BRANCH_NAME =~ ^refs/pull/.* ]]; then
    continue
  fi

  (wget -O - "https://github.com/matrix-org/complement/archive/$BRANCH_NAME.tar.gz" | tar -xz --strip-components=1 -C complement) && break
done
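A quick dry run of the branch fallback order described above, with made-up values standing in for the GitHub-provided variables:

```sh
#!/usr/bin/env bash
# Illustrative values only; in CI these come from the GitHub Actions environment.
GITHUB_HEAD_REF="some-feature-branch"
GITHUB_BASE_REF="develop"
GITHUB_REF="refs/pull/123/merge"
for BRANCH_NAME in "$GITHUB_HEAD_REF" "$GITHUB_BASE_REF" "${GITHUB_REF#refs/heads/}" "HEAD"; do
  # Same skip rule as the deleted script: empty names and merge refs are ignored.
  if [[ -z "$BRANCH_NAME" || $BRANCH_NAME =~ ^refs/pull/.* ]]; then
    continue
  fi
  echo "would try: $BRANCH_NAME"
done
# Prints: some-feature-branch, develop, HEAD (the first successful download wins).
```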
@@ -2,24 +2,29 @@

# Test for the export-data admin command against sqlite and postgres

# Expects Synapse to have been already installed with `poetry install --extras postgres`.
# Expects `poetry` to be available on the `PATH`.

set -xe
cd "$(dirname "$0")/../.."

echo "--- Install dependencies"

# Install dependencies for this test.
pip install psycopg2

# Install Synapse itself. This won't update any libraries.
pip install -e .

echo "--- Generate the signing key"

# Generate the server's signing key.
poetry run synapse_homeserver --generate-keys -c .ci/sqlite-config.yaml
python -m synapse.app.homeserver --generate-keys -c .ci/sqlite-config.yaml

echo "--- Prepare test database"

# Make sure the SQLite3 database is using the latest schema and has no pending background update.
poetry run update_synapse_database --database-config .ci/sqlite-config.yaml --run-background-updates
update_synapse_database --database-config .ci/sqlite-config.yaml --run-background-updates

# Run the export-data command on the sqlite test database
poetry run python -m synapse.app.admin_cmd -c .ci/sqlite-config.yaml export-data @anon-20191002_181700-832:localhost:8800 \
python -m synapse.app.admin_cmd -c .ci/sqlite-config.yaml export-data @anon-20191002_181700-832:localhost:8800 \
  --output-directory /tmp/export_data

# Test that the output directory exists and contains the rooms directory
@@ -32,14 +37,14 @@ else
fi

# Create the PostgreSQL database.
poetry run .ci/scripts/postgres_exec.py "CREATE DATABASE synapse"
.ci/scripts/postgres_exec.py "CREATE DATABASE synapse"

# Port the SQLite databse to postgres so we can check command works against postgres
echo "+++ Port SQLite3 databse to postgres"
poetry run synapse_port_db --sqlite-database .ci/test_db.db --postgres-config .ci/postgres-config.yaml
synapse_port_db --sqlite-database .ci/test_db.db --postgres-config .ci/postgres-config.yaml

# Run the export-data command on postgres database
poetry run python -m synapse.app.admin_cmd -c .ci/postgres-config.yaml export-data @anon-20191002_181700-832:localhost:8800 \
python -m synapse.app.admin_cmd -c .ci/postgres-config.yaml export-data @anon-20191002_181700-832:localhost:8800 \
  --output-directory /tmp/export_data2

# Test that the output directory exists and contains the rooms directory
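The directory check referenced above (partly elided in this view) amounts to something like the following sketch; the exact test in the script may differ:

```sh
# Hedged sketch: verify the export produced a rooms directory, as the comments
# above describe. Paths follow the --output-directory arguments shown in the diff.
if [ -d "/tmp/export_data/rooms" ]; then
  echo "Rooms directory found: export-data succeeded"
else
  echo "Rooms directory not found: export-data failed"
  exit 1
fi
```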
@@ -1,9 +1,6 @@
#!/usr/bin/env bash
# this script is run by GitHub Actions in a plain `focal` container; it
# - installs the minimal system requirements, and poetry;
# - patches the project definition file to refer to old versions only;
# - creates a venv with these old versions using poetry; and finally
# - invokes `trial` to run the tests with old deps.
# this script is run by GitHub Actions in a plain `focal` container; it installs the
# minimal requirements for tox and hands over to the py3-old tox environment.

# Prevent tzdata from asking for user input
export DEBIAN_FRONTEND=noninteractive
@@ -12,70 +9,12 @@ set -ex

apt-get update
apt-get install -y \
    python3 python3-dev python3-pip python3-venv pipx \
    libxml2-dev libxslt-dev xmlsec1 zlib1g-dev libjpeg-dev libwebp-dev
    python3 python3-dev python3-pip python3-venv \
    libxml2-dev libxslt-dev xmlsec1 zlib1g-dev tox libjpeg-dev libwebp-dev

export LANG="C.UTF-8"

# Prevent virtualenv from auto-updating pip to an incompatible version
export VIRTUALENV_NO_DOWNLOAD=1

# TODO: in the future, we could use an implementation of
# https://github.com/python-poetry/poetry/issues/3527
# https://github.com/pypa/pip/issues/8085
# to select the lowest possible versions, rather than resorting to this sed script.

# Patch the project definitions in-place:
# - Replace all lower and tilde bounds with exact bounds
# - Make the pyopenssl 17.0, which is the oldest version that works with
#   a `cryptography` compiled against OpenSSL 1.1.
# - Delete all lines referring to psycopg2 --- so no testing of postgres support.
# - Omit systemd: we're not logging to journal here.

# TODO: also replace caret bounds, see https://python-poetry.org/docs/dependency-specification/#version-constraints
# We don't use these yet, but IIRC they are the default bound used when you `poetry add`.
# The sed expression 's/\^/==/g' ought to do the trick. But it would also change
# `python = "^3.7"` to `python = "==3.7", which would mean we fail because olddeps
# runs on 3.8 (#12343).

sed -i \
   -e "s/[~>]=/==/g" \
   -e "/psycopg2/d" \
   -e 's/pyOpenSSL = "==16.0.0"/pyOpenSSL = "==17.0.0"/' \
   -e '/systemd/d' \
   pyproject.toml

# Use poetry to do the installation. This ensures that the versions are all mutually
# compatible (as far the package metadata declares, anyway); pip's package resolver
# is more lax.
#
# Rather than `poetry install --no-dev`, we drop all dev dependencies from the
# toml file. This means we don't have to ensure compatibility between old deps and
# dev tools.

pip install --user toml

REMOVE_DEV_DEPENDENCIES="
import toml
with open('pyproject.toml', 'r') as f:
    data = toml.loads(f.read())

del data['tool']['poetry']['dev-dependencies']

with open('pyproject.toml', 'w') as f:
    toml.dump(data, f)
"
python3 -c "$REMOVE_DEV_DEPENDENCIES"

pipx install poetry==1.1.12
~/.local/bin/poetry lock

echo "::group::Patched pyproject.toml"
cat pyproject.toml
echo "::endgroup::"
echo "::group::Lockfile after patch"
cat poetry.lock
echo "::endgroup::"

~/.local/bin/poetry install -E "all test"
~/.local/bin/poetry run trial --jobs=2 tests
exec tox -e py3-old
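For reference, the effect of the bound-rewriting sed above on a made-up `pyproject.toml` entry (real entries in the file will differ):

```sh
# Illustrative only: the "s/[~>]=/==/g" expression turns lower/tilde bounds
# into exact pins.
echo 'attrs = ">=19.2.0"' | sed -e "s/[~>]=/==/g"
# -> attrs = "==19.2.0"
```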
@@ -1,37 +1,43 @@
|
||||
#!/usr/bin/env bash
|
||||
#
|
||||
# Test script for 'synapse_port_db'.
|
||||
# - configures synapse and a postgres server.
|
||||
# - sets up synapse and deps
|
||||
# - runs the port script on a prepopulated test sqlite db
|
||||
# - also runs it against an new sqlite db
|
||||
#
|
||||
# Expects Synapse to have been already installed with `poetry install --extras postgres`.
|
||||
# Expects `poetry` to be available on the `PATH`.
|
||||
|
||||
|
||||
set -xe
|
||||
cd "$(dirname "$0")/../.."
|
||||
|
||||
echo "--- Install dependencies"
|
||||
|
||||
# Install dependencies for this test.
|
||||
pip install psycopg2 coverage coverage-enable-subprocess
|
||||
|
||||
# Install Synapse itself. This won't update any libraries.
|
||||
pip install -e .
|
||||
|
||||
echo "--- Generate the signing key"
|
||||
|
||||
# Generate the server's signing key.
|
||||
poetry run synapse_homeserver --generate-keys -c .ci/sqlite-config.yaml
|
||||
python -m synapse.app.homeserver --generate-keys -c .ci/sqlite-config.yaml
|
||||
|
||||
echo "--- Prepare test database"
|
||||
|
||||
# Make sure the SQLite3 database is using the latest schema and has no pending background update.
|
||||
poetry run update_synapse_database --database-config .ci/sqlite-config.yaml --run-background-updates
|
||||
update_synapse_database --database-config .ci/sqlite-config.yaml --run-background-updates
|
||||
|
||||
# Create the PostgreSQL database.
|
||||
poetry run .ci/scripts/postgres_exec.py "CREATE DATABASE synapse"
|
||||
.ci/scripts/postgres_exec.py "CREATE DATABASE synapse"
|
||||
|
||||
echo "+++ Run synapse_port_db against test database"
|
||||
# TODO: this invocation of synapse_port_db (and others below) used to be prepended with `coverage run`,
|
||||
# but coverage seems unable to find the entrypoints installed by `pip install -e .`.
|
||||
poetry run synapse_port_db --sqlite-database .ci/test_db.db --postgres-config .ci/postgres-config.yaml
|
||||
synapse_port_db --sqlite-database .ci/test_db.db --postgres-config .ci/postgres-config.yaml
|
||||
|
||||
# We should be able to run twice against the same database.
|
||||
echo "+++ Run synapse_port_db a second time"
|
||||
poetry run synapse_port_db --sqlite-database .ci/test_db.db --postgres-config .ci/postgres-config.yaml
|
||||
synapse_port_db --sqlite-database .ci/test_db.db --postgres-config .ci/postgres-config.yaml
|
||||
|
||||
#####
|
||||
|
||||
@@ -42,12 +48,12 @@ echo "--- Prepare empty SQLite database"
|
||||
# we do this by deleting the sqlite db, and then doing the same again.
|
||||
rm .ci/test_db.db
|
||||
|
||||
poetry run update_synapse_database --database-config .ci/sqlite-config.yaml --run-background-updates
|
||||
update_synapse_database --database-config .ci/sqlite-config.yaml --run-background-updates
|
||||
|
||||
# re-create the PostgreSQL database.
|
||||
poetry run .ci/scripts/postgres_exec.py \
|
||||
.ci/scripts/postgres_exec.py \
|
||||
"DROP DATABASE synapse" \
|
||||
"CREATE DATABASE synapse"
|
||||
|
||||
echo "+++ Run synapse_port_db against empty database"
|
||||
poetry run synapse_port_db --sqlite-database .ci/test_db.db --postgres-config .ci/postgres-config.yaml
|
||||
synapse_port_db --sqlite-database .ci/test_db.db --postgres-config .ci/postgres-config.yaml
|
||||
|
||||
@@ -4,8 +4,8 @@
# things to include
!docker
!synapse
!MANIFEST.in
!README.rst
!pyproject.toml
!poetry.lock
!setup.py

**/__pycache__

@@ -6,6 +6,3 @@ aff1eb7c671b0a3813407321d2702ec46c71fa56

# Update black to 20.8b1 (#9381).
0a00b7ff14890987f09112a2ae696c61001e6cf1

# Convert tests/rest/admin/test_room.py to unix file endings (#7953).
c4268e3da64f1abb5b31deaeb5769adb6510c0a7
.github/ISSUE_TEMPLATE/BUG_REPORT.md (new file, 72 lines)
@@ -0,0 +1,72 @@
|
||||
---
|
||||
name: Bug report
|
||||
about: Create a report to help us improve
|
||||
|
||||
---
|
||||
|
||||
<!--
|
||||
|
||||
**THIS IS NOT A SUPPORT CHANNEL!**
|
||||
**IF YOU HAVE SUPPORT QUESTIONS ABOUT RUNNING OR CONFIGURING YOUR OWN HOME SERVER**,
|
||||
please ask in **#synapse:matrix.org** (using a matrix.org account if necessary)
|
||||
|
||||
If you want to report a security issue, please see https://matrix.org/security-disclosure-policy/
|
||||
|
||||
This is a bug report template. By following the instructions below and
|
||||
filling out the sections with your information, you will help the us to get all
|
||||
the necessary data to fix your issue.
|
||||
|
||||
You can also preview your report before submitting it. You may remove sections
|
||||
that aren't relevant to your particular case.
|
||||
|
||||
Text between <!-- and --> marks will be invisible in the report.
|
||||
|
||||
-->
|
||||
|
||||
### Description
|
||||
|
||||
<!-- Describe here the problem that you are experiencing -->
|
||||
|
||||
### Steps to reproduce
|
||||
|
||||
- list the steps
|
||||
- that reproduce the bug
|
||||
- using hyphens as bullet points
|
||||
|
||||
<!--
|
||||
Describe how what happens differs from what you expected.
|
||||
|
||||
If you can identify any relevant log snippets from _homeserver.log_, please include
|
||||
those (please be careful to remove any personal or private data). Please surround them with
|
||||
``` (three backticks, on a line on their own), so that they are formatted legibly.
|
||||
-->
|
||||
|
||||
### Version information
|
||||
|
||||
<!-- IMPORTANT: please answer the following questions, to help us narrow down the problem -->
|
||||
|
||||
<!-- Was this issue identified on matrix.org or another homeserver? -->
|
||||
- **Homeserver**:
|
||||
|
||||
If not matrix.org:
|
||||
|
||||
<!--
|
||||
What version of Synapse is running?
|
||||
|
||||
You can find the Synapse version with this command:
|
||||
|
||||
$ curl http://localhost:8008/_synapse/admin/v1/server_version
|
||||
|
||||
(You may need to replace `localhost:8008` if Synapse is not configured to
|
||||
listen on that port.)
|
||||
-->
|
||||
- **Version**:
|
||||
|
||||
- **Install method**:
|
||||
<!-- examples: package manager/git clone/pip -->
|
||||
|
||||
- **Platform**:
|
||||
<!--
|
||||
Tell us about the environment in which your homeserver is operating
|
||||
distro, hardware, if it's running in a vm/container, etc.
|
||||
-->
|
||||
.github/ISSUE_TEMPLATE/BUG_REPORT.yml (103 lines removed)
@@ -1,103 +0,0 @@
|
||||
name: Bug report
|
||||
description: Create a report to help us improve
|
||||
body:
|
||||
- type: markdown
|
||||
attributes:
|
||||
value: |
|
||||
**THIS IS NOT A SUPPORT CHANNEL!**
|
||||
**IF YOU HAVE SUPPORT QUESTIONS ABOUT RUNNING OR CONFIGURING YOUR OWN HOME SERVER**, please ask in **[#synapse:matrix.org](https://matrix.to/#/#synapse:matrix.org)** (using a matrix.org account if necessary).
|
||||
|
||||
If you want to report a security issue, please see https://matrix.org/security-disclosure-policy/
|
||||
|
||||
This is a bug report form. By following the instructions below and completing the sections with your information, you will help the us to get all the necessary data to fix your issue.
|
||||
|
||||
You can also preview your report before submitting it.
|
||||
- type: textarea
|
||||
id: description
|
||||
attributes:
|
||||
label: Description
|
||||
description: Describe the problem that you are experiencing
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: reproduction_steps
|
||||
attributes:
|
||||
label: Steps to reproduce
|
||||
description: |
|
||||
Describe the series of steps that leads you to the problem.
|
||||
|
||||
Describe how what happens differs from what you expected.
|
||||
placeholder: Tell us what you see!
|
||||
value: |
|
||||
- list the steps
|
||||
- that reproduce the bug
|
||||
- using hyphens as bullet points
|
||||
validations:
|
||||
required: true
|
||||
- type: markdown
|
||||
attributes:
|
||||
value: |
|
||||
---
|
||||
|
||||
**IMPORTANT**: please answer the following questions, to help us narrow down the problem.
|
||||
- type: input
|
||||
id: homeserver
|
||||
attributes:
|
||||
label: Homeserver
|
||||
description: Which homeserver was this issue identified on? (matrix.org, another homeserver, etc)
|
||||
validations:
|
||||
required: true
|
||||
- type: input
|
||||
id: version
|
||||
attributes:
|
||||
label: Synapse Version
|
||||
description: |
|
||||
What version of Synapse is this homeserver running?
|
||||
|
||||
You can find the Synapse version by visiting https://yourserver.example.com/_matrix/federation/v1/version
|
||||
|
||||
or with this command:
|
||||
|
||||
```
|
||||
$ curl http://localhost:8008/_synapse/admin/v1/server_version
|
||||
```
|
||||
|
||||
(You may need to replace `localhost:8008` if Synapse is not configured to listen on that port.)
|
||||
validations:
|
||||
required: true
|
||||
- type: dropdown
|
||||
id: install_method
|
||||
attributes:
|
||||
label: Installation Method
|
||||
options:
|
||||
- Docker (matrixdotorg/synapse)
|
||||
- Debian packages from packages.matrix.org
|
||||
- pip (from PyPI)
|
||||
- Other (please mention below)
|
||||
- type: textarea
|
||||
id: platform
|
||||
attributes:
|
||||
label: Platform
|
||||
description: |
|
||||
Tell us about the environment in which your homeserver is operating...
|
||||
e.g. distro, hardware, if it's running in a vm/container, etc.
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: logs
|
||||
attributes:
|
||||
label: Relevant log output
|
||||
description: |
|
||||
Please copy and paste any relevant log output, ideally at INFO or DEBUG log level.
|
||||
This will be automatically formatted into code, so there is no need for backticks.
|
||||
|
||||
Please be careful to remove any personal or private data.
|
||||
|
||||
**Bug reports are usually very difficult to diagnose without logging.**
|
||||
render: shell
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: anything_else
|
||||
attributes:
|
||||
label: Anything else that would be useful to know?
|
||||
.github/workflows/docker.yml (30 changed lines)
@@ -34,24 +34,32 @@ jobs:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
|
||||
# TODO: consider using https://github.com/docker/metadata-action instead of this
|
||||
# custom magic
|
||||
- name: Calculate docker image tag
|
||||
id: set-tag
|
||||
uses: docker/metadata-action@master
|
||||
with:
|
||||
images: matrixdotorg/synapse
|
||||
flavor: |
|
||||
latest=false
|
||||
tags: |
|
||||
type=raw,value=develop,enable=${{ github.ref == 'refs/heads/develop' }}
|
||||
type=raw,value=latest,enable=${{ github.ref == 'refs/heads/master' }}
|
||||
type=raw,value=latest,enable=${{ github.ref == 'refs/heads/main' }}
|
||||
type=pep440,pattern={{raw}}
|
||||
run: |
|
||||
case "${GITHUB_REF}" in
|
||||
refs/heads/develop)
|
||||
tag=develop
|
||||
;;
|
||||
refs/heads/master|refs/heads/main)
|
||||
tag=latest
|
||||
;;
|
||||
refs/tags/*)
|
||||
tag=${GITHUB_REF#refs/tags/}
|
||||
;;
|
||||
*)
|
||||
tag=${GITHUB_SHA}
|
||||
;;
|
||||
esac
|
||||
echo "::set-output name=tag::$tag"
|
||||
|
||||
- name: Build and push all platforms
|
||||
uses: docker/build-push-action@v2
|
||||
with:
|
||||
push: true
|
||||
labels: "gitsha1=${{ github.sha }}"
|
||||
tags: "${{ steps.set-tag.outputs.tags }}"
|
||||
tags: "matrixdotorg/synapse:${{ steps.set-tag.outputs.tag }}"
|
||||
file: "docker/Dockerfile"
|
||||
platforms: linux/amd64,linux/arm64
|
||||
|
||||
.github/workflows/docs.yaml (2 changed lines)
@@ -22,7 +22,7 @@ jobs:
      - name: Setup mdbook
        uses: peaceiris/actions-mdbook@4b5ef36b314c2599664ca107bb8c02412548d79d # v1.1.14
        with:
          mdbook-version: '0.4.17'
          mdbook-version: '0.4.9'

      - name: Build the documentation
        # mdbook will only create an index.html if we're including docs/README.md in SUMMARY.md.
.github/workflows/latest_deps.yml (159 lines removed)
@@ -1,159 +0,0 @@
|
||||
# People who are freshly `pip install`ing from PyPI will pull in the latest versions of
|
||||
# dependencies which match the broad requirements. Since most CI runs are against
|
||||
# the locked poetry environment, run specifically against the latest dependencies to
|
||||
# know if there's an upcoming breaking change.
|
||||
#
|
||||
# As an overview this workflow:
|
||||
# - checks out develop,
|
||||
# - installs from source, pulling in the dependencies like a fresh `pip install` would, and
|
||||
# - runs mypy and test suites in that checkout.
|
||||
#
|
||||
# Based on the twisted trunk CI job.
|
||||
|
||||
name: Latest dependencies
|
||||
|
||||
on:
|
||||
schedule:
|
||||
- cron: 0 7 * * *
|
||||
workflow_dispatch:
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
mypy:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
# The dev dependencies aren't exposed in the wheel metadata (at least with current
|
||||
# poetry-core versions), so we install with poetry.
|
||||
- uses: matrix-org/setup-python-poetry@v1
|
||||
with:
|
||||
python-version: "3.x"
|
||||
poetry-version: "1.2.0b1"
|
||||
extras: "all"
|
||||
# Dump installed versions for debugging.
|
||||
- run: poetry run pip list > before.txt
|
||||
# Upgrade all runtime dependencies only. This is intended to mimic a fresh
|
||||
# `pip install matrix-synapse[all]` as closely as possible.
|
||||
- run: poetry update --no-dev
|
||||
- run: poetry run pip list > after.txt && (diff -u before.txt after.txt || true)
|
||||
- name: Remove warn_unused_ignores from mypy config
|
||||
run: sed '/warn_unused_ignores = True/d' -i mypy.ini
|
||||
- run: poetry run mypy
|
||||
trial:
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
matrix:
|
||||
include:
|
||||
- database: "sqlite"
|
||||
- database: "postgres"
|
||||
postgres-version: "14"
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- run: sudo apt-get -qq install xmlsec1
|
||||
- name: Set up PostgreSQL ${{ matrix.postgres-version }}
|
||||
if: ${{ matrix.postgres-version }}
|
||||
run: |
|
||||
docker run -d -p 5432:5432 \
|
||||
-e POSTGRES_PASSWORD=postgres \
|
||||
-e POSTGRES_INITDB_ARGS="--lc-collate C --lc-ctype C --encoding UTF8" \
|
||||
postgres:${{ matrix.postgres-version }}
|
||||
- uses: actions/setup-python@v2
|
||||
with:
|
||||
python-version: "3.x"
|
||||
- run: pip install .[all,test]
|
||||
- name: Await PostgreSQL
|
||||
if: ${{ matrix.postgres-version }}
|
||||
timeout-minutes: 2
|
||||
run: until pg_isready -h localhost; do sleep 1; done
|
||||
- run: python -m twisted.trial --jobs=2 tests
|
||||
env:
|
||||
SYNAPSE_POSTGRES: ${{ matrix.database == 'postgres' || '' }}
|
||||
SYNAPSE_POSTGRES_HOST: localhost
|
||||
SYNAPSE_POSTGRES_USER: postgres
|
||||
SYNAPSE_POSTGRES_PASSWORD: postgres
|
||||
- name: Dump logs
|
||||
# Logs are most useful when the command fails, always include them.
|
||||
if: ${{ always() }}
|
||||
# Note: Dumps to workflow logs instead of using actions/upload-artifact
|
||||
# This keeps logs colocated with failing jobs
|
||||
# It also ignores find's exit code; this is a best effort affair
|
||||
run: >-
|
||||
find _trial_temp -name '*.log'
|
||||
-exec echo "::group::{}" \;
|
||||
-exec cat {} \;
|
||||
-exec echo "::endgroup::" \;
|
||||
|| true
|
||||
|
||||
|
||||
sytest:
|
||||
runs-on: ubuntu-latest
|
||||
container:
|
||||
image: matrixdotorg/sytest-synapse:testing
|
||||
volumes:
|
||||
- ${{ github.workspace }}:/src
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include:
|
||||
- sytest-tag: focal
|
||||
|
||||
- sytest-tag: focal
|
||||
postgres: postgres
|
||||
workers: workers
|
||||
redis: redis
|
||||
env:
|
||||
POSTGRES: ${{ matrix.postgres && 1}}
|
||||
WORKERS: ${{ matrix.workers && 1 }}
|
||||
REDIS: ${{ matrix.redis && 1 }}
|
||||
BLACKLIST: ${{ matrix.workers && 'synapse-blacklist-with-workers' }}
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- name: Ensure sytest runs `pip install`
|
||||
# Delete the lockfile so sytest will `pip install` rather than `poetry install`
|
||||
run: rm /src/poetry.lock
|
||||
working-directory: /src
|
||||
- name: Prepare test blacklist
|
||||
run: cat sytest-blacklist .ci/worker-blacklist > synapse-blacklist-with-workers
|
||||
- name: Run SyTest
|
||||
run: /bootstrap.sh synapse
|
||||
working-directory: /src
|
||||
- name: Summarise results.tap
|
||||
if: ${{ always() }}
|
||||
run: /sytest/scripts/tap_to_gha.pl /logs/results.tap
|
||||
- name: Upload SyTest logs
|
||||
uses: actions/upload-artifact@v2
|
||||
if: ${{ always() }}
|
||||
with:
|
||||
name: Sytest Logs - ${{ job.status }} - (${{ join(matrix.*, ', ') }})
|
||||
path: |
|
||||
/logs/results.tap
|
||||
/logs/**/*.log*
|
||||
|
||||
|
||||
# TODO: run complement (as with twisted trunk, see #12473).
|
||||
|
||||
# open an issue if the build fails, so we know about it.
|
||||
open-issue:
|
||||
if: failure()
|
||||
needs:
|
||||
# TODO: should mypy be included here? It feels more brittle than the other two.
|
||||
- mypy
|
||||
- trial
|
||||
- sytest
|
||||
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- uses: JasonEtco/create-an-issue@5d9504915f79f9cc6d791934b8ef34f2353dd74d # v2.5.0, 2020-12-06
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
update_existing: true
|
||||
filename: .ci/latest_deps_build_failed_issue_template.md
|
||||
|
||||
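The deleted workflow above can be approximated locally. A sketch, assuming a checkout with poetry installed and the `all` extras available; the update, diff, sed and mypy steps mirror the workflow text, while the initial install line is an assumption:

```sh
# Rough local equivalent of the workflow's mypy job against latest dependencies.
poetry install --extras all            # assumed starting point (CI uses setup-python-poetry)
poetry run pip list > before.txt
poetry update --no-dev                 # upgrade runtime deps, mimicking a fresh pip install
poetry run pip list > after.txt && (diff -u before.txt after.txt || true)
sed '/warn_unused_ignores = True/d' -i mypy.ini
poetry run mypy
```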
.github/workflows/tests.yml (148 changed lines)
@@ -15,14 +15,25 @@ jobs:
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions/setup-python@v2
|
||||
- run: pip install .
|
||||
- run: pip install -e .
|
||||
- run: scripts-dev/generate_sample_config.sh --check
|
||||
- run: scripts-dev/config-lint.sh
|
||||
|
||||
lint:
|
||||
uses: "matrix-org/backend-meta/.github/workflows/python-poetry-ci.yml@v1"
|
||||
with:
|
||||
typechecking-extras: "all"
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
matrix:
|
||||
toxenv:
|
||||
- "check_codestyle"
|
||||
- "check_isort"
|
||||
- "mypy"
|
||||
- "packaging"
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions/setup-python@v2
|
||||
- run: pip install tox
|
||||
- run: tox -e ${{ matrix.toxenv }}
|
||||
|
||||
lint-crlf:
|
||||
runs-on: ubuntu-latest
|
||||
@@ -61,23 +72,23 @@ jobs:
|
||||
matrix:
|
||||
python-version: ["3.7", "3.8", "3.9", "3.10"]
|
||||
database: ["sqlite"]
|
||||
extras: ["all"]
|
||||
toxenv: ["py"]
|
||||
include:
|
||||
# Newest Python without optional deps
|
||||
- python-version: "3.10"
|
||||
extras: ""
|
||||
toxenv: "py-noextras"
|
||||
|
||||
# Oldest Python with PostgreSQL
|
||||
- python-version: "3.7"
|
||||
database: "postgres"
|
||||
postgres-version: "10"
|
||||
extras: "all"
|
||||
toxenv: "py"
|
||||
|
||||
# Newest Python with newest PostgreSQL
|
||||
- python-version: "3.10"
|
||||
database: "postgres"
|
||||
postgres-version: "14"
|
||||
extras: "all"
|
||||
toxenv: "py"
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
@@ -89,16 +100,17 @@ jobs:
|
||||
-e POSTGRES_PASSWORD=postgres \
|
||||
-e POSTGRES_INITDB_ARGS="--lc-collate C --lc-ctype C --encoding UTF8" \
|
||||
postgres:${{ matrix.postgres-version }}
|
||||
- uses: matrix-org/setup-python-poetry@v1
|
||||
- uses: actions/setup-python@v2
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
extras: ${{ matrix.extras }}
|
||||
- run: pip install tox
|
||||
- name: Await PostgreSQL
|
||||
if: ${{ matrix.postgres-version }}
|
||||
timeout-minutes: 2
|
||||
run: until pg_isready -h localhost; do sleep 1; done
|
||||
- run: poetry run trial --jobs=2 tests
|
||||
- run: tox -e ${{ matrix.toxenv }}
|
||||
env:
|
||||
TRIAL_FLAGS: "--jobs=2"
|
||||
SYNAPSE_POSTGRES: ${{ matrix.database == 'postgres' || '' }}
|
||||
SYNAPSE_POSTGRES_HOST: localhost
|
||||
SYNAPSE_POSTGRES_USER: postgres
|
||||
@@ -117,7 +129,6 @@ jobs:
|
||||
|| true
|
||||
|
||||
trial-olddeps:
|
||||
# Note: sqlite only; no postgres
|
||||
if: ${{ !cancelled() && !failure() }} # Allow previous steps to be skipped, but not fail
|
||||
needs: linting-done
|
||||
runs-on: ubuntu-latest
|
||||
@@ -125,11 +136,11 @@ jobs:
|
||||
- uses: actions/checkout@v2
|
||||
- name: Test with old deps
|
||||
uses: docker://ubuntu:focal # For old python and sqlite
|
||||
# Note: focal seems to be using 3.8, but the oldest is 3.7?
|
||||
# See https://github.com/matrix-org/synapse/issues/12343
|
||||
with:
|
||||
workdir: /github/workspace
|
||||
entrypoint: .ci/scripts/test_old_deps.sh
|
||||
env:
|
||||
TRIAL_FLAGS: "--jobs=2"
|
||||
- name: Dump logs
|
||||
# Logs are most useful when the command fails, always include them.
|
||||
if: ${{ always() }}
|
||||
@@ -145,24 +156,23 @@ jobs:
|
||||
|
||||
trial-pypy:
|
||||
# Very slow; only run if the branch name includes 'pypy'
|
||||
# Note: sqlite only; no postgres. Completely untested since poetry move.
|
||||
if: ${{ contains(github.ref, 'pypy') && !failure() && !cancelled() }}
|
||||
needs: linting-done
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
matrix:
|
||||
python-version: ["pypy-3.7"]
|
||||
extras: ["all"]
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
# Install libs necessary for PyPy to build binary wheels for dependencies
|
||||
- run: sudo apt-get -qq install xmlsec1 libxml2-dev libxslt-dev
|
||||
- uses: matrix-org/setup-python-poetry@v1
|
||||
- uses: actions/setup-python@v2
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
extras: ${{ matrix.extras }}
|
||||
- run: poetry run trial --jobs=2 tests
|
||||
- run: pip install tox
|
||||
- run: tox -e py
|
||||
env:
|
||||
TRIAL_FLAGS: "--jobs=2"
|
||||
- name: Dump logs
|
||||
# Logs are most useful when the command fails, always include them.
|
||||
if: ${{ always() }}
|
||||
@@ -261,10 +271,9 @@ jobs:
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- run: sudo apt-get -qq install xmlsec1
|
||||
- uses: matrix-org/setup-python-poetry@v1
|
||||
- uses: actions/setup-python@v2
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
extras: "postgres"
|
||||
python-version: "3.9"
|
||||
- run: .ci/scripts/test_export_data_command.sh
|
||||
|
||||
portdb:
|
||||
@@ -299,63 +308,13 @@ jobs:
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- run: sudo apt-get -qq install xmlsec1
|
||||
- uses: matrix-org/setup-python-poetry@v1
|
||||
- uses: actions/setup-python@v2
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
extras: "postgres"
|
||||
- run: .ci/scripts/test_synapse_port_db.sh
|
||||
|
||||
complement:
|
||||
if: "${{ !failure() && !cancelled() }}"
|
||||
needs: linting-done
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include:
|
||||
- arrangement: monolith
|
||||
database: SQLite
|
||||
|
||||
- arrangement: monolith
|
||||
database: Postgres
|
||||
|
||||
steps:
|
||||
# The path is set via a file given by $GITHUB_PATH. We need both Go 1.17 and GOPATH on the path to run Complement.
|
||||
# See https://docs.github.com/en/actions/using-workflows/workflow-commands-for-github-actions#adding-a-system-path
|
||||
- name: "Set Go Version"
|
||||
run: |
|
||||
# Add Go 1.17 to the PATH: see https://github.com/actions/virtual-environments/blob/main/images/linux/Ubuntu2004-Readme.md#environment-variables-2
|
||||
echo "$GOROOT_1_17_X64/bin" >> $GITHUB_PATH
|
||||
# Add the Go path to the PATH: We need this so we can call gotestfmt
|
||||
echo "~/go/bin" >> $GITHUB_PATH
|
||||
|
||||
- name: "Install Complement Dependencies"
|
||||
run: |
|
||||
sudo apt-get update && sudo apt-get install -y libolm3 libolm-dev
|
||||
go get -v github.com/haveyoudebuggedit/gotestfmt/v2/cmd/gotestfmt@latest
|
||||
|
||||
- name: Run actions/checkout@v2 for synapse
|
||||
uses: actions/checkout@v2
|
||||
with:
|
||||
path: synapse
|
||||
|
||||
# Attempt to check out the same branch of Complement as the PR. If it
|
||||
# doesn't exist, fallback to HEAD.
|
||||
- name: Checkout complement
|
||||
run: synapse/.ci/scripts/checkout_complement.sh
|
||||
|
||||
- run: |
|
||||
set -o pipefail
|
||||
POSTGRES=${{ (matrix.database == 'Postgres') && 1 }} COMPLEMENT_DIR=`pwd`/complement synapse/scripts-dev/complement.sh -json 2>&1 | gotestfmt
|
||||
shell: bash
|
||||
name: Run Complement Tests
|
||||
|
||||
# We only run the workers tests on `develop` for now, because they're too slow to wait for on PRs.
|
||||
# Sadly, you can't have an `if` condition on the value of a matrix, so this is a temporary, separate job for now.
|
||||
# GitHub Actions doesn't support YAML anchors, so it's full-on duplication for now.
|
||||
complement-developonly:
|
||||
if: "${{ !failure() && !cancelled() && (github.ref == 'refs/heads/develop') }}"
|
||||
if: ${{ !failure() && !cancelled() }}
|
||||
needs: linting-done
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
@@ -382,13 +341,48 @@ jobs:
|
||||
# Attempt to check out the same branch of Complement as the PR. If it
|
||||
# doesn't exist, fallback to HEAD.
|
||||
- name: Checkout complement
|
||||
run: synapse/.ci/scripts/checkout_complement.sh
|
||||
shell: bash
|
||||
run: |
|
||||
mkdir -p complement
|
||||
# Attempt to use the version of complement which best matches the current
|
||||
# build. Depending on whether this is a PR or release, etc. we need to
|
||||
# use different fallbacks.
|
||||
#
|
||||
# 1. First check if there's a similarly named branch (GITHUB_HEAD_REF
|
||||
# for pull requests, otherwise GITHUB_REF).
|
||||
# 2. Attempt to use the base branch, e.g. when merging into release-vX.Y
|
||||
# (GITHUB_BASE_REF for pull requests).
|
||||
# 3. Use the default complement branch ("HEAD").
|
||||
for BRANCH_NAME in "$GITHUB_HEAD_REF" "$GITHUB_BASE_REF" "${GITHUB_REF#refs/heads/}" "HEAD"; do
|
||||
# Skip empty branch names and merge commits.
|
||||
if [[ -z "$BRANCH_NAME" || $BRANCH_NAME =~ ^refs/pull/.* ]]; then
|
||||
continue
|
||||
fi
|
||||
|
||||
(wget -O - "https://github.com/matrix-org/complement/archive/$BRANCH_NAME.tar.gz" | tar -xz --strip-components=1 -C complement) && break
|
||||
done
|
||||
|
||||
# Build initial Synapse image
|
||||
- run: docker build -t matrixdotorg/synapse:latest -f docker/Dockerfile .
|
||||
working-directory: synapse
|
||||
env:
|
||||
DOCKER_BUILDKIT: 1
|
||||
|
||||
# Build a ready-to-run Synapse image based on the initial image above.
|
||||
# This new image includes a config file, keys for signing and TLS, and
|
||||
# other settings to make it suitable for testing under Complement.
|
||||
- run: docker build -t complement-synapse -f Synapse.Dockerfile .
|
||||
working-directory: complement/dockerfiles
|
||||
|
||||
# Run Complement
|
||||
- run: |
|
||||
set -o pipefail
|
||||
WORKERS=1 COMPLEMENT_DIR=`pwd`/complement synapse/scripts-dev/complement.sh -json 2>&1 | gotestfmt
|
||||
go test -v -json -p 1 -tags synapse_blacklist,msc2403,msc2716,msc3030 ./tests/... 2>&1 | gotestfmt
|
||||
shell: bash
|
||||
name: Run Complement Tests
|
||||
env:
|
||||
COMPLEMENT_BASE_IMAGE: complement-synapse:latest
|
||||
working-directory: complement
|
||||
|
||||
# a job which marks all the other jobs as complete, thus allowing PRs to be merged.
|
||||
tests-done:
|
||||
|
||||
.github/workflows/twisted_trunk.yml (48 changed lines)
@@ -6,27 +6,16 @@ on:
|
||||
|
||||
workflow_dispatch:
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
mypy:
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- uses: matrix-org/setup-python-poetry@v1
|
||||
with:
|
||||
python-version: "3.x"
|
||||
extras: "all"
|
||||
- run: |
|
||||
poetry remove twisted
|
||||
poetry add --extras tls git+https://github.com/twisted/twisted.git#trunk
|
||||
poetry install --no-interaction --extras "all test"
|
||||
- name: Remove warn_unused_ignores from mypy config
|
||||
run: sed '/warn_unused_ignores = True/d' -i mypy.ini
|
||||
- run: poetry run mypy
|
||||
- uses: actions/setup-python@v2
|
||||
- run: .ci/patch_for_twisted_trunk.sh
|
||||
- run: pip install tox
|
||||
- run: tox -e mypy
|
||||
|
||||
trial:
|
||||
runs-on: ubuntu-latest
|
||||
@@ -34,15 +23,14 @@ jobs:
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- run: sudo apt-get -qq install xmlsec1
|
||||
- uses: matrix-org/setup-python-poetry@v1
|
||||
- uses: actions/setup-python@v2
|
||||
with:
|
||||
python-version: "3.x"
|
||||
extras: "all test"
|
||||
- run: |
|
||||
poetry remove twisted
|
||||
poetry add --extras tls git+https://github.com/twisted/twisted.git#trunk
|
||||
poetry install --no-interaction --extras "all test"
|
||||
- run: poetry run trial --jobs 2 tests
|
||||
python-version: 3.7
|
||||
- run: .ci/patch_for_twisted_trunk.sh
|
||||
- run: pip install tox
|
||||
- run: tox -e py
|
||||
env:
|
||||
TRIAL_FLAGS: "--jobs=2"
|
||||
|
||||
- name: Dump logs
|
||||
# Logs are most useful when the command fails, always include them.
|
||||
@@ -67,23 +55,11 @@ jobs:
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- name: Patch dependencies
|
||||
# Note: The poetry commands want to create a virtualenv in /src/.venv/,
|
||||
# but the sytest-synapse container expects it to be in /venv/.
|
||||
# We symlink it before running poetry so that poetry actually
|
||||
# ends up installing to `/venv`.
|
||||
run: |
|
||||
ln -s -T /venv /src/.venv
|
||||
poetry remove twisted
|
||||
poetry add --extras tls git+https://github.com/twisted/twisted.git#trunk
|
||||
poetry install --no-interaction --extras "all test"
|
||||
run: .ci/patch_for_twisted_trunk.sh
|
||||
working-directory: /src
|
||||
- name: Run SyTest
|
||||
run: /bootstrap.sh synapse
|
||||
working-directory: /src
|
||||
env:
|
||||
# Use offline mode to avoid reinstalling the pinned version of
|
||||
# twisted.
|
||||
OFFLINE: 1
|
||||
- name: Summarise results.tap
|
||||
if: ${{ always() }}
|
||||
run: /sytest/scripts/tap_to_gha.pl /logs/results.tap
|
||||
|
||||
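The twisted-trunk pinning in this workflow can also be tried locally; a sketch that mirrors the poetry commands shown in the mypy and trial jobs above:

```sh
# Local approximation of the twisted_trunk jobs: swap Twisted for trunk and run trial.
poetry remove twisted
poetry add --extras tls git+https://github.com/twisted/twisted.git#trunk
poetry install --no-interaction --extras "all test"
poetry run trial --jobs 2 tests
```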
.gitignore (6 changed lines)
@@ -15,9 +15,6 @@ _trial_temp*/
.DS_Store
__pycache__/

# We do want the poetry lockfile.
!poetry.lock

# stuff that is likely to exist when you run a server locally
/*.db
/*.log
@@ -33,9 +30,6 @@ __pycache__/
/media_store/
/uploads

# For direnv users
/.envrc

# IDEs
/.idea/
/.ropeproject/
CHANGES.md (10198 changed lines): file diff suppressed because it is too large.
MANIFEST.in (new file, 54 lines)
@@ -0,0 +1,54 @@
|
||||
include LICENSE
|
||||
include VERSION
|
||||
include *.rst
|
||||
include *.md
|
||||
include demo/README
|
||||
include demo/demo.tls.dh
|
||||
include demo/*.py
|
||||
include demo/*.sh
|
||||
|
||||
include synapse/py.typed
|
||||
recursive-include synapse/storage *.sql
|
||||
recursive-include synapse/storage *.sql.postgres
|
||||
recursive-include synapse/storage *.sql.sqlite
|
||||
recursive-include synapse/storage *.py
|
||||
recursive-include synapse/storage *.txt
|
||||
recursive-include synapse/storage *.md
|
||||
|
||||
recursive-include docs *
|
||||
recursive-include scripts-dev *
|
||||
recursive-include synapse *.pyi
|
||||
recursive-include tests *.py
|
||||
recursive-include tests *.pem
|
||||
recursive-include tests *.p8
|
||||
recursive-include tests *.crt
|
||||
recursive-include tests *.key
|
||||
|
||||
recursive-include synapse/res *
|
||||
recursive-include synapse/static *.css
|
||||
recursive-include synapse/static *.gif
|
||||
recursive-include synapse/static *.html
|
||||
recursive-include synapse/static *.js
|
||||
|
||||
exclude .codecov.yml
|
||||
exclude .coveragerc
|
||||
exclude .dockerignore
|
||||
exclude .editorconfig
|
||||
exclude Dockerfile
|
||||
exclude mypy.ini
|
||||
exclude sytest-blacklist
|
||||
exclude test_postgresql.sh
|
||||
|
||||
include book.toml
|
||||
include pyproject.toml
|
||||
recursive-include changelog.d *
|
||||
|
||||
include .flake8
|
||||
prune .circleci
|
||||
prune .github
|
||||
prune .ci
|
||||
prune contrib
|
||||
prune debian
|
||||
prune demo/etc
|
||||
prune docker
|
||||
prune stubs
|
||||
README.rst (37 changed lines)
@@ -55,7 +55,7 @@ solutions. The hope is for Matrix to act as the building blocks for a new
|
||||
generation of fully open and interoperable messaging and VoIP apps for the
|
||||
internet.
|
||||
|
||||
Synapse is a Matrix "homeserver" implementation developed by the matrix.org core
|
||||
Synapse is a Matrix "homeserver" implementation developed by the matrix.org core
|
||||
team, written in Python 3/Twisted.
|
||||
|
||||
In Matrix, every user runs one or more Matrix clients, which connect through to
|
||||
@@ -293,42 +293,39 @@ directory of your choice::
|
||||
git clone https://github.com/matrix-org/synapse.git
|
||||
cd synapse
|
||||
|
||||
Synapse has a number of external dependencies. We maintain a fixed development
|
||||
environment using `Poetry <https://python-poetry.org/>`_. First, install poetry. We recommend::
|
||||
Synapse has a number of external dependencies, that are easiest
|
||||
to install using pip and a virtualenv::
|
||||
|
||||
pip install --user pipx
|
||||
pipx install poetry
|
||||
|
||||
as described `here <https://python-poetry.org/docs/#installing-with-pipx>`_.
|
||||
(See `poetry's installation docs <https://python-poetry.org/docs/#installation>`_
|
||||
for other installation methods.) Then ask poetry to create a virtual environment
|
||||
from the project and install Synapse's dependencies::
|
||||
|
||||
poetry install --extras "all test"
|
||||
python3 -m venv ./env
|
||||
source ./env/bin/activate
|
||||
pip install -e ".[all,dev]"
|
||||
|
||||
This will run a process of downloading and installing all the needed
|
||||
dependencies into a virtual env.
|
||||
dependencies into a virtual env. If any dependencies fail to install,
|
||||
try installing the failing modules individually::
|
||||
|
||||
We recommend using the demo which starts 3 federated instances running on ports `8080` - `8082`::
|
||||
pip install -e "module-name"
|
||||
|
||||
poetry run ./demo/start.sh
|
||||
We recommend using the demo which starts 3 federated instances running on ports `8080` - `8082`
|
||||
|
||||
(to stop, you can use ``poetry run ./demo/stop.sh``)
|
||||
./demo/start.sh
|
||||
|
||||
See the `demo documentation <https://matrix-org.github.io/synapse/develop/development/demo.html>`_
|
||||
(to stop, you can use `./demo/stop.sh`)
|
||||
|
||||
See the [demo documentation](https://matrix-org.github.io/synapse/develop/development/demo.html)
|
||||
for more information.
|
||||
|
||||
If you just want to start a single instance of the app and run it directly::
|
||||
|
||||
# Create the homeserver.yaml config once
|
||||
poetry run synapse_homeserver \
|
||||
python -m synapse.app.homeserver \
|
||||
--server-name my.domain.name \
|
||||
--config-path homeserver.yaml \
|
||||
--generate-config \
|
||||
--report-stats=[yes|no]
|
||||
|
||||
# Start the app
|
||||
poetry run synapse_homeserver --config-path homeserver.yaml
|
||||
python -m synapse.app.homeserver --config-path homeserver.yaml
|
||||
|
||||
|
||||
Running the unit tests
|
||||
@@ -337,7 +334,7 @@ Running the unit tests
|
||||
After getting up and running, you may wish to run Synapse's unit tests to
|
||||
check that everything is installed correctly::
|
||||
|
||||
poetry run trial tests
|
||||
trial tests
|
||||
|
||||
This should end with a 'PASSED' result (note that exact numbers will
|
||||
differ)::
|
||||
|
||||
@@ -33,7 +33,7 @@ site-url = "/synapse/"
|
||||
additional-css = [
|
||||
"docs/website_files/table-of-contents.css",
|
||||
"docs/website_files/remove-nav-buttons.css",
|
||||
"docs/website_files/indent-section-headers.css",
|
||||
"docs/website_files/section-headers.css",
|
||||
]
|
||||
additional-js = ["docs/website_files/table-of-contents.js"]
|
||||
theme = "docs/website_files/theme"
|
||||
changelog.d/11700.removal (new file): Remove workaround introduced in Synapse 1.50.0 for Mjolnir compatibility. Breaks compatibility with Mjolnir 1.3.1 and earlier.
changelog.d/11915.misc (new file): Simplify the `ApplicationService` class' set of public methods related to interest checking.
changelog.d/11998.doc (new file): Fix complexity checking config example in [Resource Constrained Devices](https://matrix-org.github.io/synapse/v1.54/other/running_synapse_on_single_board_computers.html) docs page.
changelog.d/12028.feature (new file): Add third-party rules rules callbacks `check_can_shutdown_room` and `check_can_deactivate_user`.
changelog.d/12042.misc (new file): Correct type hints for txredis.
changelog.d/12090.bugfix (new file): Use the proper serialization format for bundled thread aggregations. The bug has existed since Synapse v1.48.0.
changelog.d/12101.misc (new file): Limit the size of `aggregation_key` on annotations.
changelog.d/12108.misc (new file): Add type hints to `tests/rest/client`.
changelog.d/12113.bugfix (new file): Fix a long-standing bug when redacting events with relations.
changelog.d/12118.misc (new file): Move scripts to Synapse package and expose as setuptools entry points.
changelog.d/12121.bugfix (new file): Fix a long-standing bug when redacting events with relations.
changelog.d/12128.misc (new file): Fix data validation to compare to lists, not sequences.
changelog.d/12130.bugfix (new file): Fix a long-standing bug when redacting events with relations.
changelog.d/12131.misc (new file): Fix CI not attaching source distributions and wheels to the GitHub releases.
changelog.d/12132.feature (new file): Improve performance of logging in for large accounts.
changelog.d/12135.feature (new file): Add experimental env var `SYNAPSE_ASYNC_IO_REACTOR` that causes Synapse to use the asyncio reactor for Twisted.
changelog.d/12136.misc (new file): Remove unused mocks from `test_typing`.
changelog.d/12137.misc (new file): Give `scripts-dev` scripts suffixes for neater CI config.
changelog.d/12138.removal (new file): Remove backwards compatibilty with pagination tokens from the `/relations` and `/aggregations` endpoints generated from Synapse < v1.52.0.
changelog.d/12140.misc (new file): Move `synctl` into `synapse._scripts` and expose as an entry point.
changelog.d/12142.misc (new file): Move the snapcraft configuration file to `contrib`.
changelog.d/12143.doc (new file): Improve documentation for demo scripts.
changelog.d/12144.misc (new file): Enable [MSC3030](https://github.com/matrix-org/matrix-doc/pull/3030) Complement tests in CI.
changelog.d/12145.misc (new file): Enable [MSC2716](https://github.com/matrix-org/matrix-doc/pull/2716) Complement tests in CI.
changelog.d/12146.misc (new file): Add type hints to `tests/rest`.
changelog.d/12149.misc (new file): Add test for `ObservableDeferred`'s cancellation behaviour.
changelog.d/12150.misc (new file): Use `ParamSpec` in type hints for `synapse.logging.context`.
changelog.d/12151.feature (new file): Support the stable identifiers from [MSC3440](https://github.com/matrix-org/matrix-doc/pull/3440): threads.
changelog.d/12152.misc (new file): Prune unused jobs from `tox` config.
changelog.d/12153.misc (new file): Move CI checks out of tox, to facilitate a move to using poetry.
changelog.d/12154.misc (new file): Avoid generating state groups for local out-of-band leaves.
changelog.d/12155.misc (new file): Avoid trying to calculate the state at outlier events.
changelog.d/12156.misc (new file): Fix some type annotations.
changelog.d/12157.bugfix (new file): Fix a bug introduced in #4864 whereby background updates are never run with the default background batch size.
changelog.d/12159.misc (new file): Add type hints for `ObservableDeferred` attributes.
changelog.d/12161.misc (new file): Use a prebuilt Action for the `tests-done` CI job.
changelog.d/12163.misc (new file): Reduce number of DB queries made during processing of `/sync`.
changelog.d/12173.misc (new file): Avoid trying to calculate the state at outlier events.
changelog.d/12175.bugfix (new file): Fix a bug where non-standard information was returned from the `/hierarchy` API. Introduced in Synapse v1.41.0.
changelog.d/12179.doc (new file): Updates to the Room DAG concepts development document.
changelog.d/12182.misc (new file): Retry HTTP replication failures, this should prevent 502's when restarting stateful workers (main, event persisters, stream writers). Contributed by Nick @ Beeper.
changelog.d/12187.misc (new file): Remove unused variables.
changelog.d/12189.bugfix (new file): Fix a long-standing bug when redacting events with relations.
changelog.d/12192.misc (new file): Rename `HomeServer.get_tcp_replication` to `get_replication_command_handler`.
changelog.d/12197.misc (new file): Remove some dead code.
@@ -1 +0,0 @@
Add tests for cancellation of `GET /rooms/$room_id/members` and `GET /rooms/$room_id/state` requests.
@@ -1 +0,0 @@
Add documentation for how to configure Synapse with Workers using Docker Compose. Includes example worker config and docker-compose.yaml. Contributed by @Thumbscrew.
@@ -1 +0,0 @@
Report login failures due to unknown third party identifiers in the same way as failures due to invalid passwords. This prevents an attacker from using the error response to determine if the identifier exists. Contributed by Daniel Aloni.
@@ -1 +0,0 @@
Merge the Complement testing Docker images into a single, multi-purpose image.
@@ -1 +0,0 @@
Clean up the test code for client disconnection.
@@ -1 +0,0 @@
Use lower isolation level when inserting read receipts to avoid serialization errors. Contributed by Nick @ Beeper.
@@ -1 +0,0 @@
Reduce the amount of state we pull from the DB.
@@ -1 +0,0 @@
Enable testing against PostgreSQL databases in Complement CI.
@@ -1 +0,0 @@
Fix an inaccurate comment.
@@ -1 +0,0 @@
Remove the `delete_device` method and always call `delete_devices`.
@@ -1 +0,0 @@
Fix a bug introduced in Synapse 1.58 where Synapse would not report full version information when installed from a git checkout. This is a best-effort affair and not guaranteed to be stable.
@@ -1 +0,0 @@
Fix a bug introduced in Synapse 1.60 where Synapse would fail to start if the `sqlite3` module was not available.
@@ -1 +0,0 @@
Use a GitHub form for issues rather than a hard-to-read, easy-to-ignore template.
@@ -1 +0,0 @@
Move [MSC3715](https://github.com/matrix-org/matrix-spec-proposals/pull/3715) behind an experimental config flag.
@@ -1 +0,0 @@
Add type annotations to `tests.state.test_v2`.
@@ -1 +0,0 @@
Fix documentation for running complement tests.
@@ -1,2 +0,0 @@
Fix a bug where non-standard information was required when requesting the `/hierarchy` API over federation. Introduced
in Synapse v1.41.0.
@@ -1 +0,0 @@
Faster joins: add issue links to the TODO comments in the code.
@@ -1 +0,0 @@
Modernize the `contrib/graph/` scripts.
@@ -1 +0,0 @@
Ensure the [Poetry cheat sheet](https://matrix-org.github.io/synapse/develop/development/dependencies.html) is available in the online documentation.
@@ -1 +0,0 @@
Add type annotations to `tests.utils`.
@@ -16,7 +16,6 @@
|
||||
|
||||
""" Starts a synapse client console. """
|
||||
import argparse
|
||||
import binascii
|
||||
import cmd
|
||||
import getpass
|
||||
import json
|
||||
@@ -27,8 +26,9 @@ import urllib
|
||||
from http import TwistedHttpClient
|
||||
from typing import Optional
|
||||
|
||||
import nacl.encoding
|
||||
import nacl.signing
|
||||
import urlparse
|
||||
from signedjson.key import NACL_ED25519, decode_verify_key_bytes
|
||||
from signedjson.sign import SignatureVerifyException, verify_signed_json
|
||||
|
||||
from twisted.internet import defer, reactor, threads
|
||||
@@ -41,6 +41,7 @@ TRUSTED_ID_SERVERS = ["localhost:8001"]
|
||||
|
||||
|
||||
class SynapseCmd(cmd.Cmd):
|
||||
|
||||
"""Basic synapse command-line processor.
|
||||
|
||||
This processes commands from the user and calls the relevant HTTP methods.
|
||||
@@ -419,8 +420,8 @@ class SynapseCmd(cmd.Cmd):
|
||||
pubKey = None
|
||||
pubKeyObj = yield self.http_client.do_request("GET", url)
|
||||
if "public_key" in pubKeyObj:
|
||||
pubKey = decode_verify_key_bytes(
|
||||
NACL_ED25519, binascii.unhexlify(pubKeyObj["public_key"])
|
||||
pubKey = nacl.signing.VerifyKey(
|
||||
pubKeyObj["public_key"], encoder=nacl.encoding.HexEncoder
|
||||
)
|
||||
else:
|
||||
print("No public key found in pubkey response!")
|
||||
|
||||
@@ -1,125 +0,0 @@
# Setting up Synapse with Workers using Docker Compose

This directory describes how to deploy and manage Synapse and workers via [Docker Compose](https://docs.docker.com/compose/).

Example worker configuration files can be found [here](workers).

All examples and snippets assume that your Synapse service is called `synapse` in your Docker Compose file.

An example Docker Compose file can be found [here](docker-compose.yaml).

## Worker Service Examples in Docker Compose

In order to start the Synapse container as a worker, you must specify an `entrypoint` that loads both the `homeserver.yaml` and the configuration for the worker (`synapse-generic-worker-1.yaml` in the example below). You must also include the worker type in the environment variable `SYNAPSE_WORKER`, or alternatively pass `-m synapse.app.generic_worker` as part of the `entrypoint` after `"/start.py", "run"`.
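For illustration, the alternative form mentioned above (passing the worker app on the `entrypoint` instead of via `SYNAPSE_WORKER`) would look roughly like this. This is a sketch based only on the description above, reusing the same hypothetical config paths as the examples below:

```yaml
entrypoint: ["/start.py", "run", "-m", "synapse.app.generic_worker", "--config-path=/data/homeserver.yaml", "--config-path=/data/workers/synapse-generic-worker-1.yaml"]
```
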
### Generic Worker Example

```yaml
synapse-generic-worker-1:
  image: matrixdotorg/synapse:latest
  container_name: synapse-generic-worker-1
  restart: unless-stopped
  entrypoint: ["/start.py", "run", "--config-path=/data/homeserver.yaml", "--config-path=/data/workers/synapse-generic-worker-1.yaml"]
  healthcheck:
    test: ["CMD-SHELL", "curl -fSs http://localhost:8081/health || exit 1"]
    start_period: "5s"
    interval: "15s"
    timeout: "5s"
  volumes:
    - ${VOLUME_PATH}/data:/data:rw # Replace VOLUME_PATH with the path to your Synapse volume
  environment:
    SYNAPSE_WORKER: synapse.app.generic_worker
  # Expose port if required so your reverse proxy can send requests to this worker
  # Port configuration will depend on how the http listener is defined in the worker configuration file
  ports:
    - 8081:8081
  depends_on:
    - synapse
```

### Federation Sender Example

Please note: The federation sender does not receive REST API calls, so no exposed ports are required.

```yaml
synapse-federation-sender-1:
  image: matrixdotorg/synapse:latest
  container_name: synapse-federation-sender-1
  restart: unless-stopped
  entrypoint: ["/start.py", "run", "--config-path=/data/homeserver.yaml", "--config-path=/data/workers/synapse-federation-sender-1.yaml"]
  healthcheck:
    disable: true
  volumes:
    - ${VOLUME_PATH}/data:/data:rw # Replace VOLUME_PATH with the path to your Synapse volume
  environment:
    SYNAPSE_WORKER: synapse.app.federation_sender
  depends_on:
    - synapse
```

## `homeserver.yaml` Configuration

### Enable Redis

Locate the `redis` section of your `homeserver.yaml` and enable and configure it:

```yaml
redis:
  enabled: true
  host: redis
  port: 6379
  # password: <secret_password>
```

This assumes that your Redis service is called `redis` in your Docker Compose file.

### Add a replication Listener

Locate the `listeners` section of your `homeserver.yaml` and add the following replication listener:

```yaml
listeners:
  # Other listeners

  - port: 9093
    type: http
    resources:
      - names: [replication]
```

This listener is used by the workers for replication and is referred to in worker config files using the following settings:

```yaml
worker_replication_host: synapse
worker_replication_http_port: 9093
```

### Add Workers to `instance_map`

Locate the `instance_map` section of your `homeserver.yaml` and populate it with your workers:

```yaml
instance_map:
  synapse-generic-worker-1:        # The worker_name setting in your worker configuration file
    host: synapse-generic-worker-1 # The name of the worker service in your Docker Compose file
    port: 8034                     # The port assigned to the replication listener in your worker config file
  synapse-federation-sender-1:
    host: synapse-federation-sender-1
    port: 8034
```

### Configure Federation Senders

This section is applicable if you are using Federation senders (`synapse.app.federation_sender`). Locate the `send_federation` and `federation_sender_instances` settings in your `homeserver.yaml` and configure them:

```yaml
# This will disable federation sending on the main Synapse instance
send_federation: false

federation_sender_instances:
  - synapse-federation-sender-1 # The worker_name setting in your federation sender worker configuration file
```

## Other Worker types

Using the concepts shown here it is possible to create other worker types in Docker Compose. See the [Workers](https://matrix-org.github.io/synapse/latest/workers.html#available-worker-applications) documentation for a list of available workers.
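As a rough sketch only (not part of the original examples), another worker type can follow the same pattern. The service below assumes a media repository worker; the config file name `synapse-media-worker-1.yaml`, the port layout, and the choice of `synapse.app.media_repository` are illustrative assumptions to adapt to your deployment:

```yaml
synapse-media-worker-1:
  image: matrixdotorg/synapse:latest
  container_name: synapse-media-worker-1
  restart: unless-stopped
  # Hypothetical worker config file, created alongside the other worker configs.
  entrypoint: ["/start.py", "run", "--config-path=/data/homeserver.yaml", "--config-path=/data/workers/synapse-media-worker-1.yaml"]
  environment:
    SYNAPSE_WORKER: synapse.app.media_repository
  volumes:
    - ${VOLUME_PATH}/data:/data:rw # Replace VOLUME_PATH with the path to your Synapse volume
  depends_on:
    - synapse
```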
@@ -1,77 +0,0 @@
networks:
  backend:

services:
  postgres:
    image: postgres:latest
    restart: unless-stopped
    volumes:
      - ${VOLUME_PATH}/var/lib/postgresql/data:/var/lib/postgresql/data:rw
    networks:
      - backend
    environment:
      POSTGRES_DB: synapse
      POSTGRES_USER: synapse_user
      POSTGRES_PASSWORD: postgres
      POSTGRES_INITDB_ARGS: --encoding=UTF8 --locale=C

  redis:
    image: redis:latest
    restart: unless-stopped
    networks:
      - backend

  synapse:
    image: matrixdotorg/synapse:latest
    container_name: synapse
    restart: unless-stopped
    volumes:
      - ${VOLUME_PATH}/data:/data:rw
    ports:
      - 8008:8008
    networks:
      - backend
    environment:
      SYNAPSE_CONFIG_DIR: /data
      SYNAPSE_CONFIG_PATH: /data/homeserver.yaml
    depends_on:
      - postgres

  synapse-generic-worker-1:
    image: matrixdotorg/synapse:latest
    container_name: synapse-generic-worker-1
    restart: unless-stopped
    entrypoint: ["/start.py", "run", "--config-path=/data/homeserver.yaml", "--config-path=/data/workers/synapse-generic-worker-1.yaml"]
    healthcheck:
      test: ["CMD-SHELL", "curl -fSs http://localhost:8081/health || exit 1"]
      start_period: "5s"
      interval: "15s"
      timeout: "5s"
    networks:
      - backend
    volumes:
      - ${VOLUME_PATH}/data:/data:rw # Replace VOLUME_PATH with the path to your Synapse volume
    environment:
      SYNAPSE_WORKER: synapse.app.generic_worker
    # Expose port if required so your reverse proxy can send requests to this worker
    # Port configuration will depend on how the http listener is defined in the worker configuration file
    ports:
      - 8081:8081
    depends_on:
      - synapse

  synapse-federation-sender-1:
    image: matrixdotorg/synapse:latest
    container_name: synapse-federation-sender-1
    restart: unless-stopped
    entrypoint: ["/start.py", "run", "--config-path=/data/homeserver.yaml", "--config-path=/data/workers/synapse-federation-sender-1.yaml"]
    healthcheck:
      disable: true
    networks:
      - backend
    volumes:
      - ${VOLUME_PATH}/data:/data:rw # Replace VOLUME_PATH with the path to your Synapse volume
    environment:
      SYNAPSE_WORKER: synapse.app.federation_sender
    depends_on:
      - synapse
@@ -1,14 +0,0 @@
worker_app: synapse.app.federation_sender
worker_name: synapse-federation-sender-1

# The replication listener on the main synapse process.
worker_replication_host: synapse
worker_replication_http_port: 9093

worker_listeners:
  - type: http
    port: 8034
    resources:
      - names: [replication]

worker_log_config: /data/federation_sender.log.config
@@ -1,19 +0,0 @@
worker_app: synapse.app.generic_worker
worker_name: synapse-generic-worker-1

# The replication listener on the main synapse process.
worker_replication_host: synapse
worker_replication_http_port: 9093

worker_listeners:
  - type: http
    port: 8034
    resources:
      - names: [replication]
  - type: http
    port: 8081
    x_forwarded: true
    resources:
      - names: [client, federation]

worker_log_config: /data/worker.log.config
165
contrib/experiments/cursesio.py
Normal file
@@ -0,0 +1,165 @@
|
||||
# Copyright 2014-2016 OpenMarket Ltd
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import curses
|
||||
import curses.wrapper
|
||||
from curses.ascii import isprint
|
||||
|
||||
from twisted.internet import reactor
|
||||
|
||||
|
||||
class CursesStdIO:
|
||||
def __init__(self, stdscr, callback=None):
|
||||
self.statusText = "Synapse test app -"
|
||||
self.searchText = ""
|
||||
self.stdscr = stdscr
|
||||
|
||||
self.logLine = ""
|
||||
|
||||
self.callback = callback
|
||||
|
||||
self._setup()
|
||||
|
||||
def _setup(self):
|
||||
self.stdscr.nodelay(1) # Make non blocking
|
||||
|
||||
self.rows, self.cols = self.stdscr.getmaxyx()
|
||||
self.lines = []
|
||||
|
||||
curses.use_default_colors()
|
||||
|
||||
self.paintStatus(self.statusText)
|
||||
self.stdscr.refresh()
|
||||
|
||||
def set_callback(self, callback):
|
||||
self.callback = callback
|
||||
|
||||
def fileno(self):
|
||||
"""We want to select on FD 0"""
|
||||
return 0
|
||||
|
||||
def connectionLost(self, reason):
|
||||
self.close()
|
||||
|
||||
def print_line(self, text):
|
||||
"""add a line to the internal list of lines"""
|
||||
|
||||
self.lines.append(text)
|
||||
self.redraw()
|
||||
|
||||
def print_log(self, text):
|
||||
self.logLine = text
|
||||
self.redraw()
|
||||
|
||||
def redraw(self):
|
||||
"""method for redisplaying lines based on internal list of lines"""
|
||||
|
||||
self.stdscr.clear()
|
||||
self.paintStatus(self.statusText)
|
||||
i = 0
|
||||
index = len(self.lines) - 1
|
||||
while i < (self.rows - 3) and index >= 0:
|
||||
self.stdscr.addstr(self.rows - 3 - i, 0, self.lines[index], curses.A_NORMAL)
|
||||
i = i + 1
|
||||
index = index - 1
|
||||
|
||||
self.printLogLine(self.logLine)
|
||||
|
||||
self.stdscr.refresh()
|
||||
|
||||
def paintStatus(self, text):
|
||||
if len(text) > self.cols:
|
||||
raise RuntimeError("TextTooLongError")
|
||||
|
||||
self.stdscr.addstr(
|
||||
self.rows - 2, 0, text + " " * (self.cols - len(text)), curses.A_STANDOUT
|
||||
)
|
||||
|
||||
def printLogLine(self, text):
|
||||
self.stdscr.addstr(
|
||||
0, 0, text + " " * (self.cols - len(text)), curses.A_STANDOUT
|
||||
)
|
||||
|
||||
def doRead(self):
|
||||
"""Input is ready!"""
|
||||
curses.noecho()
|
||||
c = self.stdscr.getch() # read a character
|
||||
|
||||
if c == curses.KEY_BACKSPACE:
|
||||
self.searchText = self.searchText[:-1]
|
||||
|
||||
elif c == curses.KEY_ENTER or c == 10:
|
||||
text = self.searchText
|
||||
self.searchText = ""
|
||||
|
||||
self.print_line(">> %s" % text)
|
||||
|
||||
try:
|
||||
if self.callback:
|
||||
self.callback.on_line(text)
|
||||
except Exception as e:
|
||||
self.print_line(str(e))
|
||||
|
||||
self.stdscr.refresh()
|
||||
|
||||
elif isprint(c):
|
||||
if len(self.searchText) == self.cols - 2:
|
||||
return
|
||||
self.searchText = self.searchText + chr(c)
|
||||
|
||||
self.stdscr.addstr(
|
||||
self.rows - 1,
|
||||
0,
|
||||
self.searchText + (" " * (self.cols - len(self.searchText) - 2)),
|
||||
)
|
||||
|
||||
self.paintStatus(self.statusText + " %d" % len(self.searchText))
|
||||
self.stdscr.move(self.rows - 1, len(self.searchText))
|
||||
self.stdscr.refresh()
|
||||
|
||||
def logPrefix(self):
|
||||
return "CursesStdIO"
|
||||
|
||||
def close(self):
|
||||
"""clean up"""
|
||||
|
||||
curses.nocbreak()
|
||||
self.stdscr.keypad(0)
|
||||
curses.echo()
|
||||
curses.endwin()
|
||||
|
||||
|
||||
class Callback:
|
||||
def __init__(self, stdio):
|
||||
self.stdio = stdio
|
||||
|
||||
def on_line(self, text):
|
||||
self.stdio.print_line(text)
|
||||
|
||||
|
||||
def main(stdscr):
|
||||
screen = CursesStdIO(stdscr) # create Screen object
|
||||
|
||||
callback = Callback(screen)
|
||||
|
||||
screen.set_callback(callback)
|
||||
|
||||
stdscr.refresh()
|
||||
reactor.addReader(screen)
|
||||
reactor.run()
|
||||
screen.close()
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
curses.wrapper(main)
|
||||
367
contrib/experiments/test_messaging.py
Normal file
@@ -0,0 +1,367 @@
|
||||
# Copyright 2014-2016 OpenMarket Ltd
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
|
||||
""" This is an example of using the server to server implementation to do a
|
||||
basic chat style thing. It accepts commands from stdin and outputs to stdout.
|
||||
|
||||
It assumes that ucids are of the form <user>@<domain>, and uses <domain> as
|
||||
the address of the remote home server to hit.
|
||||
|
||||
Usage:
|
||||
python test_messaging.py <port>
|
||||
|
||||
Currently assumes the local address is localhost:<port>
|
||||
|
||||
"""
|
||||
|
||||
|
||||
import argparse
|
||||
import curses.wrapper
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import re
|
||||
|
||||
import cursesio
|
||||
|
||||
from twisted.internet import defer, reactor
|
||||
from twisted.python import log
|
||||
|
||||
from synapse.app.homeserver import SynapseHomeServer
|
||||
from synapse.federation import ReplicationHandler
|
||||
from synapse.federation.units import Pdu
|
||||
from synapse.util import origin_from_ucid
|
||||
|
||||
# from synapse.logging.utils import log_function
|
||||
|
||||
|
||||
logger = logging.getLogger("example")
|
||||
|
||||
|
||||
def excpetion_errback(failure):
|
||||
logging.exception(failure)
|
||||
|
||||
|
||||
class InputOutput:
|
||||
"""This is responsible for basic I/O so that a user can interact with
|
||||
the example app.
|
||||
"""
|
||||
|
||||
def __init__(self, screen, user):
|
||||
self.screen = screen
|
||||
self.user = user
|
||||
|
||||
def set_home_server(self, server):
|
||||
self.server = server
|
||||
|
||||
def on_line(self, line):
|
||||
"""This is where we process commands."""
|
||||
|
||||
try:
|
||||
m = re.match(r"^join (\S+)$", line)
|
||||
if m:
|
||||
# The `sender` wants to join a room.
|
||||
(room_name,) = m.groups()
|
||||
self.print_line("%s joining %s" % (self.user, room_name))
|
||||
self.server.join_room(room_name, self.user, self.user)
|
||||
# self.print_line("OK.")
|
||||
return
|
||||
|
||||
m = re.match(r"^invite (\S+) (\S+)$", line)
|
||||
if m:
|
||||
# `sender` wants to invite someone to a room
|
||||
room_name, invitee = m.groups()
|
||||
self.print_line("%s invited to %s" % (invitee, room_name))
|
||||
self.server.invite_to_room(room_name, self.user, invitee)
|
||||
# self.print_line("OK.")
|
||||
return
|
||||
|
||||
m = re.match(r"^send (\S+) (.*)$", line)
|
||||
if m:
|
||||
# `sender` wants to message a room
|
||||
room_name, body = m.groups()
|
||||
self.print_line("%s send to %s" % (self.user, room_name))
|
||||
self.server.send_message(room_name, self.user, body)
|
||||
# self.print_line("OK.")
|
||||
return
|
||||
|
||||
m = re.match(r"^backfill (\S+)$", line)
|
||||
if m:
|
||||
# we want to backfill a room
|
||||
(room_name,) = m.groups()
|
||||
self.print_line("backfill %s" % room_name)
|
||||
self.server.backfill(room_name)
|
||||
return
|
||||
|
||||
self.print_line("Unrecognized command")
|
||||
|
||||
except Exception as e:
|
||||
logger.exception(e)
|
||||
|
||||
def print_line(self, text):
|
||||
self.screen.print_line(text)
|
||||
|
||||
def print_log(self, text):
|
||||
self.screen.print_log(text)
|
||||
|
||||
|
||||
class IOLoggerHandler(logging.Handler):
|
||||
def __init__(self, io):
|
||||
logging.Handler.__init__(self)
|
||||
self.io = io
|
||||
|
||||
def emit(self, record):
|
||||
if record.levelno < logging.WARN:
|
||||
return
|
||||
|
||||
msg = self.format(record)
|
||||
self.io.print_log(msg)
|
||||
|
||||
|
||||
class Room:
|
||||
"""Used to store (in memory) the current membership state of a room, and
|
||||
which home servers we should send PDUs associated with the room to.
|
||||
"""
|
||||
|
||||
def __init__(self, room_name):
|
||||
self.room_name = room_name
|
||||
self.invited = set()
|
||||
self.participants = set()
|
||||
self.servers = set()
|
||||
|
||||
self.oldest_server = None
|
||||
|
||||
self.have_got_metadata = False
|
||||
|
||||
def add_participant(self, participant):
|
||||
"""Someone has joined the room"""
|
||||
self.participants.add(participant)
|
||||
self.invited.discard(participant)
|
||||
|
||||
server = origin_from_ucid(participant)
|
||||
self.servers.add(server)
|
||||
|
||||
if not self.oldest_server:
|
||||
self.oldest_server = server
|
||||
|
||||
def add_invited(self, invitee):
|
||||
"""Someone has been invited to the room"""
|
||||
self.invited.add(invitee)
|
||||
self.servers.add(origin_from_ucid(invitee))
|
||||
|
||||
|
||||
class HomeServer(ReplicationHandler):
|
||||
"""A very basic home server implentation that allows people to join a
|
||||
room and then invite other people.
|
||||
"""
|
||||
|
||||
def __init__(self, server_name, replication_layer, output):
|
||||
self.server_name = server_name
|
||||
self.replication_layer = replication_layer
|
||||
self.replication_layer.set_handler(self)
|
||||
|
||||
self.joined_rooms = {}
|
||||
|
||||
self.output = output
|
||||
|
||||
def on_receive_pdu(self, pdu):
|
||||
"""We just received a PDU"""
|
||||
pdu_type = pdu.pdu_type
|
||||
|
||||
if pdu_type == "sy.room.message":
|
||||
self._on_message(pdu)
|
||||
elif pdu_type == "sy.room.member" and "membership" in pdu.content:
|
||||
if pdu.content["membership"] == "join":
|
||||
self._on_join(pdu.context, pdu.state_key)
|
||||
elif pdu.content["membership"] == "invite":
|
||||
self._on_invite(pdu.origin, pdu.context, pdu.state_key)
|
||||
else:
|
||||
self.output.print_line(
|
||||
"#%s (unrec) %s = %s"
|
||||
% (pdu.context, pdu.pdu_type, json.dumps(pdu.content))
|
||||
)
|
||||
|
||||
def _on_message(self, pdu):
|
||||
"""We received a message"""
|
||||
self.output.print_line(
|
||||
"#%s %s %s" % (pdu.context, pdu.content["sender"], pdu.content["body"])
|
||||
)
|
||||
|
||||
def _on_join(self, context, joinee):
|
||||
"""Someone has joined a room, either a remote user or a local user"""
|
||||
room = self._get_or_create_room(context)
|
||||
room.add_participant(joinee)
|
||||
|
||||
self.output.print_line("#%s %s %s" % (context, joinee, "*** JOINED"))
|
||||
|
||||
def _on_invite(self, origin, context, invitee):
|
||||
"""Someone has been invited"""
|
||||
room = self._get_or_create_room(context)
|
||||
room.add_invited(invitee)
|
||||
|
||||
self.output.print_line("#%s %s %s" % (context, invitee, "*** INVITED"))
|
||||
|
||||
if not room.have_got_metadata and origin is not self.server_name:
|
||||
logger.debug("Get room state")
|
||||
self.replication_layer.get_state_for_context(origin, context)
|
||||
room.have_got_metadata = True
|
||||
|
||||
@defer.inlineCallbacks
|
||||
def send_message(self, room_name, sender, body):
|
||||
"""Send a message to a room!"""
|
||||
destinations = yield self.get_servers_for_context(room_name)
|
||||
|
||||
try:
|
||||
yield self.replication_layer.send_pdu(
|
||||
Pdu.create_new(
|
||||
context=room_name,
|
||||
pdu_type="sy.room.message",
|
||||
content={"sender": sender, "body": body},
|
||||
origin=self.server_name,
|
||||
destinations=destinations,
|
||||
)
|
||||
)
|
||||
except Exception as e:
|
||||
logger.exception(e)
|
||||
|
||||
@defer.inlineCallbacks
|
||||
def join_room(self, room_name, sender, joinee):
|
||||
"""Join a room!"""
|
||||
self._on_join(room_name, joinee)
|
||||
|
||||
destinations = yield self.get_servers_for_context(room_name)
|
||||
|
||||
try:
|
||||
pdu = Pdu.create_new(
|
||||
context=room_name,
|
||||
pdu_type="sy.room.member",
|
||||
is_state=True,
|
||||
state_key=joinee,
|
||||
content={"membership": "join"},
|
||||
origin=self.server_name,
|
||||
destinations=destinations,
|
||||
)
|
||||
yield self.replication_layer.send_pdu(pdu)
|
||||
except Exception as e:
|
||||
logger.exception(e)
|
||||
|
||||
@defer.inlineCallbacks
|
||||
def invite_to_room(self, room_name, sender, invitee):
|
||||
"""Invite someone to a room!"""
|
||||
self._on_invite(self.server_name, room_name, invitee)
|
||||
|
||||
destinations = yield self.get_servers_for_context(room_name)
|
||||
|
||||
try:
|
||||
yield self.replication_layer.send_pdu(
|
||||
Pdu.create_new(
|
||||
context=room_name,
|
||||
is_state=True,
|
||||
pdu_type="sy.room.member",
|
||||
state_key=invitee,
|
||||
content={"membership": "invite"},
|
||||
origin=self.server_name,
|
||||
destinations=destinations,
|
||||
)
|
||||
)
|
||||
except Exception as e:
|
||||
logger.exception(e)
|
||||
|
||||
def backfill(self, room_name, limit=5):
|
||||
room = self.joined_rooms.get(room_name)
|
||||
|
||||
if not room:
|
||||
return
|
||||
|
||||
dest = room.oldest_server
|
||||
|
||||
return self.replication_layer.backfill(dest, room_name, limit)
|
||||
|
||||
def _get_room_remote_servers(self, room_name):
|
||||
return list(self.joined_rooms.setdefault(room_name).servers)
|
||||
|
||||
def _get_or_create_room(self, room_name):
|
||||
return self.joined_rooms.setdefault(room_name, Room(room_name))
|
||||
|
||||
def get_servers_for_context(self, context):
|
||||
return defer.succeed(
|
||||
self.joined_rooms.setdefault(context, Room(context)).servers
|
||||
)
|
||||
|
||||
|
||||
def main(stdscr):
|
||||
parser = argparse.ArgumentParser()
|
||||
parser.add_argument("user", type=str)
|
||||
parser.add_argument("-v", "--verbose", action="count")
|
||||
args = parser.parse_args()
|
||||
|
||||
user = args.user
|
||||
server_name = origin_from_ucid(user)
|
||||
|
||||
# Set up logging
|
||||
|
||||
root_logger = logging.getLogger()
|
||||
|
||||
formatter = logging.Formatter(
|
||||
"%(asctime)s - %(name)s - %(lineno)d - %(levelname)s - %(message)s"
|
||||
)
|
||||
if not os.path.exists("logs"):
|
||||
os.makedirs("logs")
|
||||
fh = logging.FileHandler("logs/%s" % user)
|
||||
fh.setFormatter(formatter)
|
||||
|
||||
root_logger.addHandler(fh)
|
||||
root_logger.setLevel(logging.DEBUG)
|
||||
|
||||
# Hack: The only way to get it to stop logging to sys.stderr :(
|
||||
log.theLogPublisher.observers = []
|
||||
observer = log.PythonLoggingObserver()
|
||||
observer.start()
|
||||
|
||||
# Set up synapse server
|
||||
|
||||
curses_stdio = cursesio.CursesStdIO(stdscr)
|
||||
input_output = InputOutput(curses_stdio, user)
|
||||
|
||||
curses_stdio.set_callback(input_output)
|
||||
|
||||
app_hs = SynapseHomeServer(server_name, db_name="dbs/%s" % user)
|
||||
replication = app_hs.get_replication_layer()
|
||||
|
||||
hs = HomeServer(server_name, replication, curses_stdio)
|
||||
|
||||
input_output.set_home_server(hs)
|
||||
|
||||
# Add input_output logger
|
||||
io_logger = IOLoggerHandler(input_output)
|
||||
io_logger.setFormatter(formatter)
|
||||
root_logger.addHandler(io_logger)
|
||||
|
||||
# Start!
|
||||
|
||||
try:
|
||||
port = int(server_name.split(":")[1])
|
||||
except Exception:
|
||||
port = 12345
|
||||
|
||||
app_hs.get_http_server().start_listening(port)
|
||||
|
||||
reactor.addReader(curses_stdio)
|
||||
|
||||
reactor.run()
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
curses.wrapper(main)
|
||||
@@ -66,18 +66,6 @@
],
"title": "Dashboards",
"type": "dashboards"
},
{
"asDropdown": false,
"icon": "external link",
"includeVars": false,
"keepTime": false,
"tags": [],
"targetBlank": true,
"title": "Synapse Documentation",
"tooltip": "Open Documentation",
"type": "link",
"url": "https://matrix-org.github.io/synapse/latest/"
}
],
"panels": [
@@ -10901,4 +10889,4 @@
"title": "Synapse",
"uid": "000000012",
"version": 100
}
}
@@ -1,3 +1,11 @@
|
||||
import argparse
|
||||
import cgi
|
||||
import datetime
|
||||
import json
|
||||
|
||||
import pydot
|
||||
import urllib2
|
||||
|
||||
# Copyright 2014-2016 OpenMarket Ltd
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
@@ -12,25 +20,12 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import argparse
|
||||
import cgi
|
||||
import datetime
|
||||
import json
|
||||
import urllib.request
|
||||
from typing import List
|
||||
|
||||
import pydot
|
||||
def make_name(pdu_id, origin):
|
||||
return "%s@%s" % (pdu_id, origin)
|
||||
|
||||
|
||||
def make_name(pdu_id: str, origin: str) -> str:
|
||||
return f"{pdu_id}@{origin}"
|
||||
|
||||
|
||||
def make_graph(pdus: List[dict], filename_prefix: str) -> None:
|
||||
"""
|
||||
Generate a dot and SVG file for a graph of events in the room based on the
|
||||
topological ordering by querying a homeserver.
|
||||
"""
|
||||
def make_graph(pdus, room, filename_prefix):
|
||||
pdu_map = {}
|
||||
node_map = {}
|
||||
|
||||
@@ -116,10 +111,10 @@ def make_graph(pdus: List[dict], filename_prefix: str) -> None:
|
||||
graph.write_svg("%s.svg" % filename_prefix, prog="dot")
|
||||
|
||||
|
||||
def get_pdus(host: str, room: str) -> List[dict]:
|
||||
def get_pdus(host, room):
|
||||
transaction = json.loads(
|
||||
urllib.request.urlopen(
|
||||
f"http://{host}/_matrix/federation/v1/context/{room}/"
|
||||
urllib2.urlopen(
|
||||
"http://%s/_matrix/federation/v1/context/%s/" % (host, room)
|
||||
).read()
|
||||
)
|
||||
|
||||
@@ -146,4 +141,4 @@ if __name__ == "__main__":
|
||||
|
||||
pdus = get_pdus(host, room)
|
||||
|
||||
make_graph(pdus, prefix)
|
||||
make_graph(pdus, room, prefix)
|
||||
|
||||
@@ -14,31 +14,22 @@
|
||||
|
||||
|
||||
import argparse
|
||||
import cgi
|
||||
import datetime
|
||||
import html
|
||||
import json
|
||||
import sqlite3
|
||||
|
||||
import pydot
|
||||
|
||||
from synapse.api.room_versions import KNOWN_ROOM_VERSIONS
|
||||
from synapse.events import make_event_from_dict
|
||||
from synapse.events import FrozenEvent
|
||||
from synapse.util.frozenutils import unfreeze
|
||||
|
||||
|
||||
def make_graph(db_name: str, room_id: str, file_prefix: str, limit: int) -> None:
|
||||
"""
|
||||
Generate a dot and SVG file for a graph of events in the room based on the
|
||||
topological ordering by reading from a Synapse SQLite database.
|
||||
"""
|
||||
def make_graph(db_name, room_id, file_prefix, limit):
|
||||
conn = sqlite3.connect(db_name)
|
||||
|
||||
sql = "SELECT room_version FROM rooms WHERE room_id = ?"
|
||||
c = conn.execute(sql, (room_id,))
|
||||
room_version = KNOWN_ROOM_VERSIONS[c.fetchone()[0]]
|
||||
|
||||
sql = (
|
||||
"SELECT json, internal_metadata FROM event_json as j "
|
||||
"SELECT json FROM event_json as j "
|
||||
"INNER JOIN events as e ON e.event_id = j.event_id "
|
||||
"WHERE j.room_id = ?"
|
||||
)
|
||||
@@ -52,10 +43,7 @@ def make_graph(db_name: str, room_id: str, file_prefix: str, limit: int) -> None
|
||||
|
||||
c = conn.execute(sql, args)
|
||||
|
||||
events = [
|
||||
make_event_from_dict(json.loads(e[0]), room_version, json.loads(e[1]))
|
||||
for e in c.fetchall()
|
||||
]
|
||||
events = [FrozenEvent(json.loads(e[0])) for e in c.fetchall()]
|
||||
|
||||
events.sort(key=lambda e: e.depth)
|
||||
|
||||
@@ -96,7 +84,7 @@ def make_graph(db_name: str, room_id: str, file_prefix: str, limit: int) -> None
|
||||
"name": event.event_id,
|
||||
"type": event.type,
|
||||
"state_key": event.get("state_key", None),
|
||||
"content": html.escape(content, quote=True),
|
||||
"content": cgi.escape(content, quote=True),
|
||||
"time": t,
|
||||
"depth": event.depth,
|
||||
"state_group": state_group,
|
||||
@@ -108,11 +96,11 @@ def make_graph(db_name: str, room_id: str, file_prefix: str, limit: int) -> None
|
||||
graph.add_node(node)
|
||||
|
||||
for event in events:
|
||||
for prev_id in event.prev_event_ids():
|
||||
for prev_id, _ in event.prev_events:
|
||||
try:
|
||||
end_node = node_map[prev_id]
|
||||
except Exception:
|
||||
end_node = pydot.Node(name=prev_id, label=f"<<b>{prev_id}</b>>")
|
||||
end_node = pydot.Node(name=prev_id, label="<<b>%s</b>>" % (prev_id,))
|
||||
|
||||
node_map[prev_id] = end_node
|
||||
graph.add_node(end_node)
|
||||
@@ -124,7 +112,7 @@ def make_graph(db_name: str, room_id: str, file_prefix: str, limit: int) -> None
|
||||
if len(event_ids) <= 1:
|
||||
continue
|
||||
|
||||
cluster = pydot.Cluster(str(group), label=f"<State Group: {str(group)}>")
|
||||
cluster = pydot.Cluster(str(group), label="<State Group: %s>" % (str(group),))
|
||||
|
||||
for event_id in event_ids:
|
||||
cluster.add_node(node_map[event_id])
|
||||
@@ -138,7 +126,7 @@ def make_graph(db_name: str, room_id: str, file_prefix: str, limit: int) -> None
|
||||
if __name__ == "__main__":
|
||||
parser = argparse.ArgumentParser(
|
||||
description="Generate a PDU graph for a given room by talking "
|
||||
"to the given Synapse SQLite file to get the list of PDUs. \n"
|
||||
"to the given homeserver to get the list of PDUs. \n"
|
||||
"Requires pydot."
|
||||
)
|
||||
parser.add_argument(
|
||||
|
||||
@@ -1,3 +1,13 @@
|
||||
import argparse
|
||||
import cgi
|
||||
import datetime
|
||||
|
||||
import pydot
|
||||
import simplejson as json
|
||||
|
||||
from synapse.events import FrozenEvent
|
||||
from synapse.util.frozenutils import unfreeze
|
||||
|
||||
# Copyright 2016 OpenMarket Ltd
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
@@ -12,35 +22,15 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import argparse
|
||||
import datetime
|
||||
import html
|
||||
import json
|
||||
|
||||
import pydot
|
||||
|
||||
from synapse.api.room_versions import KNOWN_ROOM_VERSIONS
|
||||
from synapse.events import make_event_from_dict
|
||||
from synapse.util.frozenutils import unfreeze
|
||||
|
||||
|
||||
def make_graph(file_name: str, file_prefix: str, limit: int) -> None:
|
||||
"""
|
||||
Generate a dot and SVG file for a graph of events in the room based on the
|
||||
topological ordering by reading line-delimited JSON from a file.
|
||||
"""
|
||||
def make_graph(file_name, room_id, file_prefix, limit):
|
||||
print("Reading lines")
|
||||
with open(file_name) as f:
|
||||
lines = f.readlines()
|
||||
|
||||
print("Read lines")
|
||||
|
||||
# Figure out the room version, assume the first line is the create event.
|
||||
room_version = KNOWN_ROOM_VERSIONS[
|
||||
json.loads(lines[0]).get("content", {}).get("room_version")
|
||||
]
|
||||
|
||||
events = [make_event_from_dict(json.loads(line), room_version) for line in lines]
|
||||
events = [FrozenEvent(json.loads(line)) for line in lines]
|
||||
|
||||
print("Loaded events.")
|
||||
|
||||
@@ -76,8 +66,8 @@ def make_graph(file_name: str, file_prefix: str, limit: int) -> None:
|
||||
content.append(
|
||||
"<b>%s</b>: %s,"
|
||||
% (
|
||||
html.escape(key, quote=True).encode("ascii", "xmlcharrefreplace"),
|
||||
html.escape(value, quote=True).encode("ascii", "xmlcharrefreplace"),
|
||||
cgi.escape(key, quote=True).encode("ascii", "xmlcharrefreplace"),
|
||||
cgi.escape(value, quote=True).encode("ascii", "xmlcharrefreplace"),
|
||||
)
|
||||
)
|
||||
|
||||
@@ -111,11 +101,11 @@ def make_graph(file_name: str, file_prefix: str, limit: int) -> None:
|
||||
print("Created Nodes")
|
||||
|
||||
for event in events:
|
||||
for prev_id in event.prev_event_ids():
|
||||
for prev_id, _ in event.prev_events:
|
||||
try:
|
||||
end_node = node_map[prev_id]
|
||||
except Exception:
|
||||
end_node = pydot.Node(name=prev_id, label=f"<<b>{prev_id}</b>>")
|
||||
end_node = pydot.Node(name=prev_id, label="<<b>%s</b>>" % (prev_id,))
|
||||
|
||||
node_map[prev_id] = end_node
|
||||
graph.add_node(end_node)
|
||||
@@ -149,7 +139,8 @@ if __name__ == "__main__":
|
||||
)
|
||||
parser.add_argument("-l", "--limit", help="Only retrieve the last N events.")
|
||||
parser.add_argument("event_file")
|
||||
parser.add_argument("room")
|
||||
|
||||
args = parser.parse_args()
|
||||
|
||||
make_graph(args.event_file, args.prefix, args.limit)
|
||||
make_graph(args.event_file, args.room, args.prefix, args.limit)
|
||||
|
||||
295
contrib/jitsimeetbridge/jitsimeetbridge.py
Normal file
@@ -0,0 +1,295 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
"""
|
||||
This is an attempt at bridging matrix clients into a Jitsi meet room via Matrix
video call. It uses hard-coded xml strings over XMPP BOSH. It can display one
|
||||
of the streams from the Jitsi bridge until the second lot of SDP comes down and
|
||||
we set the remote SDP at which point the stream ends. Our video never gets to
|
||||
the bridge.
|
||||
|
||||
Requires:
|
||||
npm install jquery jsdom
|
||||
"""
|
||||
import json
|
||||
import subprocess
|
||||
import time
|
||||
|
||||
import gevent
|
||||
import grequests
|
||||
from BeautifulSoup import BeautifulSoup
|
||||
|
||||
ACCESS_TOKEN = ""
|
||||
|
||||
MATRIXBASE = "https://matrix.org/_matrix/client/api/v1/"
|
||||
MYUSERNAME = "@davetest:matrix.org"
|
||||
|
||||
HTTPBIND = "https://meet.jit.si/http-bind"
|
||||
# HTTPBIND = 'https://jitsi.vuc.me/http-bind'
|
||||
# ROOMNAME = "matrix"
|
||||
ROOMNAME = "pibble"
|
||||
|
||||
HOST = "guest.jit.si"
|
||||
# HOST="jitsi.vuc.me"
|
||||
|
||||
TURNSERVER = "turn.guest.jit.si"
|
||||
# TURNSERVER="turn.jitsi.vuc.me"
|
||||
|
||||
ROOMDOMAIN = "meet.jit.si"
|
||||
# ROOMDOMAIN="conference.jitsi.vuc.me"
|
||||
|
||||
|
||||
class TrivialMatrixClient:
|
||||
def __init__(self, access_token):
|
||||
self.token = None
|
||||
self.access_token = access_token
|
||||
|
||||
def getEvent(self):
|
||||
while True:
|
||||
url = (
|
||||
MATRIXBASE
|
||||
+ "events?access_token="
|
||||
+ self.access_token
|
||||
+ "&timeout=60000"
|
||||
)
|
||||
if self.token:
|
||||
url += "&from=" + self.token
|
||||
req = grequests.get(url)
|
||||
resps = grequests.map([req])
|
||||
obj = json.loads(resps[0].content)
|
||||
print("incoming from matrix", obj)
|
||||
if "end" not in obj:
|
||||
continue
|
||||
self.token = obj["end"]
|
||||
if len(obj["chunk"]):
|
||||
return obj["chunk"][0]
|
||||
|
||||
def joinRoom(self, roomId):
|
||||
url = MATRIXBASE + "rooms/" + roomId + "/join?access_token=" + self.access_token
|
||||
print(url)
|
||||
headers = {"Content-Type": "application/json"}
|
||||
req = grequests.post(url, headers=headers, data="{}")
|
||||
resps = grequests.map([req])
|
||||
obj = json.loads(resps[0].content)
|
||||
print("response: ", obj)
|
||||
|
||||
def sendEvent(self, roomId, evType, event):
|
||||
url = (
|
||||
MATRIXBASE
|
||||
+ "rooms/"
|
||||
+ roomId
|
||||
+ "/send/"
|
||||
+ evType
|
||||
+ "?access_token="
|
||||
+ self.access_token
|
||||
)
|
||||
print(url)
|
||||
print(json.dumps(event))
|
||||
headers = {"Content-Type": "application/json"}
|
||||
req = grequests.post(url, headers=headers, data=json.dumps(event))
|
||||
resps = grequests.map([req])
|
||||
obj = json.loads(resps[0].content)
|
||||
print("response: ", obj)
|
||||
|
||||
|
||||
xmppClients = {}
|
||||
|
||||
|
||||
def matrixLoop():
|
||||
while True:
|
||||
ev = matrixCli.getEvent()
|
||||
print(ev)
|
||||
if ev["type"] == "m.room.member":
|
||||
print("membership event")
|
||||
if ev["membership"] == "invite" and ev["state_key"] == MYUSERNAME:
|
||||
roomId = ev["room_id"]
|
||||
print("joining room %s" % (roomId))
|
||||
matrixCli.joinRoom(roomId)
|
||||
elif ev["type"] == "m.room.message":
|
||||
if ev["room_id"] in xmppClients:
|
||||
print("already have a bridge for that user, ignoring")
|
||||
continue
|
||||
print("got message, connecting")
|
||||
xmppClients[ev["room_id"]] = TrivialXmppClient(ev["room_id"], ev["user_id"])
|
||||
gevent.spawn(xmppClients[ev["room_id"]].xmppLoop)
|
||||
elif ev["type"] == "m.call.invite":
|
||||
print("Incoming call")
|
||||
# sdp = ev['content']['offer']['sdp']
|
||||
# print "sdp: %s" % (sdp)
|
||||
# xmppClients[ev['room_id']] = TrivialXmppClient(ev['room_id'], ev['user_id'])
|
||||
# gevent.spawn(xmppClients[ev['room_id']].xmppLoop)
|
||||
elif ev["type"] == "m.call.answer":
|
||||
print("Call answered")
|
||||
sdp = ev["content"]["answer"]["sdp"]
|
||||
if ev["room_id"] not in xmppClients:
|
||||
print("We didn't have a call for that room")
|
||||
continue
|
||||
# should probably check call ID too
|
||||
xmppCli = xmppClients[ev["room_id"]]
|
||||
xmppCli.sendAnswer(sdp)
|
||||
elif ev["type"] == "m.call.hangup":
|
||||
if ev["room_id"] in xmppClients:
|
||||
xmppClients[ev["room_id"]].stop()
|
||||
del xmppClients[ev["room_id"]]
|
||||
|
||||
|
||||
class TrivialXmppClient:
|
||||
def __init__(self, matrixRoom, userId):
|
||||
self.rid = 0
|
||||
self.matrixRoom = matrixRoom
|
||||
self.userId = userId
|
||||
self.running = True
|
||||
|
||||
def stop(self):
|
||||
self.running = False
|
||||
|
||||
def nextRid(self):
|
||||
self.rid += 1
|
||||
return "%d" % (self.rid)
|
||||
|
||||
def sendIq(self, xml):
|
||||
fullXml = (
|
||||
"<body rid='%s' xmlns='http://jabber.org/protocol/httpbind' sid='%s'>%s</body>"
|
||||
% (self.nextRid(), self.sid, xml)
|
||||
)
|
||||
# print "\t>>>%s" % (fullXml)
|
||||
return self.xmppPoke(fullXml)
|
||||
|
||||
def xmppPoke(self, xml):
|
||||
headers = {"Content-Type": "application/xml"}
|
||||
req = grequests.post(HTTPBIND, verify=False, headers=headers, data=xml)
|
||||
resps = grequests.map([req])
|
||||
obj = BeautifulSoup(resps[0].content)
|
||||
return obj
|
||||
|
||||
def sendAnswer(self, answer):
|
||||
print("sdp from matrix client", answer)
|
||||
p = subprocess.Popen(
|
||||
["node", "unjingle/unjingle.js", "--sdp"],
|
||||
stdin=subprocess.PIPE,
|
||||
stdout=subprocess.PIPE,
|
||||
)
|
||||
jingle, out_err = p.communicate(answer)
|
||||
jingle = jingle % {
|
||||
"tojid": self.callfrom,
|
||||
"action": "session-accept",
|
||||
"initiator": self.callfrom,
|
||||
"responder": self.jid,
|
||||
"sid": self.callsid,
|
||||
}
|
||||
print("answer jingle from sdp", jingle)
|
||||
res = self.sendIq(jingle)
|
||||
print("reply from answer: ", res)
|
||||
|
||||
self.ssrcs = {}
|
||||
jingleSoup = BeautifulSoup(jingle)
|
||||
for cont in jingleSoup.iq.jingle.findAll("content"):
|
||||
if cont.description:
|
||||
self.ssrcs[cont["name"]] = cont.description["ssrc"]
|
||||
print("my ssrcs:", self.ssrcs)
|
||||
|
||||
gevent.joinall([gevent.spawn(self.advertiseSsrcs)])
|
||||
|
||||
def advertiseSsrcs(self):
|
||||
time.sleep(7)
|
||||
print("SSRC spammer started")
|
||||
while self.running:
|
||||
ssrcMsg = "<presence to='%(tojid)s' xmlns='jabber:client'><x xmlns='http://jabber.org/protocol/muc'/><c xmlns='http://jabber.org/protocol/caps' hash='sha-1' node='http://jitsi.org/jitsimeet' ver='0WkSdhFnAUxrz4ImQQLdB80GFlE='/><nick xmlns='http://jabber.org/protocol/nick'>%(nick)s</nick><stats xmlns='http://jitsi.org/jitmeet/stats'><stat name='bitrate_download' value='175'/><stat name='bitrate_upload' value='176'/><stat name='packetLoss_total' value='0'/><stat name='packetLoss_download' value='0'/><stat name='packetLoss_upload' value='0'/></stats><media xmlns='http://estos.de/ns/mjs'><source type='audio' ssrc='%(assrc)s' direction='sendre'/><source type='video' ssrc='%(vssrc)s' direction='sendre'/></media></presence>" % {
|
||||
"tojid": "%s@%s/%s" % (ROOMNAME, ROOMDOMAIN, self.shortJid),
|
||||
"nick": self.userId,
|
||||
"assrc": self.ssrcs["audio"],
|
||||
"vssrc": self.ssrcs["video"],
|
||||
}
|
||||
res = self.sendIq(ssrcMsg)
|
||||
print("reply from ssrc announce: ", res)
|
||||
time.sleep(10)
|
||||
|
||||
def xmppLoop(self):
|
||||
self.matrixCallId = time.time()
|
||||
res = self.xmppPoke(
|
||||
"<body rid='%s' xmlns='http://jabber.org/protocol/httpbind' to='%s' xml:lang='en' wait='60' hold='1' content='text/xml; charset=utf-8' ver='1.6' xmpp:version='1.0' xmlns:xmpp='urn:xmpp:xbosh'/>"
|
||||
% (self.nextRid(), HOST)
|
||||
)
|
||||
|
||||
print(res)
|
||||
self.sid = res.body["sid"]
|
||||
print("sid %s" % (self.sid))
|
||||
|
||||
res = self.sendIq(
|
||||
"<auth xmlns='urn:ietf:params:xml:ns:xmpp-sasl' mechanism='ANONYMOUS'/>"
|
||||
)
|
||||
|
||||
res = self.xmppPoke(
|
||||
"<body rid='%s' xmlns='http://jabber.org/protocol/httpbind' sid='%s' to='%s' xml:lang='en' xmpp:restart='true' xmlns:xmpp='urn:xmpp:xbosh'/>"
|
||||
% (self.nextRid(), self.sid, HOST)
|
||||
)
|
||||
|
||||
res = self.sendIq(
|
||||
"<iq type='set' id='_bind_auth_2' xmlns='jabber:client'><bind xmlns='urn:ietf:params:xml:ns:xmpp-bind'/></iq>"
|
||||
)
|
||||
print(res)
|
||||
|
||||
self.jid = res.body.iq.bind.jid.string
|
||||
print("jid: %s" % (self.jid))
|
||||
self.shortJid = self.jid.split("-")[0]
|
||||
|
||||
res = self.sendIq(
|
||||
"<iq type='set' id='_session_auth_2' xmlns='jabber:client'><session xmlns='urn:ietf:params:xml:ns:xmpp-session'/></iq>"
|
||||
)
|
||||
|
||||
# randomthing = res.body.iq['to']
|
||||
# whatsitpart = randomthing.split('-')[0]
|
||||
|
||||
# print "other random bind thing: %s" % (randomthing)
|
||||
|
||||
# advertise presence to the jitsi room, with our nick
|
||||
res = self.sendIq(
|
||||
"<iq type='get' to='%s' xmlns='jabber:client' id='1:sendIQ'><services xmlns='urn:xmpp:extdisco:1'><service host='%s'/></services></iq><presence to='%s@%s/d98f6c40' xmlns='jabber:client'><x xmlns='http://jabber.org/protocol/muc'/><c xmlns='http://jabber.org/protocol/caps' hash='sha-1' node='http://jitsi.org/jitsimeet' ver='0WkSdhFnAUxrz4ImQQLdB80GFlE='/><nick xmlns='http://jabber.org/protocol/nick'>%s</nick></presence>"
|
||||
% (HOST, TURNSERVER, ROOMNAME, ROOMDOMAIN, self.userId)
|
||||
)
|
||||
self.muc = {"users": []}
|
||||
for p in res.body.findAll("presence"):
|
||||
u = {}
|
||||
u["shortJid"] = p["from"].split("/")[1]
|
||||
if p.c and p.c.nick:
|
||||
u["nick"] = p.c.nick.string
|
||||
self.muc["users"].append(u)
|
||||
print("muc: ", self.muc)
|
||||
|
||||
# wait for stuff
|
||||
while True:
|
||||
print("waiting...")
|
||||
res = self.sendIq("")
|
||||
print("got from stream: ", res)
|
||||
if res.body.iq:
|
||||
jingles = res.body.iq.findAll("jingle")
|
||||
if len(jingles):
|
||||
self.callfrom = res.body.iq["from"]
|
||||
self.handleInvite(jingles[0])
|
||||
elif "type" in res.body and res.body["type"] == "terminate":
|
||||
self.running = False
|
||||
del xmppClients[self.matrixRoom]
|
||||
return
|
||||
|
||||
def handleInvite(self, jingle):
|
||||
self.initiator = jingle["initiator"]
|
||||
self.callsid = jingle["sid"]
|
||||
p = subprocess.Popen(
|
||||
["node", "unjingle/unjingle.js", "--jingle"],
|
||||
stdin=subprocess.PIPE,
|
||||
stdout=subprocess.PIPE,
|
||||
)
|
||||
print("raw jingle invite", str(jingle))
|
||||
sdp, out_err = p.communicate(str(jingle))
|
||||
print("transformed remote offer sdp", sdp)
|
||||
inviteEvent = {
|
||||
"offer": {"type": "offer", "sdp": sdp},
|
||||
"call_id": self.matrixCallId,
|
||||
"version": 0,
|
||||
"lifetime": 30000,
|
||||
}
|
||||
matrixCli.sendEvent(self.matrixRoom, "m.call.invite", inviteEvent)
|
||||
|
||||
|
||||
matrixCli = TrivialMatrixClient(ACCESS_TOKEN) # Undefined name
|
||||
|
||||
gevent.joinall([gevent.spawn(matrixLoop)])
|
||||
188
contrib/jitsimeetbridge/syweb-jitsi-conference.patch
Normal file
@@ -0,0 +1,188 @@
|
||||
diff --git a/syweb/webclient/app/components/matrix/matrix-call.js b/syweb/webclient/app/components/matrix/matrix-call.js
|
||||
index 9fbfff0..dc68077 100644
|
||||
--- a/syweb/webclient/app/components/matrix/matrix-call.js
|
||||
+++ b/syweb/webclient/app/components/matrix/matrix-call.js
|
||||
@@ -16,6 +16,45 @@ limitations under the License.
|
||||
|
||||
'use strict';
|
||||
|
||||
+
|
||||
+function sendKeyframe(pc) {
|
||||
+ console.log('sendkeyframe', pc.iceConnectionState);
|
||||
+ if (pc.iceConnectionState !== 'connected') return; // safe...
|
||||
+ pc.setRemoteDescription(
|
||||
+ pc.remoteDescription,
|
||||
+ function () {
|
||||
+ pc.createAnswer(
|
||||
+ function (modifiedAnswer) {
|
||||
+ pc.setLocalDescription(
|
||||
+ modifiedAnswer,
|
||||
+ function () {
|
||||
+ // noop
|
||||
+ },
|
||||
+ function (error) {
|
||||
+ console.log('triggerKeyframe setLocalDescription failed', error);
|
||||
+ messageHandler.showError();
|
||||
+ }
|
||||
+ );
|
||||
+ },
|
||||
+ function (error) {
|
||||
+ console.log('triggerKeyframe createAnswer failed', error);
|
||||
+ messageHandler.showError();
|
||||
+ }
|
||||
+ );
|
||||
+ },
|
||||
+ function (error) {
|
||||
+ console.log('triggerKeyframe setRemoteDescription failed', error);
|
||||
+ messageHandler.showError();
|
||||
+ }
|
||||
+ );
|
||||
+}
|
||||
+
|
||||
+
|
||||
+
|
||||
+
|
||||
+
|
||||
+
|
||||
+
|
||||
var forAllVideoTracksOnStream = function(s, f) {
|
||||
var tracks = s.getVideoTracks();
|
||||
for (var i = 0; i < tracks.length; i++) {
|
||||
@@ -83,7 +122,7 @@ angular.module('MatrixCall', [])
|
||||
}
|
||||
|
||||
// FIXME: we should prevent any calls from being placed or accepted before this has finished
|
||||
- MatrixCall.getTurnServer();
|
||||
+ //MatrixCall.getTurnServer();
|
||||
|
||||
MatrixCall.CALL_TIMEOUT = 60000;
|
||||
MatrixCall.FALLBACK_STUN_SERVER = 'stun:stun.l.google.com:19302';
|
||||
@@ -132,6 +171,22 @@ angular.module('MatrixCall', [])
|
||||
pc.onsignalingstatechange = function() { self.onSignallingStateChanged(); };
|
||||
pc.onicecandidate = function(c) { self.gotLocalIceCandidate(c); };
|
||||
pc.onaddstream = function(s) { self.onAddStream(s); };
|
||||
+
|
||||
+ var datachan = pc.createDataChannel('RTCDataChannel', {
|
||||
+ reliable: false
|
||||
+ });
|
||||
+ console.log("data chan: "+datachan);
|
||||
+ datachan.onopen = function() {
|
||||
+ console.log("data channel open");
|
||||
+ };
|
||||
+ datachan.onmessage = function() {
|
||||
+ console.log("data channel message");
|
||||
+ };
|
||||
+ pc.ondatachannel = function(event) {
|
||||
+ console.log("have data channel");
|
||||
+ event.channel.binaryType = 'blob';
|
||||
+ };
|
||||
+
|
||||
return pc;
|
||||
}
|
||||
|
||||
@@ -200,6 +255,12 @@ angular.module('MatrixCall', [])
|
||||
}, this.msg.lifetime - event.age);
|
||||
};
|
||||
|
||||
+ MatrixCall.prototype.receivedInvite = function(event) {
|
||||
+ console.log("Got second invite for call "+this.call_id);
|
||||
+ this.peerConn.setRemoteDescription(new RTCSessionDescription(this.msg.offer), this.onSetRemoteDescriptionSuccess, this.onSetRemoteDescriptionError);
|
||||
+ };
|
||||
+
|
||||
+
|
||||
// perverse as it may seem, sometimes we want to instantiate a call with a hangup message
|
||||
// (because when getting the state of the room on load, events come in reverse order and
|
||||
// we want to remember that a call has been hung up)
|
||||
@@ -349,7 +410,7 @@ angular.module('MatrixCall', [])
|
||||
'mandatory': {
|
||||
'OfferToReceiveAudio': true,
|
||||
'OfferToReceiveVideo': this.type == 'video'
|
||||
- },
|
||||
+ }
|
||||
};
|
||||
this.peerConn.createAnswer(function(d) { self.createdAnswer(d); }, function(e) {}, constraints);
|
||||
// This can't be in an apply() because it's called by a predecessor call under glare conditions :(
|
||||
@@ -359,8 +420,20 @@ angular.module('MatrixCall', [])
|
||||
MatrixCall.prototype.gotLocalIceCandidate = function(event) {
|
||||
if (event.candidate) {
|
||||
console.log("Got local ICE "+event.candidate.sdpMid+" candidate: "+event.candidate.candidate);
|
||||
- this.sendCandidate(event.candidate);
|
||||
- }
|
||||
+ //this.sendCandidate(event.candidate);
|
||||
+ } else {
|
||||
+ console.log("have all candidates, sending answer");
|
||||
+ var content = {
|
||||
+ version: 0,
|
||||
+ call_id: this.call_id,
|
||||
+ answer: this.peerConn.localDescription
|
||||
+ };
|
||||
+ this.sendEventWithRetry('m.call.answer', content);
|
||||
+ var self = this;
|
||||
+ $rootScope.$apply(function() {
|
||||
+ self.state = 'connecting';
|
||||
+ });
|
||||
+ }
|
||||
}
|
||||
|
||||
MatrixCall.prototype.gotRemoteIceCandidate = function(cand) {
|
||||
@@ -418,15 +491,6 @@ angular.module('MatrixCall', [])
|
||||
console.log("Created answer: "+description);
|
||||
var self = this;
|
||||
this.peerConn.setLocalDescription(description, function() {
|
||||
- var content = {
|
||||
- version: 0,
|
||||
- call_id: self.call_id,
|
||||
- answer: self.peerConn.localDescription
|
||||
- };
|
||||
- self.sendEventWithRetry('m.call.answer', content);
|
||||
- $rootScope.$apply(function() {
|
||||
- self.state = 'connecting';
|
||||
- });
|
||||
}, function() { console.log("Error setting local description!"); } );
|
||||
};
|
||||
|
||||
@@ -448,6 +512,9 @@ angular.module('MatrixCall', [])
|
||||
$rootScope.$apply(function() {
|
||||
self.state = 'connected';
|
||||
self.didConnect = true;
|
||||
+ /*$timeout(function() {
|
||||
+ sendKeyframe(self.peerConn);
|
||||
+ }, 1000);*/
|
||||
});
|
||||
} else if (this.peerConn.iceConnectionState == 'failed') {
|
||||
this.hangup('ice_failed');
|
||||
@@ -518,6 +585,7 @@ angular.module('MatrixCall', [])
|
||||
|
||||
MatrixCall.prototype.onRemoteStreamEnded = function(event) {
|
||||
console.log("Remote stream ended");
|
||||
+ return;
|
||||
var self = this;
|
||||
$rootScope.$apply(function() {
|
||||
self.state = 'ended';
|
||||
diff --git a/syweb/webclient/app/components/matrix/matrix-phone-service.js b/syweb/webclient/app/components/matrix/matrix-phone-service.js
|
||||
index 55dbbf5..272fa27 100644
|
||||
--- a/syweb/webclient/app/components/matrix/matrix-phone-service.js
|
||||
+++ b/syweb/webclient/app/components/matrix/matrix-phone-service.js
|
||||
@@ -48,6 +48,13 @@ angular.module('matrixPhoneService', [])
|
||||
return;
|
||||
}
|
||||
|
||||
+ // do we already have an entry for this call ID?
|
||||
+ var existingEntry = matrixPhoneService.allCalls[msg.call_id];
|
||||
+ if (existingEntry) {
|
||||
+ existingEntry.receivedInvite(msg);
|
||||
+ return;
|
||||
+ }
|
||||
+
|
||||
var call = undefined;
|
||||
if (!isLive) {
|
||||
// if this event wasn't live then this call may already be over
|
||||
@@ -108,7 +115,7 @@ angular.module('matrixPhoneService', [])
|
||||
call.hangup();
|
||||
}
|
||||
} else {
|
||||
- $rootScope.$broadcast(matrixPhoneService.INCOMING_CALL_EVENT, call);
|
||||
+ $rootScope.$broadcast(matrixPhoneService.INCOMING_CALL_EVENT, call);
|
||||
}
|
||||
} else if (event.type == 'm.call.answer') {
|
||||
var call = matrixPhoneService.allCalls[msg.call_id];
|
||||
712
contrib/jitsimeetbridge/unjingle/strophe.jingle.sdp.js
Normal file
@@ -0,0 +1,712 @@
|
||||
/* jshint -W117 */
|
||||
// SDP STUFF
|
||||
function SDP(sdp) {
|
||||
this.media = sdp.split('\r\nm=');
|
||||
for (var i = 1; i < this.media.length; i++) {
|
||||
this.media[i] = 'm=' + this.media[i];
|
||||
if (i != this.media.length - 1) {
|
||||
this.media[i] += '\r\n';
|
||||
}
|
||||
}
|
||||
this.session = this.media.shift() + '\r\n';
|
||||
this.raw = this.session + this.media.join('');
|
||||
}
|
||||
|
||||
exports.SDP = SDP;
|
||||
|
||||
var jsdom = require("jsdom");
|
||||
var window = jsdom.jsdom().parentWindow;
|
||||
var $ = require('jquery')(window);
|
||||
|
||||
var SDPUtil = require('./strophe.jingle.sdp.util.js').SDPUtil;
|
||||
|
||||
/**
|
||||
* Returns map of MediaChannel mapped per channel idx.
|
||||
*/
|
||||
SDP.prototype.getMediaSsrcMap = function() {
|
||||
var self = this;
|
||||
var media_ssrcs = {};
|
||||
for (channelNum = 0; channelNum < self.media.length; channelNum++) {
|
||||
modified = true;
|
||||
tmp = SDPUtil.find_lines(self.media[channelNum], 'a=ssrc:');
|
||||
var type = SDPUtil.parse_mid(SDPUtil.find_line(self.media[channelNum], 'a=mid:'));
|
||||
var channel = new MediaChannel(channelNum, type);
|
||||
media_ssrcs[channelNum] = channel;
|
||||
tmp.forEach(function (line) {
|
||||
var linessrc = line.substring(7).split(' ')[0];
|
||||
// allocate new ChannelSsrc
|
||||
if(!channel.ssrcs[linessrc]) {
|
||||
channel.ssrcs[linessrc] = new ChannelSsrc(linessrc, type);
|
||||
}
|
||||
channel.ssrcs[linessrc].lines.push(line);
|
||||
});
|
||||
tmp = SDPUtil.find_lines(self.media[channelNum], 'a=ssrc-group:');
|
||||
tmp.forEach(function(line){
|
||||
var semantics = line.substr(0, idx).substr(13);
|
||||
var ssrcs = line.substr(14 + semantics.length).split(' ');
|
||||
if (ssrcs.length != 0) {
|
||||
var ssrcGroup = new ChannelSsrcGroup(semantics, ssrcs);
|
||||
channel.ssrcGroups.push(ssrcGroup);
|
||||
}
|
||||
});
|
||||
}
|
||||
return media_ssrcs;
|
||||
};
|
||||
/**
|
||||
* Returns <tt>true</tt> if this SDP contains given SSRC.
|
||||
* @param ssrc the ssrc to check.
|
||||
* @returns {boolean} <tt>true</tt> if this SDP contains given SSRC.
|
||||
*/
|
||||
SDP.prototype.containsSSRC = function(ssrc) {
|
||||
var channels = this.getMediaSsrcMap();
|
||||
var contains = false;
|
||||
Object.keys(channels).forEach(function(chNumber){
|
||||
var channel = channels[chNumber];
|
||||
//console.log("Check", channel, ssrc);
|
||||
if(Object.keys(channel.ssrcs).indexOf(ssrc) != -1){
|
||||
contains = true;
|
||||
}
|
||||
});
|
||||
return contains;
|
||||
};
|
||||
|
||||
/**
|
||||
* Returns map of MediaChannel that contains only media not contained in <tt>otherSdp</tt>. Mapped by channel idx.
|
||||
* @param otherSdp the other SDP to check ssrc with.
|
||||
*/
|
||||
SDP.prototype.getNewMedia = function(otherSdp) {
|
||||
|
||||
// this could be useful in Array.prototype.
|
||||
function arrayEquals(array) {
|
||||
// if the other array is a falsy value, return
|
||||
if (!array)
|
||||
return false;
|
||||
|
||||
// compare lengths - can save a lot of time
|
||||
if (this.length != array.length)
|
||||
return false;
|
||||
|
||||
for (var i = 0, l=this.length; i < l; i++) {
|
||||
// Check if we have nested arrays
|
||||
if (this[i] instanceof Array && array[i] instanceof Array) {
|
||||
// recurse into the nested arrays
|
||||
if (!this[i].equals(array[i]))
|
||||
return false;
|
||||
}
|
||||
else if (this[i] != array[i]) {
|
||||
// Warning - two different object instances will never be equal: {x:20} != {x:20}
|
||||
return false;
|
||||
}
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
var myMedia = this.getMediaSsrcMap();
|
||||
var othersMedia = otherSdp.getMediaSsrcMap();
|
||||
var newMedia = {};
|
||||
Object.keys(othersMedia).forEach(function(channelNum) {
|
||||
var myChannel = myMedia[channelNum];
|
||||
var othersChannel = othersMedia[channelNum];
|
||||
if(!myChannel && othersChannel) {
|
||||
// Add whole channel
|
||||
newMedia[channelNum] = othersChannel;
|
||||
return;
|
||||
}
|
||||
// Look for new ssrcs across the channel
|
||||
Object.keys(othersChannel.ssrcs).forEach(function(ssrc) {
|
||||
if(Object.keys(myChannel.ssrcs).indexOf(ssrc) === -1) {
|
||||
// Allocate channel if we've found ssrc that doesn't exist in our channel
|
||||
if(!newMedia[channelNum]){
|
||||
newMedia[channelNum] = new MediaChannel(othersChannel.chNumber, othersChannel.mediaType);
|
||||
}
|
||||
newMedia[channelNum].ssrcs[ssrc] = othersChannel.ssrcs[ssrc];
|
||||
}
|
||||
});
|
||||
|
||||
// Look for new ssrc groups across the channels
|
||||
othersChannel.ssrcGroups.forEach(function(otherSsrcGroup){
|
||||
|
||||
// try to match the other ssrc-group with an ssrc-group of ours
|
||||
var matched = false;
|
||||
for (var i = 0; i < myChannel.ssrcGroups.length; i++) {
|
||||
var mySsrcGroup = myChannel.ssrcGroups[i];
|
||||
if (otherSsrcGroup.semantics == mySsrcGroup.semantics
|
||||
&& arrayEquals.apply(otherSsrcGroup.ssrcs, [mySsrcGroup.ssrcs])) {
|
||||
|
||||
matched = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if (!matched) {
|
||||
// Allocate channel if we've found an ssrc-group that doesn't
|
||||
// exist in our channel
|
||||
|
||||
if(!newMedia[channelNum]){
|
||||
newMedia[channelNum] = new MediaChannel(othersChannel.chNumber, othersChannel.mediaType);
|
||||
}
|
||||
newMedia[channelNum].ssrcGroups.push(otherSsrcGroup);
|
||||
}
|
||||
});
|
||||
});
|
||||
return newMedia;
|
||||
};
|
||||
|
||||
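// A minimal usage sketch for the SSRC helpers above (getMediaSsrcMap,
// containsSSRC, getNewMedia); the variable names and the way the SDP strings
// are obtained here are illustrative only:
//
//     var current = new SDP(pc.remoteDescription.sdp);
//     var updated = new SDP(newRemoteSdpString);
//     if (!current.containsSSRC('12345')) {
//         // MediaChannels present in `updated` but not in `current`, keyed by channel index
//         var added = current.getNewMedia(updated);
//     }
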
// remove iSAC and CN from SDP
SDP.prototype.mangle = function () {
    var i, j, mline, lines, rtpmap, newdesc;
    for (i = 0; i < this.media.length; i++) {
        lines = this.media[i].split('\r\n');
        lines.pop(); // remove empty last element
        mline = SDPUtil.parse_mline(lines.shift());
        if (mline.media != 'audio')
            continue;
        newdesc = '';
        mline.fmt.length = 0;
        for (j = 0; j < lines.length; j++) {
            if (lines[j].substr(0, 9) == 'a=rtpmap:') {
                rtpmap = SDPUtil.parse_rtpmap(lines[j]);
                if (rtpmap.name == 'CN' || rtpmap.name == 'ISAC')
                    continue;
                mline.fmt.push(rtpmap.id);
                newdesc += lines[j] + '\r\n';
            } else {
                newdesc += lines[j] + '\r\n';
            }
        }
        this.media[i] = SDPUtil.build_mline(mline) + '\r\n';
        this.media[i] += newdesc;
    }
    this.raw = this.session + this.media.join('');
};

// remove lines matching prefix from session section
SDP.prototype.removeSessionLines = function(prefix) {
    var self = this;
    var lines = SDPUtil.find_lines(this.session, prefix);
    lines.forEach(function(line) {
        self.session = self.session.replace(line + '\r\n', '');
    });
    this.raw = this.session + this.media.join('');
    return lines;
};
// remove lines matching prefix from a media section specified by mediaindex
// TODO: non-numeric mediaindex could match mid
SDP.prototype.removeMediaLines = function(mediaindex, prefix) {
    var self = this;
    var lines = SDPUtil.find_lines(this.media[mediaindex], prefix);
    lines.forEach(function(line) {
        self.media[mediaindex] = self.media[mediaindex].replace(line + '\r\n', '');
    });
    this.raw = this.session + this.media.join('');
    return lines;
};

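/**
 * Serialises this SDP into Jingle <content/> elements appended to the given
 * builder. A minimal usage sketch, assuming a Strophe IQ builder; the JIDs,
 * sid and peer connection below are illustrative, not part of this file:
 *
 *     var init = $iq({to: peerJid, type: 'set'})
 *         .c('jingle', {xmlns: 'urn:xmpp:jingle:1', action: 'session-initiate',
 *                       initiator: myJid, sid: sid});
 *     new SDP(pc.localDescription.sdp).toJingle(init, 'initiator');
 *
 * @param elem a Strophe builder positioned at the <jingle/> element.
 * @param thecreator value used for each content's 'creator' attribute
 *        ('initiator' or 'responder').
 */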
// add contents to a jingle element
SDP.prototype.toJingle = function (elem, thecreator) {
    var i, j, k, mline, ssrc, rtpmap, tmp, line, lines;
    var self = this;
    // new bundle plan
    if (SDPUtil.find_line(this.session, 'a=group:')) {
        lines = SDPUtil.find_lines(this.session, 'a=group:');
        for (i = 0; i < lines.length; i++) {
            tmp = lines[i].split(' ');
            var semantics = tmp.shift().substr(8);
            elem.c('group', {xmlns: 'urn:xmpp:jingle:apps:grouping:0', semantics:semantics});
            for (j = 0; j < tmp.length; j++) {
                elem.c('content', {name: tmp[j]}).up();
            }
            elem.up();
        }
    }
    // old bundle plan, to be removed
    var bundle = [];
    if (SDPUtil.find_line(this.session, 'a=group:BUNDLE')) {
        bundle = SDPUtil.find_line(this.session, 'a=group:BUNDLE ').split(' ');
        bundle.shift();
    }
    for (i = 0; i < this.media.length; i++) {
        mline = SDPUtil.parse_mline(this.media[i].split('\r\n')[0]);
        if (!(mline.media === 'audio' ||
              mline.media === 'video' ||
              mline.media === 'application'))
        {
            continue;
        }
        if (SDPUtil.find_line(this.media[i], 'a=ssrc:')) {
            ssrc = SDPUtil.find_line(this.media[i], 'a=ssrc:').substring(7).split(' ')[0]; // take the first
        } else {
            ssrc = false;
        }

        elem.c('content', {creator: thecreator, name: mline.media});
        if (SDPUtil.find_line(this.media[i], 'a=mid:')) {
            // prefer identifier from a=mid if present
            var mid = SDPUtil.parse_mid(SDPUtil.find_line(this.media[i], 'a=mid:'));
            elem.attrs({ name: mid });

            // old BUNDLE plan, to be removed
            if (bundle.indexOf(mid) !== -1) {
                elem.c('bundle', {xmlns: 'http://estos.de/ns/bundle'}).up();
                bundle.splice(bundle.indexOf(mid), 1);
            }
        }

        if (SDPUtil.find_line(this.media[i], 'a=rtpmap:').length)
        {
            elem.c('description',
                   {xmlns: 'urn:xmpp:jingle:apps:rtp:1',
                    media: mline.media });
            if (ssrc) {
                elem.attrs({ssrc: ssrc});
            }
            for (j = 0; j < mline.fmt.length; j++) {
                rtpmap = SDPUtil.find_line(this.media[i], 'a=rtpmap:' + mline.fmt[j]);
                elem.c('payload-type', SDPUtil.parse_rtpmap(rtpmap));
                // put any 'a=fmtp:' + mline.fmt[j] lines into <param name=foo value=bar/>
                if (SDPUtil.find_line(this.media[i], 'a=fmtp:' + mline.fmt[j])) {
                    tmp = SDPUtil.parse_fmtp(SDPUtil.find_line(this.media[i], 'a=fmtp:' + mline.fmt[j]));
                    for (k = 0; k < tmp.length; k++) {
                        elem.c('parameter', tmp[k]).up();
                    }
                }
                this.RtcpFbToJingle(i, elem, mline.fmt[j]); // XEP-0293 -- map a=rtcp-fb

                elem.up();
            }
            if (SDPUtil.find_line(this.media[i], 'a=crypto:', this.session)) {
                elem.c('encryption', {required: 1});
                var crypto = SDPUtil.find_lines(this.media[i], 'a=crypto:', this.session);
                crypto.forEach(function(line) {
                    elem.c('crypto', SDPUtil.parse_crypto(line)).up();
                });
                elem.up(); // end of encryption
            }

            if (ssrc) {
                // new style mapping
                elem.c('source', { ssrc: ssrc, xmlns: 'urn:xmpp:jingle:apps:rtp:ssma:0' });
                // FIXME: group by ssrc and support multiple different ssrcs
                var ssrclines = SDPUtil.find_lines(this.media[i], 'a=ssrc:');
                ssrclines.forEach(function(line) {
                    idx = line.indexOf(' ');
                    var linessrc = line.substr(0, idx).substr(7);
                    if (linessrc != ssrc) {
                        elem.up();
                        ssrc = linessrc;
                        elem.c('source', { ssrc: ssrc, xmlns: 'urn:xmpp:jingle:apps:rtp:ssma:0' });
                    }
                    var kv = line.substr(idx + 1);
                    elem.c('parameter');
                    if (kv.indexOf(':') == -1) {
                        elem.attrs({ name: kv });
                    } else {
                        elem.attrs({ name: kv.split(':', 2)[0] });
                        elem.attrs({ value: kv.split(':', 2)[1] });
                    }
                    elem.up();
                });
                elem.up();

                // old proprietary mapping, to be removed at some point
                tmp = SDPUtil.parse_ssrc(this.media[i]);
                tmp.xmlns = 'http://estos.de/ns/ssrc';
                tmp.ssrc = ssrc;
                elem.c('ssrc', tmp).up(); // ssrc is part of description

                // XEP-0339 handle ssrc-group attributes
                var ssrc_group_lines = SDPUtil.find_lines(this.media[i], 'a=ssrc-group:');
                ssrc_group_lines.forEach(function(line) {
                    idx = line.indexOf(' ');
                    var semantics = line.substr(0, idx).substr(13);
                    var ssrcs = line.substr(14 + semantics.length).split(' ');
                    if (ssrcs.length != 0) {
                        elem.c('ssrc-group', { semantics: semantics, xmlns: 'urn:xmpp:jingle:apps:rtp:ssma:0' });
                        ssrcs.forEach(function(ssrc) {
                            elem.c('source', { ssrc: ssrc })
                                .up();
                        });
                        elem.up();
                    }
                });
            }

            if (SDPUtil.find_line(this.media[i], 'a=rtcp-mux')) {
                elem.c('rtcp-mux').up();
            }

            // XEP-0293 -- map a=rtcp-fb:*
            this.RtcpFbToJingle(i, elem, '*');

            // XEP-0294
            if (SDPUtil.find_line(this.media[i], 'a=extmap:')) {
                lines = SDPUtil.find_lines(this.media[i], 'a=extmap:');
                for (j = 0; j < lines.length; j++) {
                    tmp = SDPUtil.parse_extmap(lines[j]);
                    elem.c('rtp-hdrext', { xmlns: 'urn:xmpp:jingle:apps:rtp:rtp-hdrext:0',
                                           uri: tmp.uri,
                                           id: tmp.value });
                    if (tmp.hasOwnProperty('direction')) {
                        switch (tmp.direction) {
                            case 'sendonly':
                                elem.attrs({senders: 'responder'});
                                break;
                            case 'recvonly':
                                elem.attrs({senders: 'initiator'});
                                break;
                            case 'sendrecv':
                                elem.attrs({senders: 'both'});
                                break;
                            case 'inactive':
                                elem.attrs({senders: 'none'});
                                break;
                        }
                    }
                    // TODO: handle params
                    elem.up();
                }
            }
            elem.up(); // end of description
        }

        // map ice-ufrag/pwd, dtls fingerprint, candidates
        this.TransportToJingle(i, elem);

        if (SDPUtil.find_line(this.media[i], 'a=sendrecv', this.session)) {
            elem.attrs({senders: 'both'});
        } else if (SDPUtil.find_line(this.media[i], 'a=sendonly', this.session)) {
            elem.attrs({senders: 'initiator'});
        } else if (SDPUtil.find_line(this.media[i], 'a=recvonly', this.session)) {
            elem.attrs({senders: 'responder'});
        } else if (SDPUtil.find_line(this.media[i], 'a=inactive', this.session)) {
            elem.attrs({senders: 'none'});
        }
        if (mline.port == '0') {
            // estos hack to reject an m-line
            elem.attrs({senders: 'rejected'});
        }
        elem.up(); // end of content
    }
    elem.up();
    return elem;
};

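/**
 * Adds a <transport/> child for media section <tt>mediaindex</tt> to the given
 * builder: the ICE ufrag/pwd, DTLS fingerprints and setup (XEP-0320), any SCTP
 * map (XEP-0343) and ICE candidates (XEP-0176) found in the SDP.
 */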
SDP.prototype.TransportToJingle = function (mediaindex, elem) {
    var i = mediaindex;
    var tmp;
    var self = this;
    elem.c('transport');

    // XEP-0343 DTLS/SCTP
    if (SDPUtil.find_line(this.media[mediaindex], 'a=sctpmap:').length)
    {
        var sctpmap = SDPUtil.find_line(
            this.media[i], 'a=sctpmap:', self.session);
        if (sctpmap)
        {
            var sctpAttrs = SDPUtil.parse_sctpmap(sctpmap);
            elem.c('sctpmap',
                {
                    xmlns: 'urn:xmpp:jingle:transports:dtls-sctp:1',
                    number: sctpAttrs[0], /* SCTP port */
                    protocol: sctpAttrs[1], /* protocol */
                });
            // Optional stream count attribute
            if (sctpAttrs.length > 2)
                elem.attrs({ streams: sctpAttrs[2]});
            elem.up();
        }
    }
    // XEP-0320
    var fingerprints = SDPUtil.find_lines(this.media[mediaindex], 'a=fingerprint:', this.session);
    fingerprints.forEach(function(line) {
        tmp = SDPUtil.parse_fingerprint(line);
        tmp.xmlns = 'urn:xmpp:jingle:apps:dtls:0';
        elem.c('fingerprint').t(tmp.fingerprint);
        delete tmp.fingerprint;
        line = SDPUtil.find_line(self.media[mediaindex], 'a=setup:', self.session);
        if (line) {
            tmp.setup = line.substr(8);
        }
        elem.attrs(tmp);
        elem.up(); // end of fingerprint
    });
    tmp = SDPUtil.iceparams(this.media[mediaindex], this.session);
    if (tmp) {
        tmp.xmlns = 'urn:xmpp:jingle:transports:ice-udp:1';
        elem.attrs(tmp);
        // XEP-0176
        if (SDPUtil.find_line(this.media[mediaindex], 'a=candidate:', this.session)) { // add any a=candidate lines
            var lines = SDPUtil.find_lines(this.media[mediaindex], 'a=candidate:', this.session);
            lines.forEach(function (line) {
                elem.c('candidate', SDPUtil.candidateToJingle(line)).up();
            });
        }
    }
    elem.up(); // end of transport
};

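/**
 * Maps 'a=rtcp-fb:' lines (XEP-0293) for the given payload type of media
 * section <tt>mediaindex</tt> onto <rtcp-fb/> and <rtcp-fb-trr-int/> children
 * of the given builder.
 */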
SDP.prototype.RtcpFbToJingle = function (mediaindex, elem, payloadtype) { // XEP-0293
    var lines = SDPUtil.find_lines(this.media[mediaindex], 'a=rtcp-fb:' + payloadtype);
    lines.forEach(function (line) {
        var tmp = SDPUtil.parse_rtcpfb(line);
        if (tmp.type == 'trr-int') {
            elem.c('rtcp-fb-trr-int', {xmlns: 'urn:xmpp:jingle:apps:rtp:rtcp-fb:0', value: tmp.params[0]});
            elem.up();
        } else {
            elem.c('rtcp-fb', {xmlns: 'urn:xmpp:jingle:apps:rtp:rtcp-fb:0', type: tmp.type});
            if (tmp.params.length > 0) {
                elem.attrs({'subtype': tmp.params[0]});
            }
            elem.up();
        }
    });
};

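/**
 * Builds the 'a=rtcp-fb:' SDP lines (XEP-0293) for the given payload type from
 * the <rtcp-fb/> and <rtcp-fb-trr-int/> children of a Jingle element and
 * returns them as a string.
 */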
SDP.prototype.RtcpFbFromJingle = function (elem, payloadtype) { // XEP-0293
    var media = '';
    var tmp = elem.find('>rtcp-fb-trr-int[xmlns="urn:xmpp:jingle:apps:rtp:rtcp-fb:0"]');
    if (tmp.length) {
        media += 'a=rtcp-fb:' + '*' + ' ' + 'trr-int' + ' ';
        if (tmp.attr('value')) {
            media += tmp.attr('value');
        } else {
            media += '0';
        }
        media += '\r\n';
    }
    tmp = elem.find('>rtcp-fb[xmlns="urn:xmpp:jingle:apps:rtp:rtcp-fb:0"]');
    tmp.each(function () {
        media += 'a=rtcp-fb:' + payloadtype + ' ' + $(this).attr('type');
        if ($(this).attr('subtype')) {
            media += ' ' + $(this).attr('subtype');
        }
        media += '\r\n';
    });
    return media;
};

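/**
 * Populates this.raw / this.session / this.media from a Jingle stanza.
 * A minimal usage sketch, assuming an incoming session-initiate IQ; the
 * variable names and the peer connection are illustrative only:
 *
 *     var sdp = new SDP('');
 *     sdp.fromJingle($(iq).find('>jingle'));
 *     peerconnection.setRemoteDescription(
 *         new RTCSessionDescription({type: 'offer', sdp: sdp.raw}));
 *
 * @param jingle a jQuery selection containing the <jingle/> element.
 */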
// construct an SDP from a jingle stanza
SDP.prototype.fromJingle = function (jingle) {
    var self = this;
    this.raw = 'v=0\r\n' +
        'o=- ' + '1923518516' + ' 2 IN IP4 0.0.0.0\r\n' + // FIXME
        's=-\r\n' +
        't=0 0\r\n';
    // http://tools.ietf.org/html/draft-ietf-mmusic-sdp-bundle-negotiation-04#section-8
    if ($(jingle).find('>group[xmlns="urn:xmpp:jingle:apps:grouping:0"]').length) {
        $(jingle).find('>group[xmlns="urn:xmpp:jingle:apps:grouping:0"]').each(function (idx, group) {
            var contents = $(group).find('>content').map(function (idx, content) {
                return content.getAttribute('name');
            }).get();
            if (contents.length > 0) {
                self.raw += 'a=group:' + (group.getAttribute('semantics') || group.getAttribute('type')) + ' ' + contents.join(' ') + '\r\n';
            }
        });
    } else if ($(jingle).find('>group[xmlns="urn:ietf:rfc:5888"]').length) {
        // temporary namespace, not to be used. to be removed soon.
        $(jingle).find('>group[xmlns="urn:ietf:rfc:5888"]').each(function (idx, group) {
            var contents = $(group).find('>content').map(function (idx, content) {
                return content.getAttribute('name');
            }).get();
            if (group.getAttribute('type') !== null && contents.length > 0) {
                self.raw += 'a=group:' + group.getAttribute('type') + ' ' + contents.join(' ') + '\r\n';
            }
        });
    } else {
        // for backward compatibility, to be removed soon
        // assume all contents are in the same bundle group, can be improved upon later
        var bundle = $(jingle).find('>content').filter(function (idx, content) {
            //elem.c('bundle', {xmlns:'http://estos.de/ns/bundle'});
            return $(content).find('>bundle').length > 0;
        }).map(function (idx, content) {
            return content.getAttribute('name');
        }).get();
        if (bundle.length) {
            this.raw += 'a=group:BUNDLE ' + bundle.join(' ') + '\r\n';
        }
    }

    this.session = this.raw;
    jingle.find('>content').each(function () {
        var m = self.jingle2media($(this));
        self.media.push(m);
    });

    // reconstruct msid-semantic -- apparently not necessary
    /*
    var msid = SDPUtil.parse_ssrc(this.raw);
    if (msid.hasOwnProperty('mslabel')) {
        this.session += "a=msid-semantic: WMS " + msid.mslabel + "\r\n";
    }
    */

    this.raw = this.session + this.media.join('');
};

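/**
 * Translates a single Jingle <content/> element into the corresponding SDP
 * media section (m= line, connection data, ICE/DTLS attributes, payload types,
 * header extensions and ssrc lines) and returns it as a string.
 * @param content a jQuery selection containing the <content/> element.
 */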
// translate a jingle content element into an SDP media part
SDP.prototype.jingle2media = function (content) {
    var media = '',
        desc = content.find('description'),
        ssrc = desc.attr('ssrc'),
        self = this,
        tmp;
    var sctp = content.find(
        '>transport>sctpmap[xmlns="urn:xmpp:jingle:transports:dtls-sctp:1"]');

    tmp = { media: desc.attr('media') };
    tmp.port = '1';
    if (content.attr('senders') == 'rejected') {
        // estos hack to reject an m-line.
        tmp.port = '0';
    }
    if (content.find('>transport>fingerprint').length || desc.find('encryption').length) {
        if (sctp.length)
            tmp.proto = 'DTLS/SCTP';
        else
            tmp.proto = 'RTP/SAVPF';
    } else {
        tmp.proto = 'RTP/AVPF';
    }
    if (!sctp.length)
    {
        tmp.fmt = desc.find('payload-type').map(
            function () { return this.getAttribute('id'); }).get();
        media += SDPUtil.build_mline(tmp) + '\r\n';
    }
    else
    {
        media += 'm=application 1 DTLS/SCTP ' + sctp.attr('number') + '\r\n';
        media += 'a=sctpmap:' + sctp.attr('number') +
            ' ' + sctp.attr('protocol');

        var streamCount = sctp.attr('streams');
        if (streamCount)
            media += ' ' + streamCount + '\r\n';
        else
            media += '\r\n';
    }

    media += 'c=IN IP4 0.0.0.0\r\n';
    if (!sctp.length)
        media += 'a=rtcp:1 IN IP4 0.0.0.0\r\n';
    //tmp = content.find('>transport[xmlns="urn:xmpp:jingle:transports:ice-udp:1"]');
    tmp = content.find('>bundle>transport[xmlns="urn:xmpp:jingle:transports:ice-udp:1"]');
    //console.log('transports: '+content.find('>transport[xmlns="urn:xmpp:jingle:transports:ice-udp:1"]').length);
    //console.log('bundle.transports: '+content.find('>bundle>transport[xmlns="urn:xmpp:jingle:transports:ice-udp:1"]').length);
    //console.log("tmp fingerprint: "+tmp.find('>fingerprint').innerHTML);
    if (tmp.length) {
        if (tmp.attr('ufrag')) {
            media += SDPUtil.build_iceufrag(tmp.attr('ufrag')) + '\r\n';
        }
        if (tmp.attr('pwd')) {
            media += SDPUtil.build_icepwd(tmp.attr('pwd')) + '\r\n';
        }
        tmp.find('>fingerprint').each(function () {
            // FIXME: check namespace at some point
            media += 'a=fingerprint:' + this.getAttribute('hash');
            media += ' ' + $(this).text();
            media += '\r\n';
            //console.log("mline "+media);
            if (this.getAttribute('setup')) {
                media += 'a=setup:' + this.getAttribute('setup') + '\r\n';
            }
        });
    }
    switch (content.attr('senders')) {
        case 'initiator':
            media += 'a=sendonly\r\n';
            break;
        case 'responder':
            media += 'a=recvonly\r\n';
            break;
        case 'none':
            media += 'a=inactive\r\n';
            break;
        case 'both':
            media += 'a=sendrecv\r\n';
            break;
    }
    media += 'a=mid:' + content.attr('name') + '\r\n';
    /*if (content.attr('name') == 'video') {
        media += 'a=x-google-flag:conference' + '\r\n';
    }*/

    // <description><rtcp-mux/></description>
    // see http://code.google.com/p/libjingle/issues/detail?id=309 -- no spec though
    // and http://mail.jabber.org/pipermail/jingle/2011-December/001761.html
    if (desc.find('rtcp-mux').length) {
        media += 'a=rtcp-mux\r\n';
    }

    if (desc.find('encryption').length) {
        desc.find('encryption>crypto').each(function () {
            media += 'a=crypto:' + this.getAttribute('tag');
            media += ' ' + this.getAttribute('crypto-suite');
            media += ' ' + this.getAttribute('key-params');
            if (this.getAttribute('session-params')) {
                media += ' ' + this.getAttribute('session-params');
            }
            media += '\r\n';
        });
    }
    desc.find('payload-type').each(function () {
        media += SDPUtil.build_rtpmap(this) + '\r\n';
        if ($(this).find('>parameter').length) {
            media += 'a=fmtp:' + this.getAttribute('id') + ' ';
            media += $(this).find('parameter').map(function () { return (this.getAttribute('name') ? (this.getAttribute('name') + '=') : '') + this.getAttribute('value'); }).get().join('; ');
            media += '\r\n';
        }
        // xep-0293
        media += self.RtcpFbFromJingle($(this), this.getAttribute('id'));
    });

    // xep-0293
    media += self.RtcpFbFromJingle(desc, '*');

    // xep-0294
    tmp = desc.find('>rtp-hdrext[xmlns="urn:xmpp:jingle:apps:rtp:rtp-hdrext:0"]');
    tmp.each(function () {
        media += 'a=extmap:' + this.getAttribute('id') + ' ' + this.getAttribute('uri') + '\r\n';
    });

    content.find('>bundle>transport[xmlns="urn:xmpp:jingle:transports:ice-udp:1"]>candidate').each(function () {
        media += SDPUtil.candidateFromJingle(this);
    });

    // XEP-0339 handle ssrc-group attributes
    tmp = content.find('description>ssrc-group[xmlns="urn:xmpp:jingle:apps:rtp:ssma:0"]').each(function() {
        var semantics = this.getAttribute('semantics');
        var ssrcs = $(this).find('>source').map(function() {
            return this.getAttribute('ssrc');
        }).get();

        if (ssrcs.length != 0) {
            media += 'a=ssrc-group:' + semantics + ' ' + ssrcs.join(' ') + '\r\n';
        }
    });

    tmp = content.find('description>source[xmlns="urn:xmpp:jingle:apps:rtp:ssma:0"]');
    tmp.each(function () {
        var ssrc = this.getAttribute('ssrc');
        $(this).find('>parameter').each(function () {
            media += 'a=ssrc:' + ssrc + ' ' + this.getAttribute('name');
            if (this.getAttribute('value') && this.getAttribute('value').length)
                media += ':' + this.getAttribute('value');
            media += '\r\n';
        });
    });

    if (tmp.length === 0) {
        // fallback to proprietary mapping of a=ssrc lines
        tmp = content.find('description>ssrc[xmlns="http://estos.de/ns/ssrc"]');
        if (tmp.length) {
            media += 'a=ssrc:' + ssrc + ' cname:' + tmp.attr('cname') + '\r\n';
            media += 'a=ssrc:' + ssrc + ' msid:' + tmp.attr('msid') + '\r\n';
            media += 'a=ssrc:' + ssrc + ' mslabel:' + tmp.attr('mslabel') + '\r\n';
            media += 'a=ssrc:' + ssrc + ' label:' + tmp.attr('label') + '\r\n';
        }
    }
    return media;
};