Compare commits
17 Commits
mv/complem
...
squah/leav
| Author | SHA1 | Date |
|---|---|---|
|  | 2e2f5c043e |  |
|  | 07580acdc0 |  |
|  | 64c56177a4 |  |
|  | 94586c596f |  |
|  | 856656a8ee |  |
|  | e5751a6350 |  |
|  | b105beafdb |  |
|  | f77da61ce8 |  |
|  | 8627a456e3 |  |
|  | b43d085472 |  |
|  | ab89c60702 |  |
|  | b8c228ae98 |  |
|  | 3371ec0b85 |  |
|  | 75be1be9d5 |  |
|  | f004687410 |  |
|  | 98873d7be3 |  |
|  | 17bc6167d6 |  |
@@ -1,91 +0,0 @@
{{- /*gotype: github.com/haveyoudebuggedit/gotestfmt/parser.Package*/ -}}
{{- /*
This template contains the format for an individual package. GitHub actions does not currently support nested groups so
we are creating a stylized header for each package.

This template is based on https://github.com/haveyoudebuggedit/gotestfmt/blob/f179b0e462a9dcf7101515d87eec4e4d7e58b92a/.gotestfmt/github/package.gotpl
which is under the Unlicense licence.
*/ -}}
{{- $settings := .Settings -}}
{{- if and (or (not $settings.HideSuccessfulPackages) (ne .Result "PASS")) (or (not $settings.HideEmptyPackages) (ne .Result "SKIP") (ne (len .TestCases) 0)) -}}
{{- if eq .Result "PASS" -}}
{{ "\033" }}[0;32m
{{- else if eq .Result "SKIP" -}}
{{ "\033" }}[0;33m
{{- else -}}
{{ "\033" }}[0;31m
{{- end -}}
📦 {{ .Name }}{{- "\033" }}[0m
{{- with .Coverage -}}
{{- "\033" -}}[0;37m ({{ . }}% coverage){{- "\033" -}}[0m
{{- end -}}
{{- "\n" -}}
{{- with .Reason -}}
{{- " " -}}🛑 {{ . -}}{{- "\n" -}}
{{- end -}}
{{- with .Output -}}
{{- . -}}{{- "\n" -}}
{{- end -}}
{{- with .TestCases -}}
{{- /* Passing tests are first */ -}}
{{- range . -}}
{{- if eq .Result "PASS" -}}
::group::{{ "\033" }}[0;32m✅{{ " " }}{{- .Name -}}
{{- "\033" -}}[0;37m ({{if $settings.ShowTestStatus}}{{.Result}}; {{end}}{{ .Duration -}}
{{- with .Coverage -}}
, coverage: {{ . }}%
{{- end -}})
{{- "\033" -}}[0m
{{- "\n" -}}

{{- with .Output -}}
{{- formatTestOutput . $settings -}}
{{- "\n" -}}
{{- end -}}

::endgroup::{{- "\n" -}}
{{- end -}}
{{- end -}}

{{- /* Then skipped tests are second */ -}}
{{- range . -}}
{{- if eq .Result "SKIP" -}}
::group::{{ "\033" }}[0;33m🚧{{ " " }}{{- .Name -}}
{{- "\033" -}}[0;37m ({{if $settings.ShowTestStatus}}{{.Result}}; {{end}}{{ .Duration -}}
{{- with .Coverage -}}
, coverage: {{ . }}%
{{- end -}})
{{- "\033" -}}[0m
{{- "\n" -}}

{{- with .Output -}}
{{- formatTestOutput . $settings -}}
{{- "\n" -}}
{{- end -}}

::endgroup::{{- "\n" -}}
{{- end -}}
{{- end -}}

{{- /* and failing tests are last */ -}}
{{- range . -}}
{{- if and (ne .Result "PASS") (ne .Result "SKIP") -}}
::group::{{ "\033" }}[0;31m❌{{ " " }}{{- .Name -}}
{{- "\033" -}}[0;37m ({{if $settings.ShowTestStatus}}{{.Result}}; {{end}}{{ .Duration -}}
{{- with .Coverage -}}
, coverage: {{ . }}%
{{- end -}})
{{- "\033" -}}[0m
{{- "\n" -}}

{{- with .Output -}}
{{- formatTestOutput . $settings -}}
{{- "\n" -}}
{{- end -}}

::endgroup::{{- "\n" -}}
{{- end -}}
{{- end -}}
{{- end -}}
{{- "\n" -}}
{{- end -}}
@@ -1,4 +0,0 @@
---
title: CI run against latest deps is failing
---
See https://github.com/{{env.GITHUB_REPOSITORY}}/actions/runs/{{env.GITHUB_RUN_ID}}
8  .ci/patch_for_twisted_trunk.sh  Executable file
@@ -0,0 +1,8 @@
#!/bin/sh

# replaces the dependency on Twisted in `python_dependencies` with trunk.

set -e
cd "$(dirname "$0")"/..

sed -i -e 's#"Twisted.*"#"Twisted @ git+https://github.com/twisted/twisted"#' synapse/python_dependencies.py
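For illustration only (not part of this diff): the `sed` expression above rewrites whatever pinned Twisted requirement it finds in `synapse/python_dependencies.py` so that it points at Twisted's trunk. A minimal sketch, assuming a hypothetical pinned entry such as `"Twisted>=18.9.0",`:

```sh
# Hypothetical input line; the real entry in python_dependencies.py may differ.
printf '%s\n' '"Twisted>=18.9.0",' \
  | sed -e 's#"Twisted.*"#"Twisted @ git+https://github.com/twisted/twisted"#'
# prints: "Twisted @ git+https://github.com/twisted/twisted",
```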
@@ -1,128 +0,0 @@
#!/usr/bin/env python
# Copyright 2022 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Calculate the trial jobs to run based on if we're in a PR or not.

import json
import os

IS_PR = os.environ["GITHUB_REF"].startswith("refs/pull/")

# First calculate the various trial jobs.
#
# For each type of test we only run on Py3.7 on PRs

trial_sqlite_tests = [
    {
        "python-version": "3.7",
        "database": "sqlite",
        "extras": "all",
    }
]

if not IS_PR:
    trial_sqlite_tests.extend(
        {
            "python-version": version,
            "database": "sqlite",
            "extras": "all",
        }
        for version in ("3.8", "3.9", "3.10")
    )


trial_postgres_tests = [
    {
        "python-version": "3.7",
        "database": "postgres",
        "postgres-version": "10",
        "extras": "all",
    }
]

if not IS_PR:
    trial_postgres_tests.append(
        {
            "python-version": "3.10",
            "database": "postgres",
            "postgres-version": "14",
            "extras": "all",
        }
    )

trial_no_extra_tests = [
    {
        "python-version": "3.7",
        "database": "sqlite",
        "extras": "",
    }
]

print("::group::Calculated trial jobs")
print(
    json.dumps(
        trial_sqlite_tests + trial_postgres_tests + trial_no_extra_tests, indent=4
    )
)
print("::endgroup::")

test_matrix = json.dumps(
    trial_sqlite_tests + trial_postgres_tests + trial_no_extra_tests
)
print(f"::set-output name=trial_test_matrix::{test_matrix}")


# First calculate the various sytest jobs.
#
# For each type of test we only run on focal on PRs


sytest_tests = [
    {
        "sytest-tag": "focal",
    },
    {
        "sytest-tag": "focal",
        "postgres": "postgres",
    },
    {
        "sytest-tag": "focal",
        "postgres": "multi-postgres",
        "workers": "workers",
    },
]

if not IS_PR:
    sytest_tests.extend(
        [
            {
                "sytest-tag": "testing",
                "postgres": "postgres",
            },
            {
                "sytest-tag": "buster",
                "postgres": "multi-postgres",
                "workers": "workers",
            },
        ]
    )


print("::group::Calculated sytest jobs")
print(json.dumps(sytest_tests, indent=4))
print("::endgroup::")

test_matrix = json.dumps(sytest_tests)
print(f"::set-output name=sytest_test_matrix::{test_matrix}")
@@ -1,25 +0,0 @@
#!/bin/bash
#
# Fetches a version of complement which best matches the current build.
#
# The tarball is unpacked into `./complement`.

set -e
mkdir -p complement

# Pick an appropriate version of complement. Depending on whether this is a PR or release,
# etc. we need to use different fallbacks:
#
# 1. First check if there's a similarly named branch (GITHUB_HEAD_REF
#    for pull requests, otherwise GITHUB_REF).
# 2. Attempt to use the base branch, e.g. when merging into release-vX.Y
#    (GITHUB_BASE_REF for pull requests).
# 3. Use the default complement branch ("HEAD").
for BRANCH_NAME in "$GITHUB_HEAD_REF" "$GITHUB_BASE_REF" "${GITHUB_REF#refs/heads/}" "HEAD"; do
    # Skip empty branch names and merge commits.
    if [[ -z "$BRANCH_NAME" || $BRANCH_NAME =~ ^refs/pull/.* ]]; then
        continue
    fi

    (wget -O - "https://github.com/matrix-org/complement/archive/$BRANCH_NAME.tar.gz" | tar -xz --strip-components=1 -C complement) && break
done
@@ -1,21 +0,0 @@
#!/bin/bash
#
# wraps `gotestfmt`, hiding output from successful packages unless
# all tests passed.

set -o pipefail
set -e

# tee the test results to a log, whilst also piping them into gotestfmt,
# telling it to hide successful results, so that we can clearly see
# unsuccessful results.
tee complement.log | gotestfmt -hide successful-packages

# gotestfmt will exit non-zero if there were any failures, so if we got to this
# point, we must have had a successful result.
echo "All tests successful; showing all test results"

# Pipe the test results back through gotestfmt, showing all results.
# The log file consists of JSON lines giving the test results, interspersed
# with regular stdout lines (including reports of downloaded packages).
grep '^{"Time":' complement.log | gotestfmt
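A minimal usage sketch for the wrapper above, under the assumptions that it is installed as `.ci/scripts/gotestfmt` and that `gotestfmt` itself is on the `PATH`; elsewhere in this diff the Complement workflow pipes its `go test -json` stream into it in the same way:

```sh
# Hypothetical invocation: stream JSON test output through the wrapper, which
# logs everything to complement.log and hides successful packages unless the
# whole run passes.
go test -json ./tests/... 2>&1 | .ci/scripts/gotestfmt
```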
@@ -1,64 +0,0 @@
#!/usr/bin/env bash
# this script is run by GitHub Actions in a plain `focal` container; it
# - installs the minimal system requirements, and poetry;
# - patches the project definition file to refer to old versions only;
# - creates a venv with these old versions using poetry; and finally
# - invokes `trial` to run the tests with old deps.

set -ex

# Prevent virtualenv from auto-updating pip to an incompatible version
export VIRTUALENV_NO_DOWNLOAD=1

# TODO: in the future, we could use an implementation of
#   https://github.com/python-poetry/poetry/issues/3527
#   https://github.com/pypa/pip/issues/8085
# to select the lowest possible versions, rather than resorting to this sed script.

# Patch the project definitions in-place:
# - Replace all lower and tilde bounds with exact bounds
# - Replace all caret bounds---but not the one that defines the supported Python version!
# - Delete all lines referring to psycopg2 --- so no testing of postgres support.
# - Use pyopenssl 17.0, which is the oldest version that works with
#   a `cryptography` compiled against OpenSSL 1.1.
# - Omit systemd: we're not logging to journal here.

sed -i \
   -e "s/[~>]=/==/g" \
   -e '/^python = "^/!s/\^/==/g' \
   -e "/psycopg2/d" \
   -e 's/pyOpenSSL = "==16.0.0"/pyOpenSSL = "==17.0.0"/' \
   -e '/systemd/d' \
   pyproject.toml

# Use poetry to do the installation. This ensures that the versions are all mutually
# compatible (as far the package metadata declares, anyway); pip's package resolver
# is more lax.
#
# Rather than `poetry install --no-dev`, we drop all dev dependencies from the
# toml file. This means we don't have to ensure compatibility between old deps and
# dev tools.

pip install toml wheel

REMOVE_DEV_DEPENDENCIES="
import toml
with open('pyproject.toml', 'r') as f:
    data = toml.loads(f.read())

del data['tool']['poetry']['dev-dependencies']

with open('pyproject.toml', 'w') as f:
    toml.dump(data, f)
"
python3 -c "$REMOVE_DEV_DEPENDENCIES"

pip install poetry==1.2.0
poetry lock

echo "::group::Patched pyproject.toml"
cat pyproject.toml
echo "::endgroup::"
echo "::group::Lockfile after patch"
cat poetry.lock
echo "::endgroup::"
@@ -1,36 +0,0 @@
#!/bin/sh
#
# Common commands to set up Complement's prerequisites in a GitHub Actions CI run.
#
# Must be called after Synapse has been checked out to `synapse/`.
#
set -eu

alias block='{ set +x; } 2>/dev/null; func() { echo "::group::$*"; set -x; }; func'
alias endblock='{ set +x; } 2>/dev/null; func() { echo "::endgroup::"; set -x; }; func'

block Set Go Version
# The path is set via a file given by $GITHUB_PATH. We need both Go 1.17 and GOPATH on the path to run Complement.
# See https://docs.github.com/en/actions/using-workflows/workflow-commands-for-github-actions#adding-a-system-path

# Add Go 1.17 to the PATH: see https://github.com/actions/virtual-environments/blob/main/images/linux/Ubuntu2004-Readme.md#environment-variables-2
echo "$GOROOT_1_17_X64/bin" >> $GITHUB_PATH
# Add the Go path to the PATH: We need this so we can call gotestfmt
echo "~/go/bin" >> $GITHUB_PATH
endblock

block Install Complement Dependencies
sudo apt-get -qq update && sudo apt-get install -qqy libolm3 libolm-dev
go get -v github.com/haveyoudebuggedit/gotestfmt/v2/cmd/gotestfmt@latest
endblock

block Install custom gotestfmt template
mkdir .gotestfmt/github -p
cp synapse/.ci/complement_package.gotpl .gotestfmt/github/package.gotpl
endblock

block Check out Complement
# Attempt to check out the same branch of Complement as the PR. If it
# doesn't exist, fallback to HEAD.
synapse/.ci/scripts/checkout_complement.sh
endblock
@@ -2,24 +2,29 @@

 # Test for the export-data admin command against sqlite and postgres

-# Expects Synapse to have been already installed with `poetry install --extras postgres`.
-# Expects `poetry` to be available on the `PATH`.

 set -xe
 cd "$(dirname "$0")/../.."

+echo "--- Install dependencies"

+# Install dependencies for this test.
+pip install psycopg2

+# Install Synapse itself. This won't update any libraries.
+pip install -e .

 echo "--- Generate the signing key"

 # Generate the server's signing key.
-poetry run synapse_homeserver --generate-keys -c .ci/sqlite-config.yaml
+python -m synapse.app.homeserver --generate-keys -c .ci/sqlite-config.yaml

 echo "--- Prepare test database"

 # Make sure the SQLite3 database is using the latest schema and has no pending background update.
-poetry run update_synapse_database --database-config .ci/sqlite-config.yaml --run-background-updates
+scripts/update_synapse_database --database-config .ci/sqlite-config.yaml --run-background-updates

 # Run the export-data command on the sqlite test database
-poetry run python -m synapse.app.admin_cmd -c .ci/sqlite-config.yaml export-data @anon-20191002_181700-832:localhost:8800 \
+python -m synapse.app.admin_cmd -c .ci/sqlite-config.yaml export-data @anon-20191002_181700-832:localhost:8800 \
 --output-directory /tmp/export_data

 # Test that the output directory exists and contains the rooms directory
@@ -32,14 +37,14 @@ else
 fi

 # Create the PostgreSQL database.
-poetry run .ci/scripts/postgres_exec.py "CREATE DATABASE synapse"
+.ci/scripts/postgres_exec.py "CREATE DATABASE synapse"

 # Port the SQLite databse to postgres so we can check command works against postgres
 echo "+++ Port SQLite3 databse to postgres"
-poetry run synapse_port_db --sqlite-database .ci/test_db.db --postgres-config .ci/postgres-config.yaml
+scripts/synapse_port_db --sqlite-database .ci/test_db.db --postgres-config .ci/postgres-config.yaml

 # Run the export-data command on postgres database
-poetry run python -m synapse.app.admin_cmd -c .ci/postgres-config.yaml export-data @anon-20191002_181700-832:localhost:8800 \
+python -m synapse.app.admin_cmd -c .ci/postgres-config.yaml export-data @anon-20191002_181700-832:localhost:8800 \
 --output-directory /tmp/export_data2

 # Test that the output directory exists and contains the rooms directory
16  .ci/scripts/test_old_deps.sh  Executable file
@@ -0,0 +1,16 @@
#!/usr/bin/env bash

# this script is run by GitHub Actions in a plain `bionic` container; it installs the
# minimal requirements for tox and hands over to the py3-old tox environment.

set -ex

apt-get update
apt-get install -y python3 python3-dev python3-pip libxml2-dev libxslt-dev xmlsec1 zlib1g-dev tox

export LANG="C.UTF-8"

# Prevent virtualenv from auto-updating pip to an incompatible version
export VIRTUALENV_NO_DOWNLOAD=1

exec tox -e py3-old,combine
@@ -1,37 +1,41 @@
 #!/usr/bin/env bash
 #
 # Test script for 'synapse_port_db'.
-# - configures synapse and a postgres server.
+# - sets up synapse and deps
 # - runs the port script on a prepopulated test sqlite db
 # - also runs it against an new sqlite db
-#
-# Expects Synapse to have been already installed with `poetry install --extras postgres`.
-# Expects `poetry` to be available on the `PATH`.

 set -xe
 cd "$(dirname "$0")/../.."

+echo "--- Install dependencies"

+# Install dependencies for this test.
+pip install psycopg2 coverage coverage-enable-subprocess

+# Install Synapse itself. This won't update any libraries.
+pip install -e .

 echo "--- Generate the signing key"

 # Generate the server's signing key.
-poetry run synapse_homeserver --generate-keys -c .ci/sqlite-config.yaml
+python -m synapse.app.homeserver --generate-keys -c .ci/sqlite-config.yaml

 echo "--- Prepare test database"

 # Make sure the SQLite3 database is using the latest schema and has no pending background update.
-poetry run update_synapse_database --database-config .ci/sqlite-config.yaml --run-background-updates
+scripts/update_synapse_database --database-config .ci/sqlite-config.yaml --run-background-updates

 # Create the PostgreSQL database.
-poetry run .ci/scripts/postgres_exec.py "CREATE DATABASE synapse"
+.ci/scripts/postgres_exec.py "CREATE DATABASE synapse"

 echo "+++ Run synapse_port_db against test database"
-# TODO: this invocation of synapse_port_db (and others below) used to be prepended with `coverage run`,
-# but coverage seems unable to find the entrypoints installed by `pip install -e .`.
-poetry run synapse_port_db --sqlite-database .ci/test_db.db --postgres-config .ci/postgres-config.yaml
+coverage run scripts/synapse_port_db --sqlite-database .ci/test_db.db --postgres-config .ci/postgres-config.yaml

 # We should be able to run twice against the same database.
 echo "+++ Run synapse_port_db a second time"
-poetry run synapse_port_db --sqlite-database .ci/test_db.db --postgres-config .ci/postgres-config.yaml
+coverage run scripts/synapse_port_db --sqlite-database .ci/test_db.db --postgres-config .ci/postgres-config.yaml

 #####

@@ -42,12 +46,12 @@ echo "--- Prepare empty SQLite database"
 # we do this by deleting the sqlite db, and then doing the same again.
 rm .ci/test_db.db

-poetry run update_synapse_database --database-config .ci/sqlite-config.yaml --run-background-updates
+scripts/update_synapse_database --database-config .ci/sqlite-config.yaml --run-background-updates

 # re-create the PostgreSQL database.
-poetry run .ci/scripts/postgres_exec.py \
+.ci/scripts/postgres_exec.py \
 "DROP DATABASE synapse" \
 "CREATE DATABASE synapse"

 echo "+++ Run synapse_port_db against empty database"
-poetry run synapse_port_db --sqlite-database .ci/test_db.db --postgres-config .ci/postgres-config.yaml
+coverage run scripts/synapse_port_db --sqlite-database .ci/test_db.db --postgres-config .ci/postgres-config.yaml
@@ -3,13 +3,11 @@

 # things to include
 !docker
+!scripts
 !synapse
-!rust
+!MANIFEST.in
 !README.rst
-!pyproject.toml
+!setup.py
-!poetry.lock
+!synctl
-!build_rust.py

-rust/target

 **/__pycache__
@@ -7,4 +7,3 @@ root = true
 [*.py]
 indent_style = space
 indent_size = 4
-max_line_length = 88
11  .flake8
@@ -1,11 +0,0 @@
# TODO: incorporate this into pyproject.toml if flake8 supports it in the future.
# See https://github.com/PyCQA/flake8/issues/234
[flake8]
# see https://pycodestyle.readthedocs.io/en/latest/intro.html#error-codes
# for error codes. The ones we ignore are:
# W503: line break before binary operator
# W504: line break after binary operator
# E203: whitespace before ':' (which is contrary to pep8?)
# E731: do not assign a lambda expression, use a def
# E501: Line too long (black enforces this for us)
ignore=W503,W504,E203,E731,E501
@@ -1,16 +1,3 @@
-# Commits in this file will be removed from GitHub blame results.
-#
-# To use this file locally, use:
-# git blame --ignore-revs-file="path/to/.git-blame-ignore-revs" <files>
-#
-# or configure the `blame.ignoreRevsFile` option in your git config.
-#
-# If ignoring a pull request that was not squash merged, only the merge
-# commit needs to be put here. Child commits will be resolved from it.
-
-# Run black (#3679).
-8b3d9b6b199abb87246f982d5db356f1966db925
-
 # Black reformatting (#5482).
 32e7c9e7f20b57dd081023ac42d6931a8da9b3a3

@@ -19,6 +6,3 @@ aff1eb7c671b0a3813407321d2702ec46c71fa56

 # Update black to 20.8b1 (#9381).
 0a00b7ff14890987f09112a2ae696c61001e6cf1
-
-# Convert tests/rest/admin/test_room.py to unix file endings (#7953).
-c4268e3da64f1abb5b31deaeb5769adb6510c0a7
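The header comments removed above describe how this file is consumed; for reference, a short sketch of the two standard ways to use it (the blamed file below is just a placeholder path):

```sh
# One-off: ignore the listed reformatting commits for a single blame.
git blame --ignore-revs-file=".git-blame-ignore-revs" path/to/some_file.py

# Or set it once per clone so every `git blame` picks it up automatically.
git config blame.ignoreRevsFile .git-blame-ignore-revs
```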
72  .github/ISSUE_TEMPLATE/BUG_REPORT.md  vendored  Normal file
@@ -0,0 +1,72 @@
---
name: Bug report
about: Create a report to help us improve

---

<!--

**THIS IS NOT A SUPPORT CHANNEL!**
**IF YOU HAVE SUPPORT QUESTIONS ABOUT RUNNING OR CONFIGURING YOUR OWN HOME SERVER**,
please ask in **#synapse:matrix.org** (using a matrix.org account if necessary)

If you want to report a security issue, please see https://matrix.org/security-disclosure-policy/

This is a bug report template. By following the instructions below and
filling out the sections with your information, you will help the us to get all
the necessary data to fix your issue.

You can also preview your report before submitting it. You may remove sections
that aren't relevant to your particular case.

Text between <!-- and --> marks will be invisible in the report.

-->

### Description

<!-- Describe here the problem that you are experiencing -->

### Steps to reproduce

- list the steps
- that reproduce the bug
- using hyphens as bullet points

<!--
Describe how what happens differs from what you expected.

If you can identify any relevant log snippets from _homeserver.log_, please include
those (please be careful to remove any personal or private data). Please surround them with
``` (three backticks, on a line on their own), so that they are formatted legibly.
-->

### Version information

<!-- IMPORTANT: please answer the following questions, to help us narrow down the problem -->

<!-- Was this issue identified on matrix.org or another homeserver? -->
- **Homeserver**:

If not matrix.org:

<!--
What version of Synapse is running?

You can find the Synapse version with this command:

$ curl http://localhost:8008/_synapse/admin/v1/server_version

(You may need to replace `localhost:8008` if Synapse is not configured to
listen on that port.)
-->
- **Version**:

- **Install method**:
<!-- examples: package manager/git clone/pip -->

- **Platform**:
<!--
Tell us about the environment in which your homeserver is operating
distro, hardware, if it's running in a vm/container, etc.
-->
103  .github/ISSUE_TEMPLATE/BUG_REPORT.yml  vendored
@@ -1,103 +0,0 @@
name: Bug report
description: Create a report to help us improve
body:
  - type: markdown
    attributes:
      value: |
        **THIS IS NOT A SUPPORT CHANNEL!**
        **IF YOU HAVE SUPPORT QUESTIONS ABOUT RUNNING OR CONFIGURING YOUR OWN HOME SERVER**, please ask in **[#synapse:matrix.org](https://matrix.to/#/#synapse:matrix.org)** (using a matrix.org account if necessary).

        If you want to report a security issue, please see https://matrix.org/security-disclosure-policy/

        This is a bug report form. By following the instructions below and completing the sections with your information, you will help the us to get all the necessary data to fix your issue.

        You can also preview your report before submitting it.
  - type: textarea
    id: description
    attributes:
      label: Description
      description: Describe the problem that you are experiencing
    validations:
      required: true
  - type: textarea
    id: reproduction_steps
    attributes:
      label: Steps to reproduce
      description: |
        Describe the series of steps that leads you to the problem.

        Describe how what happens differs from what you expected.
      placeholder: Tell us what you see!
      value: |
        - list the steps
        - that reproduce the bug
        - using hyphens as bullet points
    validations:
      required: true
  - type: markdown
    attributes:
      value: |
        ---

        **IMPORTANT**: please answer the following questions, to help us narrow down the problem.
  - type: input
    id: homeserver
    attributes:
      label: Homeserver
      description: Which homeserver was this issue identified on? (matrix.org, another homeserver, etc)
    validations:
      required: true
  - type: input
    id: version
    attributes:
      label: Synapse Version
      description: |
        What version of Synapse is this homeserver running?

        You can find the Synapse version by visiting https://yourserver.example.com/_matrix/federation/v1/version

        or with this command:

        ```
        $ curl http://localhost:8008/_synapse/admin/v1/server_version
        ```

        (You may need to replace `localhost:8008` if Synapse is not configured to listen on that port.)
    validations:
      required: true
  - type: dropdown
    id: install_method
    attributes:
      label: Installation Method
      options:
        - Docker (matrixdotorg/synapse)
        - Debian packages from packages.matrix.org
        - pip (from PyPI)
        - Other (please mention below)
  - type: textarea
    id: platform
    attributes:
      label: Platform
      description: |
        Tell us about the environment in which your homeserver is operating...
        e.g. distro, hardware, if it's running in a vm/container, etc.
    validations:
      required: true
  - type: textarea
    id: logs
    attributes:
      label: Relevant log output
      description: |
        Please copy and paste any relevant log output, ideally at INFO or DEBUG log level.
        This will be automatically formatted into code, so there is no need for backticks.

        Please be careful to remove any personal or private data.

        **Bug reports are usually very difficult to diagnose without logging.**
      render: shell
    validations:
      required: true
  - type: textarea
    id: anything_else
    attributes:
      label: Anything else that would be useful to know?
1  .github/PULL_REQUEST_TEMPLATE.md  vendored
@@ -8,7 +8,6 @@
 - Use markdown where necessary, mostly for `code blocks`.
 - End with either a period (.) or an exclamation mark (!).
 - Start with a capital letter.
-- Feel free to credit yourself, by adding a sentence "Contributed by @github_username." or "Contributed by [Your Name]." to the end of the entry.
 * [ ] Pull request includes a [sign off](https://matrix-org.github.io/synapse/latest/development/contributing_guide.html#sign-off)
 * [ ] [Code style](https://matrix-org.github.io/synapse/latest/code_style.html) is correct
 (run the [linters](https://matrix-org.github.io/synapse/latest/development/contributing_guide.html#run-the-linters))
38  .github/workflows/docker.yml  vendored
@@ -36,22 +36,40 @@ jobs:

 - name: Calculate docker image tag
 id: set-tag
-uses: docker/metadata-action@master
+run: |
+case "${GITHUB_REF}" in
+refs/heads/develop)
+tag=develop
+;;
+refs/heads/master|refs/heads/main)
+tag=latest
+;;
+refs/tags/*)
+tag=${GITHUB_REF#refs/tags/}
+;;
+*)
+tag=${GITHUB_SHA}
+;;
+esac
+echo "::set-output name=tag::$tag"

+# for release builds, we want to get the amd64 image out asap, so first
+# we do an amd64-only build, before following up with a multiarch build.
+- name: Build and push amd64
+uses: docker/build-push-action@v2
+if: "${{ startsWith(github.ref, 'refs/tags/v') }}"
 with:
-images: matrixdotorg/synapse
+push: true
-flavor: |
+labels: "gitsha1=${{ github.sha }}"
-latest=false
+tags: "matrixdotorg/synapse:${{ steps.set-tag.outputs.tag }}"
-tags: |
+file: "docker/Dockerfile"
-type=raw,value=develop,enable=${{ github.ref == 'refs/heads/develop' }}
+platforms: linux/amd64
-type=raw,value=latest,enable=${{ github.ref == 'refs/heads/master' }}
-type=raw,value=latest,enable=${{ github.ref == 'refs/heads/main' }}
-type=pep440,pattern={{raw}}

 - name: Build and push all platforms
 uses: docker/build-push-action@v2
 with:
 push: true
 labels: "gitsha1=${{ github.sha }}"
-tags: "${{ steps.set-tag.outputs.tags }}"
+tags: "matrixdotorg/synapse:${{ steps.set-tag.outputs.tag }}"
 file: "docker/Dockerfile"
 platforms: linux/amd64,linux/arm64
2  .github/workflows/docs.yaml  vendored
@@ -22,7 +22,7 @@ jobs:
 - name: Setup mdbook
 uses: peaceiris/actions-mdbook@4b5ef36b314c2599664ca107bb8c02412548d79d # v1.1.14
 with:
-mdbook-version: '0.4.17'
+mdbook-version: '0.4.9'

 - name: Build the documentation
 # mdbook will only create an index.html if we're including docs/README.md in SUMMARY.md.
218  .github/workflows/latest_deps.yml  vendored
@@ -1,218 +0,0 @@
# People who are freshly `pip install`ing from PyPI will pull in the latest versions of
# dependencies which match the broad requirements. Since most CI runs are against
# the locked poetry environment, run specifically against the latest dependencies to
# know if there's an upcoming breaking change.
#
# As an overview this workflow:
# - checks out develop,
# - installs from source, pulling in the dependencies like a fresh `pip install` would, and
# - runs mypy and test suites in that checkout.
#
# Based on the twisted trunk CI job.

name: Latest dependencies

on:
  schedule:
    - cron: 0 7 * * *
  workflow_dispatch:

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

jobs:
  mypy:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - name: Install Rust
        uses: actions-rs/toolchain@v1
        with:
          toolchain: stable
          override: true
      - uses: Swatinem/rust-cache@v2

      # The dev dependencies aren't exposed in the wheel metadata (at least with current
      # poetry-core versions), so we install with poetry.
      - uses: matrix-org/setup-python-poetry@v1
        with:
          python-version: "3.x"
          poetry-version: "1.2.0"
          extras: "all"
      # Dump installed versions for debugging.
      - run: poetry run pip list > before.txt
      # Upgrade all runtime dependencies only. This is intended to mimic a fresh
      # `pip install matrix-synapse[all]` as closely as possible.
      - run: poetry update --no-dev
      - run: poetry run pip list > after.txt && (diff -u before.txt after.txt || true)
      - name: Remove warn_unused_ignores from mypy config
        run: sed '/warn_unused_ignores = True/d' -i mypy.ini
      - run: poetry run mypy
  trial:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        include:
          - database: "sqlite"
          - database: "postgres"
            postgres-version: "14"

    steps:
      - uses: actions/checkout@v2

      - name: Install Rust
        uses: actions-rs/toolchain@v1
        with:
          toolchain: stable
          override: true
      - uses: Swatinem/rust-cache@v2

      - run: sudo apt-get -qq install xmlsec1
      - name: Set up PostgreSQL ${{ matrix.postgres-version }}
        if: ${{ matrix.postgres-version }}
        run: |
          docker run -d -p 5432:5432 \
            -e POSTGRES_PASSWORD=postgres \
            -e POSTGRES_INITDB_ARGS="--lc-collate C --lc-ctype C --encoding UTF8" \
            postgres:${{ matrix.postgres-version }}
      - uses: actions/setup-python@v2
        with:
          python-version: "3.x"
      - run: pip install .[all,test]
      - name: Await PostgreSQL
        if: ${{ matrix.postgres-version }}
        timeout-minutes: 2
        run: until pg_isready -h localhost; do sleep 1; done

      # We nuke the local copy, as we've installed synapse into the virtualenv
      # (rather than use an editable install, which we no longer support). If we
      # don't do this then python can't find the native lib.
      - run: rm -rf synapse/

      - run: python -m twisted.trial --jobs=2 tests
        env:
          SYNAPSE_POSTGRES: ${{ matrix.database == 'postgres' || '' }}
          SYNAPSE_POSTGRES_HOST: localhost
          SYNAPSE_POSTGRES_USER: postgres
          SYNAPSE_POSTGRES_PASSWORD: postgres
      - name: Dump logs
        # Logs are most useful when the command fails, always include them.
        if: ${{ always() }}
        # Note: Dumps to workflow logs instead of using actions/upload-artifact
        #       This keeps logs colocated with failing jobs
        #       It also ignores find's exit code; this is a best effort affair
        run: >-
          find _trial_temp -name '*.log'
          -exec echo "::group::{}" \;
          -exec cat {} \;
          -exec echo "::endgroup::" \;
          || true


  sytest:
    runs-on: ubuntu-latest
    container:
      image: matrixdotorg/sytest-synapse:testing
      volumes:
        - ${{ github.workspace }}:/src
    strategy:
      fail-fast: false
      matrix:
        include:
          - sytest-tag: focal

          - sytest-tag: focal
            postgres: postgres
            workers: workers
            redis: redis
    env:
      POSTGRES: ${{ matrix.postgres && 1}}
      WORKERS: ${{ matrix.workers && 1 }}
      REDIS: ${{ matrix.redis && 1 }}
      BLACKLIST: ${{ matrix.workers && 'synapse-blacklist-with-workers' }}

    steps:
      - uses: actions/checkout@v2

      - name: Install Rust
        uses: actions-rs/toolchain@v1
        with:
          toolchain: stable
          override: true
      - uses: Swatinem/rust-cache@v2

      - name: Ensure sytest runs `pip install`
        # Delete the lockfile so sytest will `pip install` rather than `poetry install`
        run: rm /src/poetry.lock
        working-directory: /src
      - name: Prepare test blacklist
        run: cat sytest-blacklist .ci/worker-blacklist > synapse-blacklist-with-workers
      - name: Run SyTest
        run: /bootstrap.sh synapse
        working-directory: /src
      - name: Summarise results.tap
        if: ${{ always() }}
        run: /sytest/scripts/tap_to_gha.pl /logs/results.tap
      - name: Upload SyTest logs
        uses: actions/upload-artifact@v2
        if: ${{ always() }}
        with:
          name: Sytest Logs - ${{ job.status }} - (${{ join(matrix.*, ', ') }})
          path: |
            /logs/results.tap
            /logs/**/*.log*


  complement:
    if: "${{ !failure() && !cancelled() }}"
    runs-on: ubuntu-latest

    strategy:
      fail-fast: false
      matrix:
        include:
          - arrangement: monolith
            database: SQLite

          - arrangement: monolith
            database: Postgres

          - arrangement: workers
            database: Postgres

    steps:
      - name: Run actions/checkout@v2 for synapse
        uses: actions/checkout@v2
        with:
          path: synapse

      - name: Prepare Complement's Prerequisites
        run: synapse/.ci/scripts/setup_complement_prerequisites.sh

      - run: |
          set -o pipefail
          TEST_ONLY_IGNORE_POETRY_LOCKFILE=1 POSTGRES=${{ (matrix.database == 'Postgres') && 1 || '' }} WORKERS=${{ (matrix.arrangement == 'workers') && 1 || '' }} COMPLEMENT_DIR=`pwd`/complement synapse/scripts-dev/complement.sh -json 2>&1 | synapse/.ci/scripts/gotestfmt
        shell: bash
        name: Run Complement Tests

  # Open an issue if the build fails, so we know about it.
  # Only do this if we're not experimenting with this action in a PR.
  open-issue:
    if: "failure() && github.event_name != 'push' && github.event_name != 'pull_request'"
    needs:
      # TODO: should mypy be included here? It feels more brittle than the other two.
      - mypy
      - trial
      - sytest

    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v2
      - uses: JasonEtco/create-an-issue@5d9504915f79f9cc6d791934b8ef34f2353dd74d # v2.5.0, 2020-12-06
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          update_existing: true
          filename: .ci/latest_deps_build_failed_issue_template.md
76  .github/workflows/release-artifacts.yml  vendored
@@ -7,7 +7,7 @@ on:
 # of things breaking (but only build one set of debs)
 pull_request:
 push:
-branches: ["develop", "release-*"]
+branches: ["develop"]

 # we do the full build on tags.
 tags: ["v*"]
@@ -15,7 +15,7 @@ on:
 concurrency:
 group: ${{ github.workflow }}-${{ github.ref }}
 cancel-in-progress: true

 permissions:
 contents: write

@@ -31,7 +31,7 @@ jobs:
 # if we're running from a tag, get the full list of distros; otherwise just use debian:sid
 dists='["debian:sid"]'
 if [[ $GITHUB_REF == refs/tags/* ]]; then
-dists=$(scripts-dev/build_debian_packages.py --show-dists-json)
+dists=$(scripts-dev/build_debian_packages --show-dists-json)
 fi
 echo "::set-output name=distros::$dists"
 # map the step outputs to job outputs
@@ -74,7 +74,7 @@ jobs:
 # see https://github.com/docker/build-push-action/issues/252
 # for the cache magic here
 run: |
-./src/scripts-dev/build_debian_packages.py \
+./src/scripts-dev/build_debian_packages \
 --docker-build-arg=--cache-from=type=local,src=/tmp/.buildx-cache \
 --docker-build-arg=--cache-to=type=local,mode=max,dest=/tmp/.buildx-cache-new \
 --docker-build-arg=--progress=plain \
@@ -89,67 +89,19 @@ jobs:
 name: debs
 path: debs/*

-build-wheels:
-name: Build wheels on ${{ matrix.os }}
-runs-on: ${{ matrix.os }}
-strategy:
-matrix:
-os: [ubuntu-20.04, macos-10.15]
-is_pr:
-- ${{ startsWith(github.ref, 'refs/pull/') }}

-exclude:
-# Don't build macos wheels on PR CI.
-- is_pr: true
-os: "macos-10.15"

-steps:
-- uses: actions/checkout@v3

-- uses: actions/setup-python@v3

-- name: Install cibuildwheel
-run: python -m pip install cibuildwheel==2.9.0 poetry==1.2.0

-# Only build a single wheel in CI.
-- name: Set env vars.
-run: |
-echo "CIBW_BUILD="cp37-manylinux_x86_64"" >> $GITHUB_ENV
-if: startsWith(github.ref, 'refs/pull/')

-- name: Build wheels
-run: python -m cibuildwheel --output-dir wheelhouse
-env:
-# Skip testing for platforms which various libraries don't have wheels
-# for, and so need extra build deps.
-CIBW_TEST_SKIP: pp39-* *i686* *musl* pp37-macosx*

-- uses: actions/upload-artifact@v3
-with:
-name: Wheel
-path: ./wheelhouse/*.whl

 build-sdist:
-name: Build sdist
+name: "Build pypi distribution files"
 runs-on: ubuntu-latest
-if: ${{ !startsWith(github.ref, 'refs/pull/') }}

 steps:
-- uses: actions/checkout@v3
+- uses: actions/checkout@v2
-- uses: actions/setup-python@v4
+- uses: actions/setup-python@v2
-with:
+- run: pip install wheel
-python-version: '3.10'
+- run: |
+python setup.py sdist bdist_wheel
-- run: pip install build

-- name: Build sdist
-run: python -m build --sdist

 - uses: actions/upload-artifact@v2
 with:
-name: Sdist
+name: python-dist
-path: dist/*.tar.gz
+path: dist/*


 # if it's a tag, create a release and attach the artifacts to it
 attach-assets:
@@ -157,7 +109,6 @@ jobs:
 if: ${{ !failure() && !cancelled() && startsWith(github.ref, 'refs/tags/') }}
 needs:
 - build-debs
-- build-wheels
 - build-sdist
 runs-on: ubuntu-latest
 steps:
@@ -171,8 +122,7 @@ jobs:
 GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
 with:
 files: |
-Sdist/*
+python-dist/*
-Wheel/*
 debs.tar.xz
 # if it's not already published, keep the release as a draft.
 draft: true
429
.github/workflows/tests.yml
vendored
429
.github/workflows/tests.yml
vendored
@@ -10,44 +10,22 @@ concurrency:
|
|||||||
cancel-in-progress: true
|
cancel-in-progress: true
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
# Job to detect what has changed so we don't run e.g. Rust checks on PRs that
|
|
||||||
# don't modify Rust code.
|
|
||||||
changes:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
outputs:
|
|
||||||
rust: ${{ !startsWith(github.ref, 'refs/pull/') || steps.filter.outputs.rust }}
|
|
||||||
steps:
|
|
||||||
- uses: dorny/paths-filter@v2
|
|
||||||
id: filter
|
|
||||||
# We only check on PRs
|
|
||||||
if: startsWith(github.ref, 'refs/pull/')
|
|
||||||
with:
|
|
||||||
filters: |
|
|
||||||
rust:
|
|
||||||
- 'rust/**'
|
|
||||||
- 'Cargo.toml'
|
|
||||||
|
|
||||||
check-sampleconfig:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v2
|
|
||||||
- uses: actions/setup-python@v2
|
|
||||||
- run: pip install .
|
|
||||||
- run: scripts-dev/generate_sample_config.sh --check
|
|
||||||
- run: scripts-dev/config-lint.sh
|
|
||||||
|
|
||||||
check-schema-delta:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v2
|
|
||||||
- uses: actions/setup-python@v2
|
|
||||||
- run: "pip install 'click==8.1.1' 'GitPython>=3.1.20'"
|
|
||||||
- run: scripts-dev/check_schema_delta.py --force-colors
|
|
||||||
|
|
||||||
lint:
|
lint:
|
||||||
uses: "matrix-org/backend-meta/.github/workflows/python-poetry-ci.yml@v1"
|
runs-on: ubuntu-latest
|
||||||
with:
|
strategy:
|
||||||
typechecking-extras: "all"
|
matrix:
|
||||||
|
toxenv:
|
||||||
|
- "check-sampleconfig"
|
||||||
|
- "check_codestyle"
|
||||||
|
- "check_isort"
|
||||||
|
- "mypy"
|
||||||
|
- "packaging"
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v2
|
||||||
|
- uses: actions/setup-python@v2
|
||||||
|
- run: pip install tox
|
||||||
|
- run: tox -e ${{ matrix.toxenv }}
|
||||||
|
|
||||||
lint-crlf:
|
lint-crlf:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
@@ -65,117 +43,81 @@ jobs:
|
|||||||
ref: ${{ github.event.pull_request.head.sha }}
|
ref: ${{ github.event.pull_request.head.sha }}
|
||||||
fetch-depth: 0
|
fetch-depth: 0
|
||||||
- uses: actions/setup-python@v2
|
- uses: actions/setup-python@v2
|
||||||
- run: "pip install 'towncrier>=18.6.0rc1'"
|
- run: pip install tox
|
||||||
- run: scripts-dev/check-newsfragment.sh
|
- run: scripts-dev/check-newsfragment
|
||||||
env:
|
env:
|
||||||
PULL_REQUEST_NUMBER: ${{ github.event.number }}
|
PULL_REQUEST_NUMBER: ${{ github.event.number }}
|
||||||
|
|
||||||
lint-pydantic:
|
lint-sdist:
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v2
|
|
||||||
with:
|
|
||||||
ref: ${{ github.event.pull_request.head.sha }}
|
|
||||||
fetch-depth: 0
|
|
||||||
- uses: matrix-org/setup-python-poetry@v1
|
|
||||||
with:
|
|
||||||
extras: "all"
|
|
||||||
- run: poetry run scripts-dev/check_pydantic_models.py
|
|
||||||
|
|
||||||
lint-clippy:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
needs: changes
|
|
||||||
if: ${{ needs.changes.outputs.rust == 'true' }}
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v2
|
|
||||||
|
|
||||||
- name: Install Rust
|
|
||||||
uses: actions-rs/toolchain@v1
|
|
||||||
with:
|
|
||||||
toolchain: 1.61.0
|
|
||||||
override: true
|
|
||||||
components: clippy
|
|
||||||
- uses: Swatinem/rust-cache@v2
|
|
||||||
|
|
||||||
- run: cargo clippy
|
|
||||||
|
|
||||||
lint-rustfmt:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
needs: changes
|
|
||||||
if: ${{ needs.changes.outputs.rust == 'true' }}
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v2
|
|
||||||
|
|
||||||
- name: Install Rust
|
|
||||||
uses: actions-rs/toolchain@v1
|
|
||||||
with:
|
|
||||||
toolchain: 1.61.0
|
|
||||||
override: true
|
|
||||||
components: rustfmt
|
|
||||||
- uses: Swatinem/rust-cache@v2
|
|
||||||
|
|
||||||
- run: cargo fmt --check
|
|
||||||
|
|
||||||
# Dummy step to gate other tests on without repeating the whole list
|
|
||||||
linting-done:
|
|
||||||
if: ${{ !cancelled() }} # Run this even if prior jobs were skipped
|
|
||||||
needs:
|
|
||||||
- lint
|
|
||||||
- lint-crlf
|
|
||||||
- lint-newsfile
|
|
||||||
- lint-pydantic
|
|
||||||
- check-sampleconfig
|
|
||||||
- check-schema-delta
|
|
||||||
- lint-clippy
|
|
||||||
- lint-rustfmt
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
- run: "true"
|
|
||||||
|
|
||||||
calculate-test-jobs:
|
|
||||||
if: ${{ !cancelled() && !failure() }} # Allow previous steps to be skipped, but not fail
|
|
||||||
needs: linting-done
|
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v2
|
- uses: actions/checkout@v2
|
||||||
- uses: actions/setup-python@v2
|
- uses: actions/setup-python@v2
|
||||||
- id: get-matrix
|
with:
|
||||||
run: .ci/scripts/calculate_jobs.py
|
python-version: "3.x"
|
||||||
outputs:
|
- run: pip install wheel
|
||||||
trial_test_matrix: ${{ steps.get-matrix.outputs.trial_test_matrix }}
|
- run: python setup.py sdist bdist_wheel
|
||||||
sytest_test_matrix: ${{ steps.get-matrix.outputs.sytest_test_matrix }}
|
- uses: actions/upload-artifact@v2
|
||||||
|
with:
|
||||||
|
name: Python Distributions
|
||||||
|
path: dist/*
|
||||||
|
|
||||||
|
# Dummy step to gate other tests on without repeating the whole list
|
||||||
|
linting-done:
|
||||||
|
if: ${{ !cancelled() }} # Run this even if prior jobs were skipped
|
||||||
|
needs: [lint, lint-crlf, lint-newsfile, lint-sdist]
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- run: "true"
|
||||||
|
|
||||||
trial:
|
trial:
|
||||||
if: ${{ !cancelled() && !failure() }} # Allow previous steps to be skipped, but not fail
|
if: ${{ !cancelled() && !failure() }} # Allow previous steps to be skipped, but not fail
|
||||||
needs: calculate-test-jobs
|
needs: linting-done
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
strategy:
|
strategy:
|
||||||
matrix:
|
matrix:
|
||||||
job: ${{ fromJson(needs.calculate-test-jobs.outputs.trial_test_matrix) }}
|
python-version: ["3.6", "3.7", "3.8", "3.9", "3.10"]
|
||||||
|
database: ["sqlite"]
|
||||||
|
toxenv: ["py"]
|
||||||
|
include:
|
||||||
|
# Newest Python without optional deps
|
||||||
|
- python-version: "3.10"
|
||||||
|
toxenv: "py-noextras"
|
||||||
|
|
||||||
|
# Oldest Python with PostgreSQL
|
||||||
|
- python-version: "3.6"
|
||||||
|
database: "postgres"
|
||||||
|
postgres-version: "9.6"
|
||||||
|
toxenv: "py"
|
||||||
|
|
||||||
|
# Newest Python with newest PostgreSQL
|
||||||
|
- python-version: "3.10"
|
||||||
|
database: "postgres"
|
||||||
|
postgres-version: "14"
|
||||||
|
toxenv: "py"
|
||||||
|
|
||||||
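On the newer branch the trial matrix comes out of .ci/scripts/calculate_jobs.py and is read back with fromJson, while the older branch lists the combinations by hand above. Purely as an illustration of the mechanism (the field names are taken from the matrix.job.* references used later in this workflow, not from the script, and the single entry is invented), a step can publish such a matrix as an output roughly like this:

    # Hypothetical one-entry matrix; the real values come from calculate_jobs.py.
    trial_test_matrix='[{"python-version": "3.7", "database": "sqlite", "extras": "all"}]'
    # Expose it as a step output so a later job can use
    # ${{ fromJson(needs.calculate-test-jobs.outputs.trial_test_matrix) }}.
    echo "trial_test_matrix=${trial_test_matrix}" >> "$GITHUB_OUTPUT"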
steps:
|
steps:
|
||||||
- uses: actions/checkout@v2
|
- uses: actions/checkout@v2
|
||||||
- run: sudo apt-get -qq install xmlsec1
|
- run: sudo apt-get -qq install xmlsec1
|
||||||
- name: Set up PostgreSQL ${{ matrix.job.postgres-version }}
|
- name: Set up PostgreSQL ${{ matrix.postgres-version }}
|
||||||
if: ${{ matrix.job.postgres-version }}
|
if: ${{ matrix.postgres-version }}
|
||||||
run: |
|
run: |
|
||||||
docker run -d -p 5432:5432 \
|
docker run -d -p 5432:5432 \
|
||||||
-e POSTGRES_PASSWORD=postgres \
|
-e POSTGRES_PASSWORD=postgres \
|
||||||
-e POSTGRES_INITDB_ARGS="--lc-collate C --lc-ctype C --encoding UTF8" \
|
-e POSTGRES_INITDB_ARGS="--lc-collate C --lc-ctype C --encoding UTF8" \
|
||||||
postgres:${{ matrix.job.postgres-version }}
|
postgres:${{ matrix.postgres-version }}
|
||||||
- uses: matrix-org/setup-python-poetry@v1
|
- uses: actions/setup-python@v2
|
||||||
with:
|
with:
|
||||||
python-version: ${{ matrix.job.python-version }}
|
python-version: ${{ matrix.python-version }}
|
||||||
extras: ${{ matrix.job.extras }}
|
- run: pip install tox
|
||||||
- name: Await PostgreSQL
|
- name: Await PostgreSQL
|
||||||
if: ${{ matrix.job.postgres-version }}
|
if: ${{ matrix.postgres-version }}
|
||||||
timeout-minutes: 2
|
timeout-minutes: 2
|
||||||
run: until pg_isready -h localhost; do sleep 1; done
|
run: until pg_isready -h localhost; do sleep 1; done
|
||||||
- run: poetry run trial --jobs=2 tests
|
- run: tox -e ${{ matrix.toxenv }}
|
||||||
env:
|
env:
|
||||||
SYNAPSE_POSTGRES: ${{ matrix.job.database == 'postgres' || '' }}
|
TRIAL_FLAGS: "--jobs=2"
|
||||||
|
SYNAPSE_POSTGRES: ${{ matrix.database == 'postgres' || '' }}
|
||||||
SYNAPSE_POSTGRES_HOST: localhost
|
SYNAPSE_POSTGRES_HOST: localhost
|
||||||
SYNAPSE_POSTGRES_USER: postgres
|
SYNAPSE_POSTGRES_USER: postgres
|
||||||
SYNAPSE_POSTGRES_PASSWORD: postgres
|
SYNAPSE_POSTGRES_PASSWORD: postgres
|
||||||
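Both versions of the trial job above spin up a throwaway PostgreSQL container and wait for it with pg_isready. To reproduce that environment locally, outside of CI, something like the following works (the version tag is only an example):

    # Disposable PostgreSQL with the same initdb settings as the workflow.
    docker run -d -p 5432:5432 \
      -e POSTGRES_PASSWORD=postgres \
      -e POSTGRES_INITDB_ARGS="--lc-collate C --lc-ctype C --encoding UTF8" \
      postgres:14

    # Same readiness loop as the workflow, bounded by hand at about two minutes.
    for _ in $(seq 120); do pg_isready -h localhost && break; sleep 1; done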
@@ -193,57 +135,18 @@ jobs:
|
|||||||
|| true
|
|| true
|
||||||
|
|
||||||
trial-olddeps:
|
trial-olddeps:
|
||||||
# Note: sqlite only; no postgres
|
|
||||||
if: ${{ !cancelled() && !failure() }} # Allow previous steps to be skipped, but not fail
|
if: ${{ !cancelled() && !failure() }} # Allow previous steps to be skipped, but not fail
|
||||||
needs: linting-done
|
needs: linting-done
|
||||||
runs-on: ubuntu-20.04
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v2
|
- uses: actions/checkout@v2
|
||||||
|
- name: Test with old deps
|
||||||
- name: Install Rust
|
uses: docker://ubuntu:bionic # For old python and sqlite
|
||||||
uses: actions-rs/toolchain@v1
|
|
||||||
with:
|
with:
|
||||||
toolchain: 1.61.0
|
workdir: /github/workspace
|
||||||
override: true
|
entrypoint: .ci/scripts/test_old_deps.sh
|
||||||
- uses: Swatinem/rust-cache@v2
|
env:
|
||||||
|
TRIAL_FLAGS: "--jobs=2"
|
||||||
# There aren't wheels for some of the older deps, so we need to install
|
|
||||||
# their build dependencies
|
|
||||||
- run: |
|
|
||||||
sudo apt-get -qq install build-essential libffi-dev python-dev \
|
|
||||||
libxml2-dev libxslt-dev xmlsec1 zlib1g-dev libjpeg-dev libwebp-dev
|
|
||||||
|
|
||||||
- uses: actions/setup-python@v4
|
|
||||||
with:
|
|
||||||
python-version: '3.7'
|
|
||||||
|
|
||||||
# Calculating the old-deps actually takes a bunch of time, so we cache the
|
|
||||||
# pyproject.toml / poetry.lock. We need to cache pyproject.toml as
|
|
||||||
# otherwise the `poetry install` step will error due to the poetry.lock
|
|
||||||
# file being outdated.
|
|
||||||
#
|
|
||||||
# This caches the output of `Prepare old deps`, which should generate the
|
|
||||||
# same `pyproject.toml` and `poetry.lock` for a given `pyproject.toml` input.
|
|
||||||
- uses: actions/cache@v3
|
|
||||||
id: cache-poetry-old-deps
|
|
||||||
name: Cache poetry.lock
|
|
||||||
with:
|
|
||||||
path: |
|
|
||||||
poetry.lock
|
|
||||||
pyproject.toml
|
|
||||||
key: poetry-old-deps2-${{ hashFiles('pyproject.toml') }}
|
|
||||||
- name: Prepare old deps
|
|
||||||
if: steps.cache-poetry-old-deps.outputs.cache-hit != 'true'
|
|
||||||
run: .ci/scripts/prepare_old_deps.sh
|
|
||||||
|
|
||||||
# We only now install poetry so that `setup-python-poetry` caches the
|
|
||||||
# right poetry.lock's dependencies.
|
|
||||||
- uses: matrix-org/setup-python-poetry@v1
|
|
||||||
with:
|
|
||||||
python-version: '3.7'
|
|
||||||
extras: "all test"
|
|
||||||
|
|
||||||
- run: poetry run trial -j2 tests
|
|
||||||
- name: Dump logs
|
- name: Dump logs
|
||||||
# Logs are most useful when the command fails, always include them.
|
# Logs are most useful when the command fails, always include them.
|
||||||
if: ${{ always() }}
|
if: ${{ always() }}
|
||||||
@@ -259,24 +162,23 @@ jobs:
|
|||||||
|
|
||||||
trial-pypy:
|
trial-pypy:
|
||||||
# Very slow; only run if the branch name includes 'pypy'
|
# Very slow; only run if the branch name includes 'pypy'
|
||||||
# Note: sqlite only; no postgres. Completely untested since poetry move.
|
|
||||||
if: ${{ contains(github.ref, 'pypy') && !failure() && !cancelled() }}
|
if: ${{ contains(github.ref, 'pypy') && !failure() && !cancelled() }}
|
||||||
needs: linting-done
|
needs: linting-done
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
strategy:
|
strategy:
|
||||||
matrix:
|
matrix:
|
||||||
python-version: ["pypy-3.7"]
|
python-version: ["pypy-3.6"]
|
||||||
extras: ["all"]
|
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v2
|
- uses: actions/checkout@v2
|
||||||
# Install libs necessary for PyPy to build binary wheels for dependencies
|
|
||||||
- run: sudo apt-get -qq install xmlsec1 libxml2-dev libxslt-dev
|
- run: sudo apt-get -qq install xmlsec1 libxml2-dev libxslt-dev
|
||||||
- uses: matrix-org/setup-python-poetry@v1
|
- uses: actions/setup-python@v2
|
||||||
with:
|
with:
|
||||||
python-version: ${{ matrix.python-version }}
|
python-version: ${{ matrix.python-version }}
|
||||||
extras: ${{ matrix.extras }}
|
- run: pip install tox
|
||||||
- run: poetry run trial --jobs=2 tests
|
- run: tox -e py
|
||||||
|
env:
|
||||||
|
TRIAL_FLAGS: "--jobs=2"
|
||||||
- name: Dump logs
|
- name: Dump logs
|
||||||
# Logs are most useful when the command fails, always include them.
|
# Logs are most useful when the command fails, always include them.
|
||||||
if: ${{ always() }}
|
if: ${{ always() }}
|
||||||
@@ -292,37 +194,50 @@ jobs:
|
|||||||
|
|
||||||
sytest:
|
sytest:
|
||||||
if: ${{ !failure() && !cancelled() }}
|
if: ${{ !failure() && !cancelled() }}
|
||||||
needs: calculate-test-jobs
|
needs: linting-done
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
container:
|
container:
|
||||||
image: matrixdotorg/sytest-synapse:${{ matrix.job.sytest-tag }}
|
image: matrixdotorg/sytest-synapse:${{ matrix.sytest-tag }}
|
||||||
volumes:
|
volumes:
|
||||||
- ${{ github.workspace }}:/src
|
- ${{ github.workspace }}:/src
|
||||||
env:
|
env:
|
||||||
SYTEST_BRANCH: ${{ github.head_ref }}
|
SYTEST_BRANCH: ${{ github.head_ref }}
|
||||||
POSTGRES: ${{ matrix.job.postgres && 1}}
|
POSTGRES: ${{ matrix.postgres && 1}}
|
||||||
MULTI_POSTGRES: ${{ (matrix.job.postgres == 'multi-postgres') && 1}}
|
MULTI_POSTGRES: ${{ (matrix.postgres == 'multi-postgres') && 1}}
|
||||||
WORKERS: ${{ matrix.job.workers && 1 }}
|
WORKERS: ${{ matrix.workers && 1 }}
|
||||||
BLACKLIST: ${{ matrix.job.workers && 'synapse-blacklist-with-workers' }}
|
REDIS: ${{ matrix.redis && 1 }}
|
||||||
|
BLACKLIST: ${{ matrix.workers && 'synapse-blacklist-with-workers' }}
|
||||||
TOP: ${{ github.workspace }}
|
TOP: ${{ github.workspace }}
|
||||||
|
|
||||||
strategy:
|
strategy:
|
||||||
fail-fast: false
|
fail-fast: false
|
||||||
matrix:
|
matrix:
|
||||||
job: ${{ fromJson(needs.calculate-test-jobs.outputs.sytest_test_matrix) }}
|
include:
|
||||||
|
- sytest-tag: bionic
|
||||||
|
|
||||||
|
- sytest-tag: bionic
|
||||||
|
postgres: postgres
|
||||||
|
|
||||||
|
- sytest-tag: testing
|
||||||
|
postgres: postgres
|
||||||
|
|
||||||
|
- sytest-tag: bionic
|
||||||
|
postgres: multi-postgres
|
||||||
|
workers: workers
|
||||||
|
|
||||||
|
- sytest-tag: buster
|
||||||
|
postgres: multi-postgres
|
||||||
|
workers: workers
|
||||||
|
|
||||||
|
- sytest-tag: buster
|
||||||
|
postgres: postgres
|
||||||
|
workers: workers
|
||||||
|
redis: redis
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v2
|
- uses: actions/checkout@v2
|
||||||
- name: Prepare test blacklist
|
- name: Prepare test blacklist
|
||||||
run: cat sytest-blacklist .ci/worker-blacklist > synapse-blacklist-with-workers
|
run: cat sytest-blacklist .ci/worker-blacklist > synapse-blacklist-with-workers
|
||||||
|
|
||||||
- name: Install Rust
|
|
||||||
uses: actions-rs/toolchain@v1
|
|
||||||
with:
|
|
||||||
toolchain: 1.61.0
|
|
||||||
override: true
|
|
||||||
- uses: Swatinem/rust-cache@v2
|
|
||||||
|
|
||||||
- name: Run SyTest
|
- name: Run SyTest
|
||||||
run: /bootstrap.sh synapse
|
run: /bootstrap.sh synapse
|
||||||
working-directory: /src
|
working-directory: /src
|
||||||
@@ -333,7 +248,7 @@ jobs:
|
|||||||
uses: actions/upload-artifact@v2
|
uses: actions/upload-artifact@v2
|
||||||
if: ${{ always() }}
|
if: ${{ always() }}
|
||||||
with:
|
with:
|
||||||
name: Sytest Logs - ${{ job.status }} - (${{ join(matrix.job.*, ', ') }})
|
name: Sytest Logs - ${{ job.status }} - (${{ join(matrix.*, ', ') }})
|
||||||
path: |
|
path: |
|
||||||
/logs/results.tap
|
/logs/results.tap
|
||||||
/logs/**/*.log*
|
/logs/**/*.log*
|
||||||
@@ -362,9 +277,9 @@ jobs:
|
|||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v2
|
- uses: actions/checkout@v2
|
||||||
- run: sudo apt-get -qq install xmlsec1
|
- run: sudo apt-get -qq install xmlsec1
|
||||||
- uses: matrix-org/setup-python-poetry@v1
|
- uses: actions/setup-python@v2
|
||||||
with:
|
with:
|
||||||
extras: "postgres"
|
python-version: "3.9"
|
||||||
- run: .ci/scripts/test_export_data_command.sh
|
- run: .ci/scripts/test_export_data_command.sh
|
||||||
|
|
||||||
portdb:
|
portdb:
|
||||||
@@ -376,8 +291,8 @@ jobs:
|
|||||||
strategy:
|
strategy:
|
||||||
matrix:
|
matrix:
|
||||||
include:
|
include:
|
||||||
- python-version: "3.7"
|
- python-version: "3.6"
|
||||||
postgres-version: "10"
|
postgres-version: "9.6"
|
||||||
|
|
||||||
- python-version: "3.10"
|
- python-version: "3.10"
|
||||||
postgres-version: "14"
|
postgres-version: "14"
|
||||||
@@ -399,29 +314,24 @@ jobs:
|
|||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v2
|
- uses: actions/checkout@v2
|
||||||
- run: sudo apt-get -qq install xmlsec1
|
- run: sudo apt-get -qq install xmlsec1
|
||||||
- uses: matrix-org/setup-python-poetry@v1
|
- uses: actions/setup-python@v2
|
||||||
with:
|
with:
|
||||||
python-version: ${{ matrix.python-version }}
|
python-version: ${{ matrix.python-version }}
|
||||||
extras: "postgres"
|
|
||||||
- run: .ci/scripts/test_synapse_port_db.sh
|
- run: .ci/scripts/test_synapse_port_db.sh
|
||||||
|
|
||||||
complement:
|
complement:
|
||||||
if: "${{ !failure() && !cancelled() }}"
|
if: ${{ !failure() && !cancelled() }}
|
||||||
needs: linting-done
|
needs: linting-done
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
|
container:
|
||||||
strategy:
|
# https://github.com/matrix-org/complement/blob/master/dockerfiles/ComplementCIBuildkite.Dockerfile
|
||||||
fail-fast: false
|
image: matrixdotorg/complement:latest
|
||||||
matrix:
|
env:
|
||||||
include:
|
CI: true
|
||||||
- arrangement: monolith
|
ports:
|
||||||
database: SQLite
|
- 8448:8448
|
||||||
|
volumes:
|
||||||
- arrangement: monolith
|
- /var/run/docker.sock:/var/run/docker.sock
|
||||||
database: Postgres
|
|
||||||
|
|
||||||
- arrangement: workers
|
|
||||||
database: Postgres
|
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- name: Run actions/checkout@v2 for synapse
|
- name: Run actions/checkout@v2 for synapse
|
||||||
@@ -429,60 +339,79 @@ jobs:
|
|||||||
with:
|
with:
|
||||||
path: synapse
|
path: synapse
|
||||||
|
|
||||||
- name: Install Rust
|
# Attempt to check out the same branch of Complement as the PR. If it
|
||||||
uses: actions-rs/toolchain@v1
|
# doesn't exist, fallback to master.
|
||||||
with:
|
- name: Checkout complement
|
||||||
toolchain: 1.61.0
|
|
||||||
override: true
|
|
||||||
- uses: Swatinem/rust-cache@v2
|
|
||||||
|
|
||||||
- name: Prepare Complement's Prerequisites
|
|
||||||
run: synapse/.ci/scripts/setup_complement_prerequisites.sh
|
|
||||||
|
|
||||||
- run: |
|
|
||||||
set -o pipefail
|
|
||||||
POSTGRES=${{ (matrix.database == 'Postgres') && 1 || '' }} WORKERS=${{ (matrix.arrangement == 'workers') && 1 || '' }} COMPLEMENT_DIR=`pwd`/complement synapse/scripts-dev/complement.sh -json 2>&1 | synapse/.ci/scripts/gotestfmt
|
|
||||||
shell: bash
|
shell: bash
|
||||||
name: Run Complement Tests
|
run: |
|
||||||
|
mkdir -p complement
|
||||||
|
# Attempt to use the version of complement which best matches the current
|
||||||
|
# build. Depending on whether this is a PR or release, etc. we need to
|
||||||
|
# use different fallbacks.
|
||||||
|
#
|
||||||
|
# 1. First check if there's a similarly named branch (GITHUB_HEAD_REF
|
||||||
|
# for pull requests, otherwise GITHUB_REF).
|
||||||
|
# 2. Attempt to use the base branch, e.g. when merging into release-vX.Y
|
||||||
|
# (GITHUB_BASE_REF for pull requests).
|
||||||
|
# 3. Use the default complement branch ("master").
|
||||||
|
for BRANCH_NAME in "$GITHUB_HEAD_REF" "$GITHUB_BASE_REF" "${GITHUB_REF#refs/heads/}" "master"; do
|
||||||
|
# Skip empty branch names and merge commits.
|
||||||
|
if [[ -z "$BRANCH_NAME" || $BRANCH_NAME =~ ^refs/pull/.* ]]; then
|
||||||
|
continue
|
||||||
|
fi
|
||||||
|
|
||||||
cargo-test:
|
(wget -O - "https://github.com/matrix-org/complement/archive/$BRANCH_NAME.tar.gz" | tar -xz --strip-components=1 -C complement) && break
|
||||||
if: ${{ needs.changes.outputs.rust == 'true' }}
|
done
|
||||||
runs-on: ubuntu-latest
|
|
||||||
needs:
|
|
||||||
- linting-done
|
|
||||||
- changes
|
|
||||||
|
|
||||||
steps:
|
# Build initial Synapse image
|
||||||
- uses: actions/checkout@v2
|
- run: docker build -t matrixdotorg/synapse:latest -f docker/Dockerfile .
|
||||||
|
working-directory: synapse
|
||||||
|
|
||||||
- name: Install Rust
|
# Build a ready-to-run Synapse image based on the initial image above.
|
||||||
uses: actions-rs/toolchain@v1
|
# This new image includes a config file, keys for signing and TLS, and
|
||||||
with:
|
# other settings to make it suitable for testing under Complement.
|
||||||
toolchain: 1.61.0
|
- run: docker build -t complement-synapse -f Synapse.Dockerfile .
|
||||||
override: true
|
working-directory: complement/dockerfiles
|
||||||
- uses: Swatinem/rust-cache@v2
|
|
||||||
|
|
||||||
- run: cargo test
|
# Run Complement
|
||||||
|
- run: go test -v -tags synapse_blacklist,msc2403 ./tests/...
|
||||||
|
env:
|
||||||
|
COMPLEMENT_BASE_IMAGE: complement-synapse:latest
|
||||||
|
working-directory: complement
|
||||||
|
|
||||||
# a job which marks all the other jobs as complete, thus allowing PRs to be merged.
|
# a job which marks all the other jobs as complete, thus allowing PRs to be merged.
|
||||||
tests-done:
|
tests-done:
|
||||||
if: ${{ always() }}
|
if: ${{ always() }}
|
||||||
needs:
|
needs:
|
||||||
|
- lint
|
||||||
|
- lint-crlf
|
||||||
|
- lint-newsfile
|
||||||
|
- lint-sdist
|
||||||
- trial
|
- trial
|
||||||
- trial-olddeps
|
- trial-olddeps
|
||||||
- sytest
|
- sytest
|
||||||
- export-data
|
|
||||||
- portdb
|
- portdb
|
||||||
- complement
|
- complement
|
||||||
- cargo-test
|
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- uses: matrix-org/done-action@v2
|
- name: Set build result
|
||||||
with:
|
env:
|
||||||
needs: ${{ toJSON(needs) }}
|
NEEDS_CONTEXT: ${{ toJSON(needs) }}
|
||||||
|
# the `jq` incantation dumps out a series of "<job> <result>" lines.
|
||||||
|
# we set it to an intermediate variable to avoid a pipe, which makes it
|
||||||
|
# hard to set $rc.
|
||||||
|
run: |
|
||||||
|
rc=0
|
||||||
|
results=$(jq -r 'to_entries[] | [.key,.value.result] | join(" ")' <<< $NEEDS_CONTEXT)
|
||||||
|
while read job result ; do
|
||||||
|
# The newsfile lint may be skipped on non PR builds
|
||||||
|
if [ $result == "skipped" ] && [ $job == "lint-newsfile" ]; then
|
||||||
|
continue
|
||||||
|
fi
|
||||||
|
|
||||||
# The newsfile lint may be skipped on non PR builds
|
if [ "$result" != "success" ]; then
|
||||||
# Cargo test is skipped if there is no changes on Rust code
|
echo "::set-failed ::Job $job returned $result"
|
||||||
skippable: |
|
rc=1
|
||||||
lint-newsfile
|
fi
|
||||||
cargo-test
|
done <<< $results
|
||||||
|
exit $rc
|
||||||
|
|||||||
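Both versions of the tests-done job inspect the needs context; the older version shown above does it by hand with a jq incantation. A minimal sketch of what that expression produces, using an invented payload (at runtime the JSON comes from ${{ toJSON(needs) }}):

    # Invented sample of the needs context, for illustration only.
    NEEDS_CONTEXT='{"lint": {"result": "success"}, "trial": {"result": "failure"}}'

    # The same jq expression as the workflow step: one "<job> <result>" line per job.
    jq -r 'to_entries[] | [.key,.value.result] | join(" ")' <<< "$NEEDS_CONTEXT"
    # lint success
    # trial failure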
.github/workflows/triage-incoming.yml (vendored): 28 changes

@@ -1,28 +0,0 @@
-name: Move new issues into the issue triage board
-
-on:
-  issues:
-    types: [ opened ]
-
-jobs:
-  add_new_issues:
-    name: Add new issues to the triage board
-    runs-on: ubuntu-latest
-    steps:
-      - uses: octokit/graphql-action@v2.x
-        id: add_to_project
-        with:
-          headers: '{"GraphQL-Features": "projects_next_graphql"}'
-          query: |
-            mutation add_to_project($projectid:ID!,$contentid:ID!) {
-              addProjectV2ItemById(input: {projectId: $projectid contentId: $contentid}) {
-                item {
-                  id
-                }
-              }
-            }
-          projectid: ${{ env.PROJECT_ID }}
-          contentid: ${{ github.event.issue.node_id }}
-        env:
-          PROJECT_ID: "PVT_kwDOAIB0Bs4AFDdZ"
-          GITHUB_TOKEN: ${{ secrets.ELEMENT_BOT_TOKEN }}
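The removed workflow above drives the GitHub Projects GraphQL API through octokit/graphql-action. Roughly the same call can be made by hand with the GitHub CLI; this is only a sketch, with the project ID taken from the workflow and the content ID left as a placeholder:

    gh api graphql \
      -H "GraphQL-Features: projects_next_graphql" \
      -f query='
        mutation add_to_project($projectid: ID!, $contentid: ID!) {
          addProjectV2ItemById(input: {projectId: $projectid, contentId: $contentid}) {
            item { id }
          }
        }' \
      -f projectid="PVT_kwDOAIB0Bs4AFDdZ" \
      -f contentid="<issue node id>"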
.github/workflows/triage_labelled.yml (vendored): 44 changes

@@ -1,44 +0,0 @@
-name: Move labelled issues to correct projects
-
-on:
-  issues:
-    types: [ labeled ]
-
-jobs:
-  move_needs_info:
-    name: Move X-Needs-Info on the triage board
-    runs-on: ubuntu-latest
-    if: >
-      contains(github.event.issue.labels.*.name, 'X-Needs-Info')
-    steps:
-      - uses: octokit/graphql-action@v2.x
-        id: add_to_project
-        with:
-          headers: '{"GraphQL-Features": "projects_next_graphql"}'
-          query: |
-            mutation {
-              updateProjectV2ItemFieldValue(
-                input: {
-                  projectId: $projectid
-                  itemId: $contentid
-                  fieldId: $fieldid
-                  value: {
-                    singleSelectOptionId: "Todo"
-                  }
-                }
-              ) {
-                projectV2Item {
-                  id
-                }
-              }
-            }
-
-          projectid: ${{ env.PROJECT_ID }}
-          contentid: ${{ github.event.issue.node_id }}
-          fieldid: ${{ env.FIELD_ID }}
-          optionid: ${{ env.OPTION_ID }}
-        env:
-          PROJECT_ID: "PVT_kwDOAIB0Bs4AFDdZ"
-          GITHUB_TOKEN: ${{ secrets.ELEMENT_BOT_TOKEN }}
-          FIELD_ID: "PVTSSF_lADOAIB0Bs4AFDdZzgC6ZA4"
-          OPTION_ID: "ba22e43c"
.github/workflows/twisted_trunk.yml (vendored): 118 changes
@@ -6,35 +6,16 @@ on:
|
|||||||
|
|
||||||
workflow_dispatch:
|
workflow_dispatch:
|
||||||
|
|
||||||
concurrency:
|
|
||||||
group: ${{ github.workflow }}-${{ github.ref }}
|
|
||||||
cancel-in-progress: true
|
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
mypy:
|
mypy:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v2
|
- uses: actions/checkout@v2
|
||||||
|
- uses: actions/setup-python@v2
|
||||||
- name: Install Rust
|
- run: .ci/patch_for_twisted_trunk.sh
|
||||||
uses: actions-rs/toolchain@v1
|
- run: pip install tox
|
||||||
with:
|
- run: tox -e mypy
|
||||||
toolchain: stable
|
|
||||||
override: true
|
|
||||||
- uses: Swatinem/rust-cache@v2
|
|
||||||
|
|
||||||
- uses: matrix-org/setup-python-poetry@v1
|
|
||||||
with:
|
|
||||||
python-version: "3.x"
|
|
||||||
extras: "all"
|
|
||||||
- run: |
|
|
||||||
poetry remove twisted
|
|
||||||
poetry add --extras tls git+https://github.com/twisted/twisted.git#trunk
|
|
||||||
poetry install --no-interaction --extras "all test"
|
|
||||||
- name: Remove warn_unused_ignores from mypy config
|
|
||||||
run: sed '/warn_unused_ignores = True/d' -i mypy.ini
|
|
||||||
- run: poetry run mypy
|
|
||||||
|
|
||||||
trial:
|
trial:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
@@ -42,23 +23,14 @@ jobs:
|
|||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v2
|
- uses: actions/checkout@v2
|
||||||
- run: sudo apt-get -qq install xmlsec1
|
- run: sudo apt-get -qq install xmlsec1
|
||||||
|
- uses: actions/setup-python@v2
|
||||||
- name: Install Rust
|
|
||||||
uses: actions-rs/toolchain@v1
|
|
||||||
with:
|
with:
|
||||||
toolchain: stable
|
python-version: 3.6
|
||||||
override: true
|
- run: .ci/patch_for_twisted_trunk.sh
|
||||||
- uses: Swatinem/rust-cache@v2
|
- run: pip install tox
|
||||||
|
- run: tox -e py
|
||||||
- uses: matrix-org/setup-python-poetry@v1
|
env:
|
||||||
with:
|
TRIAL_FLAGS: "--jobs=2"
|
||||||
python-version: "3.x"
|
|
||||||
extras: "all test"
|
|
||||||
- run: |
|
|
||||||
poetry remove twisted
|
|
||||||
poetry add --extras tls git+https://github.com/twisted/twisted.git#trunk
|
|
||||||
poetry install --no-interaction --extras "all test"
|
|
||||||
- run: poetry run trial --jobs 2 tests
|
|
||||||
|
|
||||||
- name: Dump logs
|
- name: Dump logs
|
||||||
# Logs are most useful when the command fails, always include them.
|
# Logs are most useful when the command fails, always include them.
|
||||||
@@ -82,32 +54,12 @@ jobs:
|
|||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v2
|
- uses: actions/checkout@v2
|
||||||
|
|
||||||
- name: Install Rust
|
|
||||||
uses: actions-rs/toolchain@v1
|
|
||||||
with:
|
|
||||||
toolchain: stable
|
|
||||||
override: true
|
|
||||||
- uses: Swatinem/rust-cache@v2
|
|
||||||
|
|
||||||
- name: Patch dependencies
|
- name: Patch dependencies
|
||||||
# Note: The poetry commands want to create a virtualenv in /src/.venv/,
|
run: .ci/patch_for_twisted_trunk.sh
|
||||||
# but the sytest-synapse container expects it to be in /venv/.
|
|
||||||
# We symlink it before running poetry so that poetry actually
|
|
||||||
# ends up installing to `/venv`.
|
|
||||||
run: |
|
|
||||||
ln -s -T /venv /src/.venv
|
|
||||||
poetry remove twisted
|
|
||||||
poetry add --extras tls git+https://github.com/twisted/twisted.git#trunk
|
|
||||||
poetry install --no-interaction --extras "all test"
|
|
||||||
working-directory: /src
|
working-directory: /src
|
||||||
- name: Run SyTest
|
- name: Run SyTest
|
||||||
run: /bootstrap.sh synapse
|
run: /bootstrap.sh synapse
|
||||||
working-directory: /src
|
working-directory: /src
|
||||||
env:
|
|
||||||
# Use offline mode to avoid reinstalling the pinned version of
|
|
||||||
# twisted.
|
|
||||||
OFFLINE: 1
|
|
||||||
- name: Summarise results.tap
|
- name: Summarise results.tap
|
||||||
if: ${{ always() }}
|
if: ${{ always() }}
|
||||||
run: /sytest/scripts/tap_to_gha.pl /logs/results.tap
|
run: /sytest/scripts/tap_to_gha.pl /logs/results.tap
|
||||||
@@ -120,51 +72,6 @@ jobs:
|
|||||||
/logs/results.tap
|
/logs/results.tap
|
||||||
/logs/**/*.log*
|
/logs/**/*.log*
|
||||||
|
|
||||||
complement:
|
|
||||||
if: "${{ !failure() && !cancelled() }}"
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
|
|
||||||
strategy:
|
|
||||||
fail-fast: false
|
|
||||||
matrix:
|
|
||||||
include:
|
|
||||||
- arrangement: monolith
|
|
||||||
database: SQLite
|
|
||||||
|
|
||||||
- arrangement: monolith
|
|
||||||
database: Postgres
|
|
||||||
|
|
||||||
- arrangement: workers
|
|
||||||
database: Postgres
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- name: Run actions/checkout@v2 for synapse
|
|
||||||
uses: actions/checkout@v2
|
|
||||||
with:
|
|
||||||
path: synapse
|
|
||||||
|
|
||||||
- name: Prepare Complement's Prerequisites
|
|
||||||
run: synapse/.ci/scripts/setup_complement_prerequisites.sh
|
|
||||||
|
|
||||||
# This step is specific to the 'Twisted trunk' test run:
|
|
||||||
- name: Patch dependencies
|
|
||||||
run: |
|
|
||||||
set -x
|
|
||||||
DEBIAN_FRONTEND=noninteractive sudo apt-get install -yqq python3 pipx
|
|
||||||
pipx install poetry==1.1.14
|
|
||||||
|
|
||||||
poetry remove -n twisted
|
|
||||||
poetry add -n --extras tls git+https://github.com/twisted/twisted.git#trunk
|
|
||||||
poetry lock --no-update
|
|
||||||
# NOT IN 1.1.14 poetry lock --check
|
|
||||||
working-directory: synapse
|
|
||||||
|
|
||||||
- run: |
|
|
||||||
set -o pipefail
|
|
||||||
TEST_ONLY_SKIP_DEP_HASH_VERIFICATION=1 POSTGRES=${{ (matrix.database == 'Postgres') && 1 || '' }} WORKERS=${{ (matrix.arrangement == 'workers') && 1 || '' }} COMPLEMENT_DIR=`pwd`/complement synapse/scripts-dev/complement.sh -json 2>&1 | synapse/.ci/scripts/gotestfmt
|
|
||||||
shell: bash
|
|
||||||
name: Run Complement Tests
|
|
||||||
|
|
||||||
# open an issue if the build fails, so we know about it.
|
# open an issue if the build fails, so we know about it.
|
||||||
open-issue:
|
open-issue:
|
||||||
if: failure()
|
if: failure()
|
||||||
@@ -172,7 +79,6 @@ jobs:
|
|||||||
- mypy
|
- mypy
|
||||||
- trial
|
- trial
|
||||||
- sytest
|
- sytest
|
||||||
- complement
|
|
||||||
|
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
|
|
||||||
|
|||||||
.gitignore (vendored): 17 changes

@@ -15,9 +15,6 @@ _trial_temp*/
 .DS_Store
 __pycache__/
 
-# We do want the poetry lockfile.
-!poetry.lock
-
 # stuff that is likely to exist when you run a server locally
 /*.db
 /*.log
@@ -33,9 +30,6 @@ __pycache__/
 /media_store/
 /uploads
 
-# For direnv users
-/.envrc
-
 # IDEs
 /.idea/
 /.ropeproject/
@@ -56,14 +50,3 @@ __pycache__/
 
 # docs
 book/
-
-# complement
-/complement-*
-/master.tar.gz
-
-# rust
-/target/
-/synapse/*.so
-
-# Poetry will create a setup.py, which we don't want to include.
-/setup.py
CHANGES.md: 10674 changes (file diff suppressed because it is too large)
@@ -1,5 +0,0 @@
-# We make the whole Synapse folder a workspace so that we can run `cargo`
-# commands from the root (rather than having to cd into rust/).
-
-[workspace]
-members = ["rust"]
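The removed manifest above (evidently the repository's root Cargo.toml) makes the whole checkout a Cargo workspace, so cargo commands can run from the top level. Without it, cargo has to be pointed at the member crate explicitly; a small sketch of the difference, with the path taken from the members list:

    # With the root [workspace] manifest in place, from the repository root:
    cargo test

    # Without it, target the crate manifest directly:
    cargo test --manifest-path rust/Cargo.toml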
MANIFEST.in (new file): 56 changes

@@ -0,0 +1,56 @@
+include synctl
+include LICENSE
+include VERSION
+include *.rst
+include *.md
+include demo/README
+include demo/demo.tls.dh
+include demo/*.py
+include demo/*.sh
+
+include synapse/py.typed
+recursive-include synapse/storage *.sql
+recursive-include synapse/storage *.sql.postgres
+recursive-include synapse/storage *.sql.sqlite
+recursive-include synapse/storage *.py
+recursive-include synapse/storage *.txt
+recursive-include synapse/storage *.md
+
+recursive-include docs *
+recursive-include scripts *
+recursive-include scripts-dev *
+recursive-include synapse *.pyi
+recursive-include tests *.py
+recursive-include tests *.pem
+recursive-include tests *.p8
+recursive-include tests *.crt
+recursive-include tests *.key
+
+recursive-include synapse/res *
+recursive-include synapse/static *.css
+recursive-include synapse/static *.gif
+recursive-include synapse/static *.html
+recursive-include synapse/static *.js
+
+exclude .codecov.yml
+exclude .coveragerc
+exclude .dockerignore
+exclude .editorconfig
+exclude Dockerfile
+exclude mypy.ini
+exclude sytest-blacklist
+exclude test_postgresql.sh
+
+include book.toml
+include pyproject.toml
+recursive-include changelog.d *
+
+prune .circleci
+prune .github
+prune .ci
+prune contrib
+prune debian
+prune demo/etc
+prune docker
+prune snap
+prune stubs
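MANIFEST.in only affects what ends up in the source distribution, so a quick way to sanity-check these rules locally is to build an sdist and inspect it; check-manifest (a third-party tool, not part of this change) can additionally compare the sdist against the files git tracks:

    # Build a source distribution and eyeball what the include/prune rules kept.
    pip install --upgrade build
    python -m build --sdist
    tar -tzf dist/*.tar.gz | head

    # Optional: report tracked files that the manifest rules would leave out.
    pip install check-manifest
    check-manifest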
README.rst: 470 changes
@@ -2,70 +2,152 @@
|
|||||||
Synapse |support| |development| |documentation| |license| |pypi| |python|
|
Synapse |support| |development| |documentation| |license| |pypi| |python|
|
||||||
=========================================================================
|
=========================================================================
|
||||||
|
|
||||||
Synapse is an open-source `Matrix <https://matrix.org/>`_ homeserver written and
|
|
||||||
maintained by the Matrix.org Foundation. We began rapid development in 2014,
|
|
||||||
reaching v1.0.0 in 2019. Development on Synapse and the Matrix protocol itself continues
|
|
||||||
in earnest today.
|
|
||||||
|
|
||||||
Briefly, Matrix is an open standard for communications on the internet, supporting
|
|
||||||
federation, encryption and VoIP. Matrix.org has more to say about the `goals of the
|
|
||||||
Matrix project <https://matrix.org/docs/guides/introduction>`_, and the `formal specification
|
|
||||||
<https://spec.matrix.org/>`_ describes the technical details.
|
|
||||||
|
|
||||||
.. contents::
|
.. contents::
|
||||||
|
|
||||||
Installing and configuration
|
Introduction
|
||||||
============================
|
============
|
||||||
|
|
||||||
The Synapse documentation describes `how to install Synapse <https://matrix-org.github.io/synapse/latest/setup/installation.html>`_. We recommend using
|
Matrix is an ambitious new ecosystem for open federated Instant Messaging and
|
||||||
`Docker images <https://matrix-org.github.io/synapse/latest/setup/installation.html#docker-images-and-ansible-playbooks>`_ or `Debian packages from Matrix.org
|
VoIP. The basics you need to know to get up and running are:
|
||||||
<https://matrix-org.github.io/synapse/latest/setup/installation.html#matrixorg-packages>`_.
|
|
||||||
|
- Everything in Matrix happens in a room. Rooms are distributed and do not
|
||||||
|
exist on any single server. Rooms can be located using convenience aliases
|
||||||
|
like ``#matrix:matrix.org`` or ``#test:localhost:8448``.
|
||||||
|
|
||||||
|
- Matrix user IDs look like ``@matthew:matrix.org`` (although in the future
|
||||||
|
you will normally refer to yourself and others using a third party identifier
|
||||||
|
(3PID): email address, phone number, etc rather than manipulating Matrix user IDs)
|
||||||
|
|
||||||
|
The overall architecture is::
|
||||||
|
|
||||||
|
client <----> homeserver <=====================> homeserver <----> client
|
||||||
|
https://somewhere.org/_matrix https://elsewhere.net/_matrix
|
||||||
|
|
||||||
|
``#matrix:matrix.org`` is the official support room for Matrix, and can be
|
||||||
|
accessed by any client from https://matrix.org/docs/projects/try-matrix-now.html or
|
||||||
|
via IRC bridge at irc://irc.libera.chat/matrix.
|
||||||
|
|
||||||
|
Synapse is currently in rapid development, but as of version 0.5 we believe it
|
||||||
|
is sufficiently stable to be run as an internet-facing service for real usage!
|
||||||
|
|
||||||
|
About Matrix
|
||||||
|
============
|
||||||
|
|
||||||
|
Matrix specifies a set of pragmatic RESTful HTTP JSON APIs as an open standard,
|
||||||
|
which handle:
|
||||||
|
|
||||||
|
- Creating and managing fully distributed chat rooms with no
|
||||||
|
single points of control or failure
|
||||||
|
- Eventually-consistent cryptographically secure synchronisation of room
|
||||||
|
state across a global open network of federated servers and services
|
||||||
|
- Sending and receiving extensible messages in a room with (optional)
|
||||||
|
end-to-end encryption
|
||||||
|
- Inviting, joining, leaving, kicking, banning room members
|
||||||
|
- Managing user accounts (registration, login, logout)
|
||||||
|
- Using 3rd Party IDs (3PIDs) such as email addresses, phone numbers,
|
||||||
|
Facebook accounts to authenticate, identify and discover users on Matrix.
|
||||||
|
- Placing 1:1 VoIP and Video calls
|
||||||
|
|
||||||
|
These APIs are intended to be implemented on a wide range of servers, services
|
||||||
|
and clients, letting developers build messaging and VoIP functionality on top
|
||||||
|
of the entirely open Matrix ecosystem rather than using closed or proprietary
|
||||||
|
solutions. The hope is for Matrix to act as the building blocks for a new
|
||||||
|
generation of fully open and interoperable messaging and VoIP apps for the
|
||||||
|
internet.
|
||||||
|
|
||||||
|
Synapse is a Matrix "homeserver" implementation developed by the matrix.org core
|
||||||
|
team, written in Python 3/Twisted.
|
||||||
|
|
||||||
|
In Matrix, every user runs one or more Matrix clients, which connect through to
|
||||||
|
a Matrix homeserver. The homeserver stores all their personal chat history and
|
||||||
|
user account information - much as a mail client connects through to an
|
||||||
|
IMAP/SMTP server. Just like email, you can either run your own Matrix
|
||||||
|
homeserver and control and own your own communications and history or use one
|
||||||
|
hosted by someone else (e.g. matrix.org) - there is no single point of control
|
||||||
|
or mandatory service provider in Matrix, unlike WhatsApp, Facebook, Hangouts,
|
||||||
|
etc.
|
||||||
|
|
||||||
|
We'd like to invite you to join #matrix:matrix.org (via
|
||||||
|
https://matrix.org/docs/projects/try-matrix-now.html), run a homeserver, take a look
|
||||||
|
at the `Matrix spec <https://matrix.org/docs/spec>`_, and experiment with the
|
||||||
|
`APIs <https://matrix.org/docs/api>`_ and `Client SDKs
|
||||||
|
<https://matrix.org/docs/projects/try-matrix-now.html#client-sdks>`_.
|
||||||
|
|
||||||
|
Thanks for using Matrix!
|
||||||
|
|
||||||
|
Support
|
||||||
|
=======
|
||||||
|
|
||||||
|
For support installing or managing Synapse, please join |room|_ (from a matrix.org
|
||||||
|
account if necessary) and ask questions there. We do not use GitHub issues for
|
||||||
|
support requests, only for bug reports and feature requests.
|
||||||
|
|
||||||
|
Synapse's documentation is `nicely rendered on GitHub Pages <https://matrix-org.github.io/synapse>`_,
|
||||||
|
with its source available in |docs|_.
|
||||||
|
|
||||||
|
.. |room| replace:: ``#synapse:matrix.org``
|
||||||
|
.. _room: https://matrix.to/#/#synapse:matrix.org
|
||||||
|
|
||||||
|
.. |docs| replace:: ``docs``
|
||||||
|
.. _docs: docs
|
||||||
|
|
||||||
|
Synapse Installation
|
||||||
|
====================
|
||||||
|
|
||||||
.. _federation:
|
.. _federation:
|
||||||
|
|
||||||
Synapse has a variety of `config options
|
* For details on how to install synapse, see
|
||||||
<https://matrix-org.github.io/synapse/latest/usage/configuration/config_documentation.html>`_
|
`Installation Instructions <https://matrix-org.github.io/synapse/latest/setup/installation.html>`_.
|
||||||
which can be used to customise its behaviour after installation.
|
* For specific details on how to configure Synapse for federation see `docs/federate.md <docs/federate.md>`_
|
||||||
There are additional details on how to `configure Synapse for federation here
|
|
||||||
<https://matrix-org.github.io/synapse/latest/federate.html>`_.
|
|
||||||
|
|
||||||
.. _reverse-proxy:
|
|
||||||
|
|
||||||
Using a reverse proxy with Synapse
|
|
||||||
----------------------------------
|
|
||||||
|
|
||||||
It is recommended to put a reverse proxy such as
|
|
||||||
`nginx <https://nginx.org/en/docs/http/ngx_http_proxy_module.html>`_,
|
|
||||||
`Apache <https://httpd.apache.org/docs/current/mod/mod_proxy_http.html>`_,
|
|
||||||
`Caddy <https://caddyserver.com/docs/quick-starts/reverse-proxy>`_,
|
|
||||||
`HAProxy <https://www.haproxy.org/>`_ or
|
|
||||||
`relayd <https://man.openbsd.org/relayd.8>`_ in front of Synapse. One advantage of
|
|
||||||
doing so is that it means that you can expose the default https port (443) to
|
|
||||||
Matrix clients without needing to run Synapse with root privileges.
|
|
||||||
For information on configuring one, see `the reverse proxy docs
|
|
||||||
<https://matrix-org.github.io/synapse/latest/reverse_proxy.html>`_.
|
|
||||||
|
|
||||||
Upgrading an existing Synapse
|
|
||||||
-----------------------------
|
|
||||||
|
|
||||||
The instructions for upgrading Synapse are in `the upgrade notes`_.
|
|
||||||
Please check these instructions as upgrading may require extra steps for some
|
|
||||||
versions of Synapse.
|
|
||||||
|
|
||||||
.. _the upgrade notes: https://matrix-org.github.io/synapse/develop/upgrade.html
|
|
||||||
|
|
||||||
|
|
||||||
Platform dependencies
|
Connecting to Synapse from a client
|
||||||
---------------------
|
===================================
|
||||||
|
|
||||||
Synapse uses a number of platform dependencies such as Python and PostgreSQL,
|
The easiest way to try out your new Synapse installation is by connecting to it
|
||||||
and aims to follow supported upstream versions. See the
|
from a web client.
|
||||||
`deprecation policy <https://matrix-org.github.io/synapse/latest/deprecation_policy.html>`_
|
|
||||||
for more details.
|
|
||||||
|
|
||||||
|
Unless you are running a test instance of Synapse on your local machine, in
|
||||||
|
general, you will need to enable TLS support before you can successfully
|
||||||
|
connect from a client: see
|
||||||
|
`TLS certificates <https://matrix-org.github.io/synapse/latest/setup/installation.html#tls-certificates>`_.
|
||||||
|
|
||||||
|
An easy way to get started is to login or register via Element at
|
||||||
|
https://app.element.io/#/login or https://app.element.io/#/register respectively.
|
||||||
|
You will need to change the server you are logging into from ``matrix.org``
|
||||||
|
and instead specify a Homeserver URL of ``https://<server_name>:8448``
|
||||||
|
(or just ``https://<server_name>`` if you are using a reverse proxy).
|
||||||
|
If you prefer to use another client, refer to our
|
||||||
|
`client breakdown <https://matrix.org/docs/projects/clients-matrix>`_.
|
||||||
|
|
||||||
|
If all goes well you should at least be able to log in, create a room, and
|
||||||
|
start sending messages.
|
||||||
|
|
||||||
|
.. _`client-user-reg`:
|
||||||
|
|
||||||
|
Registering a new user from a client
|
||||||
|
------------------------------------
|
||||||
|
|
||||||
|
By default, registration of new users via Matrix clients is disabled. To enable
|
||||||
|
it, specify ``enable_registration: true`` in ``homeserver.yaml``. (It is then
|
||||||
|
recommended to also set up CAPTCHA - see `<docs/CAPTCHA_SETUP.md>`_.)
|
||||||
|
|
||||||
|
Once ``enable_registration`` is set to ``true``, it is possible to register a
|
||||||
|
user via a Matrix client.
|
||||||
|
|
||||||
|
Your new user name will be formed partly from the ``server_name``, and partly
|
||||||
|
from a localpart you specify when you create the account. Your name will take
|
||||||
|
the form of::
|
||||||
|
|
||||||
|
@localpart:my.domain.name
|
||||||
|
|
||||||
|
(pronounced "at localpart on my dot domain dot name").
|
||||||
|
|
||||||
|
As when logging in, you will need to specify a "Custom server". Specify your
|
||||||
|
desired ``localpart`` in the 'User name' box.
|
||||||
|
|
||||||
Security note
|
Security note
|
||||||
-------------
|
=============
|
||||||
|
|
||||||
Matrix serves raw, user-supplied data in some APIs -- specifically the `content
|
Matrix serves raw, user-supplied data in some APIs -- specifically the `content
|
||||||
repository endpoints`_.
|
repository endpoints`_.
|
||||||
@@ -105,76 +187,30 @@ Following this advice ensures that even if an XSS is found in Synapse, the
|
|||||||
impact to other applications will be minimal.
|
impact to other applications will be minimal.
|
||||||
|
|
||||||
|
|
||||||
Testing a new installation
|
Upgrading an existing Synapse
|
||||||
==========================
|
=============================
|
||||||
|
|
||||||
The easiest way to try out your new Synapse installation is by connecting to it
|
The instructions for upgrading synapse are in `the upgrade notes`_.
|
||||||
from a web client.
|
Please check these instructions as upgrading may require extra steps for some
|
||||||
|
versions of synapse.
|
||||||
|
|
||||||
Unless you are running a test instance of Synapse on your local machine, in
|
.. _the upgrade notes: https://matrix-org.github.io/synapse/develop/upgrade.html
|
||||||
general, you will need to enable TLS support before you can successfully
|
|
||||||
connect from a client: see
|
|
||||||
`TLS certificates <https://matrix-org.github.io/synapse/latest/setup/installation.html#tls-certificates>`_.
|
|
||||||
|
|
||||||
An easy way to get started is to login or register via Element at
|
.. _reverse-proxy:
|
||||||
https://app.element.io/#/login or https://app.element.io/#/register respectively.
|
|
||||||
You will need to change the server you are logging into from ``matrix.org``
|
|
||||||
and instead specify a Homeserver URL of ``https://<server_name>:8448``
|
|
||||||
(or just ``https://<server_name>`` if you are using a reverse proxy).
|
|
||||||
If you prefer to use another client, refer to our
|
|
||||||
`client breakdown <https://matrix.org/docs/projects/clients-matrix>`_.
|
|
||||||
|
|
||||||
If all goes well you should at least be able to log in, create a room, and
|
Using a reverse proxy with Synapse
|
||||||
start sending messages.
|
==================================
|
||||||
|
|
||||||
.. _`client-user-reg`:
|
It is recommended to put a reverse proxy such as
|
||||||
|
`nginx <https://nginx.org/en/docs/http/ngx_http_proxy_module.html>`_,
|
||||||
|
`Apache <https://httpd.apache.org/docs/current/mod/mod_proxy_http.html>`_,
|
||||||
|
`Caddy <https://caddyserver.com/docs/quick-starts/reverse-proxy>`_,
|
||||||
|
`HAProxy <https://www.haproxy.org/>`_ or
|
||||||
|
`relayd <https://man.openbsd.org/relayd.8>`_ in front of Synapse. One advantage of
|
||||||
|
doing so is that it means that you can expose the default https port (443) to
|
||||||
|
Matrix clients without needing to run Synapse with root privileges.
|
||||||
|
|
||||||
Registering a new user from a client
|
For information on configuring one, see `<docs/reverse_proxy.md>`_.
|
||||||
------------------------------------
|
|
||||||
|
|
||||||
By default, registration of new users via Matrix clients is disabled. To enable
|
|
||||||
it:
|
|
||||||
|
|
||||||
1. In the
|
|
||||||
`registration config section <https://matrix-org.github.io/synapse/latest/usage/configuration/config_documentation.html#registration>`_
|
|
||||||
set ``enable_registration: true`` in ``homeserver.yaml``.
|
|
||||||
2. Then **either**:
|
|
||||||
|
|
||||||
a. set up a `CAPTCHA <https://matrix-org.github.io/synapse/latest/CAPTCHA_SETUP.html>`_, or
|
|
||||||
b. set ``enable_registration_without_verification: true`` in ``homeserver.yaml``.
|
|
||||||
|
|
||||||
We **strongly** recommend using a CAPTCHA, particularly if your homeserver is exposed to
|
|
||||||
the public internet. Without it, anyone can freely register accounts on your homeserver.
|
|
||||||
This can be exploited by attackers to create spambots targeting the rest of the Matrix
|
|
||||||
federation.
|
|
||||||
|
|
||||||
Your new user name will be formed partly from the ``server_name``, and partly
|
|
||||||
from a localpart you specify when you create the account. Your name will take
|
|
||||||
the form of::
|
|
||||||
|
|
||||||
@localpart:my.domain.name
|
|
||||||
|
|
||||||
(pronounced "at localpart on my dot domain dot name").
|
|
||||||
|
|
||||||
As when logging in, you will need to specify a "Custom server". Specify your
|
|
||||||
desired ``localpart`` in the 'User name' box.
|
|
||||||
|
|
||||||
Troubleshooting and support
|
|
||||||
===========================
|
|
||||||
|
|
||||||
The `Admin FAQ <https://matrix-org.github.io/synapse/latest/usage/administration/admin_faq.html>`_
|
|
||||||
includes tips on dealing with some common problems. For more details, see
|
|
||||||
`Synapse's wider documentation <https://matrix-org.github.io/synapse/latest/>`_.
|
|
||||||
|
|
||||||
For additional support installing or managing Synapse, please ask in the community
|
|
||||||
support room |room|_ (from a matrix.org account if necessary). We do not use GitHub
|
|
||||||
issues for support requests, only for bug reports and feature requests.
|
|
||||||
|
|
||||||
.. |room| replace:: ``#synapse:matrix.org``
|
|
||||||
.. _room: https://matrix.to/#/#synapse:matrix.org
|
|
||||||
|
|
||||||
.. |docs| replace:: ``docs``
|
|
||||||
.. _docs: docs
|
|
||||||
|
|
||||||
Identity Servers
|
Identity Servers
|
||||||
================
|
================
|
||||||
@@ -206,15 +242,34 @@ an email address with your account, or send an invite to another user via their
|
|||||||
email address.
|
email address.
|
||||||
|
|
||||||
|
|
||||||
Development
|
Password reset
|
||||||
===========
|
==============
|
||||||
|
|
||||||
|
Users can reset their password through their client. Alternatively, a server admin
|
||||||
|
can reset a user's password using the `admin API <docs/admin_api/user_admin_api.rst#reset-password>`_
|
||||||
|
or by directly editing the database as shown below.
|
||||||
|
|
||||||
|
First calculate the hash of the new password::
|
||||||
|
|
||||||
|
$ ~/synapse/env/bin/hash_password
|
||||||
|
Password:
|
||||||
|
Confirm password:
|
||||||
|
$2a$12$xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx
|
||||||
|
|
||||||
|
Then update the ``users`` table in the database::
|
||||||
|
|
||||||
|
UPDATE users SET password_hash='$2a$12$xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx'
|
||||||
|
WHERE name='@test:test.com';
|
||||||
|
|
||||||
|
|
||||||
|
Synapse Development
|
||||||
|
===================
|
||||||
|
|
||||||
We welcome contributions to Synapse from the community!
|
|
||||||
The best place to get started is our
|
The best place to get started is our
|
||||||
`guide for contributors <https://matrix-org.github.io/synapse/latest/development/contributing_guide.html>`_.
|
`guide for contributors <https://matrix-org.github.io/synapse/latest/development/contributing_guide.html>`_.
|
||||||
This is part of our larger `documentation <https://matrix-org.github.io/synapse/latest>`_, which includes
|
This is part of our larger `documentation <https://matrix-org.github.io/synapse/latest>`_, which includes
|
||||||
|
information for synapse developers as well as synapse administrators.
|
||||||
|
|
||||||
information for Synapse developers as well as Synapse administrators.
|
|
||||||
Developers might be particularly interested in:
|
Developers might be particularly interested in:
|
||||||
|
|
||||||
* `Synapse's database schema <https://matrix-org.github.io/synapse/latest/development/database_schema.html>`_,
|
* `Synapse's database schema <https://matrix-org.github.io/synapse/latest/development/database_schema.html>`_,
|
||||||
@@ -225,6 +280,181 @@ Alongside all that, join our developer community on Matrix:
|
|||||||
`#synapse-dev:matrix.org <https://matrix.to/#/#synapse-dev:matrix.org>`_, featuring real humans!
|
`#synapse-dev:matrix.org <https://matrix.to/#/#synapse-dev:matrix.org>`_, featuring real humans!
|
||||||
|
|
||||||
|
|
||||||
|
Quick start
|
||||||
|
-----------
|
||||||
|
|
||||||
|
Before setting up a development environment for synapse, make sure you have the
|
||||||
|
system dependencies (such as the python header files) installed - see
|
||||||
|
`Platform-specific prerequisites <https://matrix-org.github.io/synapse/latest/setup/installation.html#platform-specific-prerequisites>`_.
|
||||||
|
|
||||||
|
To check out a synapse for development, clone the git repo into a working
|
||||||
|
directory of your choice::
|
||||||
|
|
||||||
|
git clone https://github.com/matrix-org/synapse.git
|
||||||
|
cd synapse
|
||||||
|
|
||||||
|
Synapse has a number of external dependencies, that are easiest
|
||||||
|
to install using pip and a virtualenv::
|
||||||
|
|
||||||
|
python3 -m venv ./env
|
||||||
|
source ./env/bin/activate
|
||||||
|
pip install -e ".[all,dev]"
|
||||||
|
|
||||||
|
This will run a process of downloading and installing all the needed
|
||||||
|
dependencies into a virtual env. If any dependencies fail to install,
|
||||||
|
try installing the failing modules individually::
|
||||||
|
|
||||||
|
pip install -e "module-name"
|
||||||
|
|
||||||
|
We recommend using the demo which starts 3 federated instances running on ports `8080` - `8082`
|
||||||
|
|
||||||
|
./demo/start.sh
|
||||||
|
|
||||||
|
(to stop, you can use `./demo/stop.sh`)
|
||||||
|
|
||||||
|
If you just want to start a single instance of the app and run it directly::
|
||||||
|
|
||||||
|
# Create the homeserver.yaml config once
|
||||||
|
python -m synapse.app.homeserver \
|
||||||
|
--server-name my.domain.name \
|
||||||
|
--config-path homeserver.yaml \
|
||||||
|
--generate-config \
|
||||||
|
--report-stats=[yes|no]
|
||||||
|
|
||||||
|
# Start the app
|
||||||
|
python -m synapse.app.homeserver --config-path homeserver.yaml
|
||||||
|
|
||||||
|
|
||||||
|
Running the unit tests
|
||||||
|
----------------------
|
||||||
|
|
||||||
|
After getting up and running, you may wish to run Synapse's unit tests to
|
||||||
|
check that everything is installed correctly::
|
||||||
|
|
||||||
|
trial tests
|
||||||
|
|
||||||
|
This should end with a 'PASSED' result (note that exact numbers will
|
||||||
|
differ)::
|
||||||
|
|
||||||
|
Ran 1337 tests in 716.064s
|
||||||
|
|
||||||
|
PASSED (skips=15, successes=1322)
|
||||||
|
|
||||||
|
For more tips on running the unit tests, like running a specific test or
|
||||||
|
to see the logging output, see the `CONTRIBUTING doc <CONTRIBUTING.md#run-the-unit-tests>`_.
|
||||||
|
|
||||||
|
|
||||||
|
Running the Integration Tests
|
||||||
|
-----------------------------
|
||||||
|
|
||||||
|
Synapse is accompanied by `SyTest <https://github.com/matrix-org/sytest>`_,
|
||||||
|
a Matrix homeserver integration testing suite, which uses HTTP requests to
|
||||||
|
access the API as a Matrix client would. It is able to run Synapse directly from
|
||||||
|
the source tree, so installation of the server is not required.
|
||||||
|
|
||||||
|
Testing with SyTest is recommended for verifying that changes related to the
|
||||||
|
Client-Server API are functioning correctly. See the `SyTest installation
|
||||||
|
instructions <https://github.com/matrix-org/sytest#installing>`_ for details.
|
||||||
|
|
||||||
|
|
||||||
|
Platform dependencies
|
||||||
|
=====================
|
||||||
|
|
||||||
|
Synapse uses a number of platform dependencies such as Python and PostgreSQL,
|
||||||
|
and aims to follow supported upstream versions. See the
|
||||||
|
`<docs/deprecation_policy.md>`_ document for more details.
|
||||||
|
|
||||||
|
|
||||||
|
Troubleshooting
|
||||||
|
===============
|
||||||
|
|
||||||
|
Need help? Join our community support room on Matrix:
|
||||||
|
`#synapse:matrix.org <https://matrix.to/#/#synapse:matrix.org>`_
|
||||||
|
|
||||||
|
Running out of File Handles
|
||||||
|
---------------------------
|
||||||
|
|
||||||
|
If synapse runs out of file handles, it typically fails badly - live-locking
|
||||||
|
at 100% CPU, and/or failing to accept new TCP connections (blocking the
|
||||||
|
connecting client). Matrix currently can legitimately use a lot of file handles,
|
||||||
|
thanks to busy rooms like #matrix:matrix.org containing hundreds of participating
|
||||||
|
servers. The first time a server talks in a room it will try to connect
|
||||||
|
simultaneously to all participating servers, which could exhaust the available
|
||||||
|
file descriptors between DNS queries & HTTPS sockets, especially if DNS is slow
|
||||||
|
to respond. (We need to improve the routing algorithm used to be better than
|
||||||
|
full mesh, but as of March 2019 this hasn't happened yet).
|
||||||
|
|
||||||
|
If you hit this failure mode, we recommend increasing the maximum number of
|
||||||
|
open file handles to be at least 4096 (assuming a default of 1024 or 256).
|
||||||
|
This is typically done by editing ``/etc/security/limits.conf``
|
||||||
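As a concrete illustration of that advice (the wildcard scope and exact figures are examples, matching the 4096 floor mentioned above rather than a Synapse-specific recommendation):

    # Raise the soft and hard open-file limits for all users, then log in
    # again and confirm the limit seen by the shell that starts Synapse.
    echo "*  soft  nofile  4096" | sudo tee -a /etc/security/limits.conf
    echo "*  hard  nofile  4096" | sudo tee -a /etc/security/limits.conf
    ulimit -n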
|
|
||||||
|
Separately, Synapse may leak file handles if inbound HTTP requests get stuck
|
||||||
|
during processing - e.g. blocked behind a lock or talking to a remote server etc.
|
||||||
|
This is best diagnosed by matching up the 'Received request' and 'Processed request'
|
||||||
|
log lines and looking for any 'Processed request' lines which take more than
|
||||||
|
a few seconds to execute. Please let us know at #synapse:matrix.org if
|
||||||
|
you see this failure mode so we can help debug it, however.
|
||||||
|
|
||||||
|
Help!! Synapse is slow and eats all my RAM/CPU!
|
||||||
|
-----------------------------------------------
|
||||||
|
|
||||||
|
First, ensure you are running the latest version of Synapse, using Python 3
|
||||||
|
with a PostgreSQL database.
|
||||||
|
|
||||||
|
Synapse's architecture is quite RAM hungry currently - we deliberately
|
||||||
|
cache a lot of recent room data and metadata in RAM in order to speed up
|
||||||
|
common requests. We'll improve this in the future, but for now the easiest
|
||||||
|
way to either reduce the RAM usage (at the risk of slowing things down)
|
||||||
|
is to set the almost-undocumented ``SYNAPSE_CACHE_FACTOR`` environment
|
||||||
|
variable. The default is 0.5, which can be decreased to reduce RAM usage
|
||||||
|
in memory-constrained environments, or increased if performance starts to
|
||||||
|
degrade.
|
||||||
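For example, the variable can simply be set in the environment that launches the homeserver; 0.25 here is an arbitrary illustration of "lower than the 0.5 default":

    # Start Synapse with smaller caches to trade some speed for RAM.
    SYNAPSE_CACHE_FACTOR=0.25 python -m synapse.app.homeserver --config-path homeserver.yaml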
|
|
||||||
|
However, degraded performance due to a low cache factor, common on
|
||||||
|
machines with slow disks, often leads to explosions in memory use due
|
||||||
|
backlogged requests. In this case, reducing the cache factor will make
|
||||||
|
things worse. Instead, try increasing it drastically. 2.0 is a good
|
||||||
|
starting value.

Using `libjemalloc <http://jemalloc.net/>`_ can also yield a significant
improvement in overall memory use, and especially in terms of giving back
RAM to the OS. To use it, the library must simply be put in the
LD_PRELOAD environment variable when launching Synapse. On Debian, this
can be done by installing the ``libjemalloc1`` package and adding this
line to ``/etc/default/matrix-synapse``::

    LD_PRELOAD=/usr/lib/x86_64-linux-gnu/libjemalloc.so.1

This can make a significant difference on Python 2.7 - it's unclear how
much of an improvement it provides on Python 3.x.

If you're encountering high CPU use by the Synapse process itself, you
may be affected by a bug with presence tracking that leads to a
massive excess of outgoing federation requests (see `discussion
<https://github.com/matrix-org/synapse/issues/3971>`_). If metrics
indicate that your server is also issuing far more outgoing federation
requests than can be accounted for by your users' activity, this is a
likely cause. The misbehavior can be worked around by setting
the following in the Synapse config file:

.. code-block:: yaml

   presence:
     enabled: false

People can't accept room invitations from me
--------------------------------------------

The typical failure mode here is that you send an invitation to someone
to join a room or direct chat, but when they go to accept it, they get an
error (typically along the lines of "Invalid signature"). They might see
something like the following in their logs::

    2019-09-11 19:32:04,271 - synapse.federation.transport.server - 288 - WARNING - GET-11752 - authenticate_request failed: 401: Invalid signature for server <server> with key ed25519:a_EqML: Unable to verify signature for <server>

This is normally caused by a misconfiguration in your reverse-proxy. See
`<docs/reverse_proxy.md>`_ and double-check that your settings are correct.

.. |support| image:: https://img.shields.io/matrix/synapse:matrix.org?label=support&logo=matrix
    :alt: (get support on #synapse:matrix.org)
    :target: https://matrix.to/#/#synapse:matrix.org
@@ -1,20 +0,0 @@
# A build script for poetry that adds the rust extension.

import os
from typing import Any, Dict

from setuptools_rust import Binding, RustExtension


def build(setup_kwargs: Dict[str, Any]) -> None:
    original_project_dir = os.path.dirname(os.path.realpath(__file__))
    cargo_toml_path = os.path.join(original_project_dir, "rust", "Cargo.toml")

    extension = RustExtension(
        target="synapse.synapse_rust",
        path=cargo_toml_path,
        binding=Binding.PyO3,
        py_limited_api=True,
    )
    setup_kwargs.setdefault("rust_extensions", []).append(extension)
    setup_kwargs["zip_safe"] = False
changelog.d/10520.misc (new file)
@@ -0,0 +1 @@
Send and handle cross-signing messages using the stable prefix.

changelog.d/11331.misc (new file)
@@ -0,0 +1 @@
A test helper (`wait_for_background_updates`) no longer depends on classes defining a `store` property.

changelog.d/11358.feature (new file)
@@ -0,0 +1 @@
Add an admin API endpoint to force a local user to leave all non-public rooms in a space.
@@ -1 +0,0 @@
Add a stub Rust crate.

@@ -1 +0,0 @@
Note that `libpq` is required on ARM-based Macs.

@@ -1 +0,0 @@
Fix a bug introduced in Synapse v1.41.0 where the `/hierarchy` API returned non-standard information (a `room_id` field under each entry in `children_state`).

@@ -1 +0,0 @@
Add admin APIs to fetch messages within a particular window of time.

@@ -1 +0,0 @@
Cancel the processing of key query requests when they time out.

@@ -1 +0,0 @@
Improve validation of request bodies for the following client-server API endpoints: [`/account/3pid/msisdn/requestToken`](https://spec.matrix.org/v1.3/client-server-api/#post_matrixclientv3account3pidmsisdnrequesttoken) and [`/org.matrix.msc3720/account_status`](https://github.com/matrix-org/matrix-spec-proposals/blob/babolivier/user_status/proposals/3720-account-status.md#post-_matrixclientv1account_status).

@@ -1 +0,0 @@
Add & populate `event_stream_ordering` column on receipts table for future optimisation of push action processing. Contributed by Nick @ Beeper (@fizzadar).

@@ -1 +0,0 @@
Rename the `EventFormatVersions` enum values so that they line up with room version numbers.

@@ -1 +0,0 @@
Update trial old deps CI to use poetry 1.2.0.

@@ -1 +0,0 @@
Add experimental configuration option to allow disabling legacy Prometheus metric names.

@@ -1 +0,0 @@
Add experimental configuration option to allow disabling legacy Prometheus metric names.

@@ -1 +0,0 @@
Add experimental configuration option to allow disabling legacy Prometheus metric names.

@@ -1 +0,0 @@
Fix typechecking with latest types-jsonschema.

@@ -1 +0,0 @@
Update trial old deps CI to use poetry 1.2.0.

@@ -1 +0,0 @@
Fix a mistake in the config manual: the `event_cache_size` _is_ scaled by `caches.global_factor`. The documentation was incorrect since Synapse 1.22.

@@ -1 +0,0 @@
Fix a typo in the documentation for the login ratelimiting configuration.

@@ -1 +0,0 @@
Define Synapse's compatibility policy for SQLite versions.

@@ -1 +0,0 @@
Strip number suffix from instance name to consolidate services that traces are spread over.

@@ -1 +0,0 @@
Instrument `get_metadata_for_events` for understandable traces in Jaeger.

@@ -1 +0,0 @@
Add a stub Rust crate.

@@ -1 +0,0 @@
Add a stub Rust crate.

@@ -1 +0,0 @@
Fix a bug where Synapse fails to start if a signing key file contains an empty line.

@@ -1 +0,0 @@
Document the timestamp when a user accepts the consent, if [consent tracking](https://matrix-org.github.io/synapse/latest/consent_tracking.html) is used.

@@ -1 +0,0 @@
Add a stub Rust crate.

@@ -1 +0,0 @@
Remove old queries to join room memberships to current state events. Contributed by Nick @ Beeper (@fizzadar).

@@ -1 +0,0 @@
Fix a long standing bug where Synapse would fail to handle malformed user IDs or room aliases gracefully in certain cases.

@@ -1 +0,0 @@
Avoid raising an error due to malformed user IDs in `get_current_hosts_in_room`. Malformed user IDs cannot currently join a room, so this error would not be hit.

@@ -1 +0,0 @@
Fix a long standing bug where device lists would remain cached when remote users left and rejoined the last room shared with the local homeserver.

@@ -1 +0,0 @@
Update the docstrings for `get_users_in_room` and `get_current_hosts_in_room` to explain the impact of partial state.

@@ -1 +0,0 @@
Use an additional database query when persisting receipts.

@@ -1 +0,0 @@
Re-type hint some collections as read-only.

@@ -1 +0,0 @@
Remove unused Prometheus recording rules from `synapse-v2.rules` and add comments describing where the rest are used.

@@ -1 +0,0 @@
Add a check for editable installs if the Rust library needs rebuilding.

@@ -1 +0,0 @@
Synapse will now refuse to start if configured to use SQLite < 3.27.

@@ -1 +0,0 @@
Tag traces with the instance name to be able to easily jump into the right logs and filter traces by instance.

@@ -1 +0,0 @@
Add a stub Rust crate.

@@ -1 +0,0 @@
Concurrently fetch room push actions when calculating badge counts. Contributed by Nick @ Beeper (@fizzadar).

@@ -1 +0,0 @@
Fix a long-standing bug where the `cache_invalidation_stream_seq` sequence would begin at 1 instead of 2.

@@ -1 +0,0 @@
Add a stub Rust crate.

@@ -1 +0,0 @@
Update the script which makes full schema dumps.

@@ -1 +0,0 @@
Add a stub Rust crate.

@@ -1 +0,0 @@
Simplify the dependency DAG in the tests workflow.

@@ -1 +0,0 @@
Fix a long-standing spec compliance bug where Synapse would accept a trailing slash on the end of `/get_missing_events` federation requests.

@@ -1 +0,0 @@
Complement tests: put the postgres data folder on a host path on /tmp that we bind-mount, outside of the container storage, which can be quite slow.
@@ -16,7 +16,6 @@
 """ Starts a synapse client console. """
 import argparse
-import binascii
 import cmd
 import getpass
 import json
@@ -27,8 +26,9 @@ import urllib
 from http import TwistedHttpClient
 from typing import Optional
 
+import nacl.encoding
+import nacl.signing
 import urlparse
-from signedjson.key import NACL_ED25519, decode_verify_key_bytes
 from signedjson.sign import SignatureVerifyException, verify_signed_json
 
 from twisted.internet import defer, reactor, threads
@@ -41,6 +41,7 @@ TRUSTED_ID_SERVERS = ["localhost:8001"]
 
 
 class SynapseCmd(cmd.Cmd):
+
     """Basic synapse command-line processor.
 
     This processes commands from the user and calls the relevant HTTP methods.
@@ -419,8 +420,8 @@ class SynapseCmd(cmd.Cmd):
         pubKey = None
         pubKeyObj = yield self.http_client.do_request("GET", url)
         if "public_key" in pubKeyObj:
-            pubKey = decode_verify_key_bytes(
-                NACL_ED25519, binascii.unhexlify(pubKeyObj["public_key"])
+            pubKey = nacl.signing.VerifyKey(
+                pubKeyObj["public_key"], encoder=nacl.encoding.HexEncoder
             )
         else:
             print("No public key found in pubkey response!")
@@ -14,7 +14,6 @@ services:
     # failure
     restart: unless-stopped
     # See the readme for a full documentation of the environment settings
-    # NOTE: You must edit homeserver.yaml to use postgres, it defaults to sqlite
    environment:
      - SYNAPSE_CONFIG_PATH=/data/homeserver.yaml
    volumes:
@@ -1,125 +0,0 @@
# Setting up Synapse with Workers using Docker Compose

This directory describes how to deploy and manage Synapse and workers via [Docker Compose](https://docs.docker.com/compose/).

Example worker configuration files can be found [here](workers).

All examples and snippets assume that your Synapse service is called `synapse` in your Docker Compose file.

An example Docker Compose file can be found [here](docker-compose.yaml).

## Worker Service Examples in Docker Compose

In order to start the Synapse container as a worker, you must specify an `entrypoint` that loads both the `homeserver.yaml` and the configuration for the worker (`synapse-generic-worker-1.yaml` in the example below). You must also include the worker type in the environment variable `SYNAPSE_WORKER`, or alternatively pass `-m synapse.app.generic_worker` as part of the `entrypoint` after `"/start.py", "run"`.

### Generic Worker Example

```yaml
synapse-generic-worker-1:
  image: matrixdotorg/synapse:latest
  container_name: synapse-generic-worker-1
  restart: unless-stopped
  entrypoint: ["/start.py", "run", "--config-path=/data/homeserver.yaml", "--config-path=/data/workers/synapse-generic-worker-1.yaml"]
  healthcheck:
    test: ["CMD-SHELL", "curl -fSs http://localhost:8081/health || exit 1"]
    start_period: "5s"
    interval: "15s"
    timeout: "5s"
  volumes:
    - ${VOLUME_PATH}/data:/data:rw # Replace VOLUME_PATH with the path to your Synapse volume
  environment:
    SYNAPSE_WORKER: synapse.app.generic_worker
  # Expose port if required so your reverse proxy can send requests to this worker
  # Port configuration will depend on how the http listener is defined in the worker configuration file
  ports:
    - 8081:8081
  depends_on:
    - synapse
```

### Federation Sender Example

Please note: The federation sender does not receive REST API calls, so no exposed ports are required.

```yaml
synapse-federation-sender-1:
  image: matrixdotorg/synapse:latest
  container_name: synapse-federation-sender-1
  restart: unless-stopped
  entrypoint: ["/start.py", "run", "--config-path=/data/homeserver.yaml", "--config-path=/data/workers/synapse-federation-sender-1.yaml"]
  healthcheck:
    disable: true
  volumes:
    - ${VOLUME_PATH}/data:/data:rw # Replace VOLUME_PATH with the path to your Synapse volume
  environment:
    SYNAPSE_WORKER: synapse.app.federation_sender
  depends_on:
    - synapse
```

## `homeserver.yaml` Configuration

### Enable Redis

Locate the `redis` section of your `homeserver.yaml` and enable and configure it:

```yaml
redis:
  enabled: true
  host: redis
  port: 6379
  # password: <secret_password>
```

This assumes that your Redis service is called `redis` in your Docker Compose file.

### Add a replication Listener

Locate the `listeners` section of your `homeserver.yaml` and add the following replication listener:

```yaml
listeners:
  # Other listeners

  - port: 9093
    type: http
    resources:
      - names: [replication]
```

This listener is used by the workers for replication and is referred to in worker config files using the following settings:

```yaml
worker_replication_host: synapse
worker_replication_http_port: 9093
```

### Add Workers to `instance_map`

Locate the `instance_map` section of your `homeserver.yaml` and populate it with your workers:

```yaml
instance_map:
  synapse-generic-worker-1:        # The worker_name setting in your worker configuration file
    host: synapse-generic-worker-1 # The name of the worker service in your Docker Compose file
    port: 8034                     # The port assigned to the replication listener in your worker config file
  synapse-federation-sender-1:
    host: synapse-federation-sender-1
    port: 8034
```

### Configure Federation Senders

This section is applicable if you are using Federation senders (synapse.app.federation_sender). Locate the `send_federation` and `federation_sender_instances` settings in your `homeserver.yaml` and configure them:

```yaml
# This will disable federation sending on the main Synapse instance
send_federation: false

federation_sender_instances:
  - synapse-federation-sender-1 # The worker_name setting in your federation sender worker configuration file
```

## Other Worker types

Using the concepts shown here it is possible to create other worker types in Docker Compose. See the [Workers](https://matrix-org.github.io/synapse/latest/workers.html#available-worker-applications) documentation for a list of available workers.
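
For example, a pusher worker could follow the same pattern as the federation sender above. This is only a sketch: the service name, file names and settings here are illustrative and should be checked against the Workers documentation for your Synapse version.

```yaml
synapse-pusher-1:
  image: matrixdotorg/synapse:latest
  container_name: synapse-pusher-1
  restart: unless-stopped
  # Loads the main homeserver config plus a hypothetical pusher worker config
  entrypoint: ["/start.py", "run", "--config-path=/data/homeserver.yaml", "--config-path=/data/workers/synapse-pusher-1.yaml"]
  healthcheck:
    disable: true # No HTTP listener is exposed by this worker
  volumes:
    - ${VOLUME_PATH}/data:/data:rw
  environment:
    SYNAPSE_WORKER: synapse.app.pusher
  depends_on:
    - synapse
```

As with the federation sender, the main process must hand the work over in `homeserver.yaml` (at the time this guide was written, via `start_pushers: false` and a `pusher_instances` list naming the worker).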
@@ -1,77 +0,0 @@
networks:
  backend:

services:
  postgres:
    image: postgres:latest
    restart: unless-stopped
    volumes:
      - ${VOLUME_PATH}/var/lib/postgresql/data:/var/lib/postgresql/data:rw
    networks:
      - backend
    environment:
      POSTGRES_DB: synapse
      POSTGRES_USER: synapse_user
      POSTGRES_PASSWORD: postgres
      POSTGRES_INITDB_ARGS: --encoding=UTF8 --locale=C

  redis:
    image: redis:latest
    restart: unless-stopped
    networks:
      - backend

  synapse:
    image: matrixdotorg/synapse:latest
    container_name: synapse
    restart: unless-stopped
    volumes:
      - ${VOLUME_PATH}/data:/data:rw
    ports:
      - 8008:8008
    networks:
      - backend
    environment:
      SYNAPSE_CONFIG_DIR: /data
      SYNAPSE_CONFIG_PATH: /data/homeserver.yaml
    depends_on:
      - postgres

  synapse-generic-worker-1:
    image: matrixdotorg/synapse:latest
    container_name: synapse-generic-worker-1
    restart: unless-stopped
    entrypoint: ["/start.py", "run", "--config-path=/data/homeserver.yaml", "--config-path=/data/workers/synapse-generic-worker-1.yaml"]
    healthcheck:
      test: ["CMD-SHELL", "curl -fSs http://localhost:8081/health || exit 1"]
      start_period: "5s"
      interval: "15s"
      timeout: "5s"
    networks:
      - backend
    volumes:
      - ${VOLUME_PATH}/data:/data:rw # Replace VOLUME_PATH with the path to your Synapse volume
    environment:
      SYNAPSE_WORKER: synapse.app.generic_worker
    # Expose port if required so your reverse proxy can send requests to this worker
    # Port configuration will depend on how the http listener is defined in the worker configuration file
    ports:
      - 8081:8081
    depends_on:
      - synapse

  synapse-federation-sender-1:
    image: matrixdotorg/synapse:latest
    container_name: synapse-federation-sender-1
    restart: unless-stopped
    entrypoint: ["/start.py", "run", "--config-path=/data/homeserver.yaml", "--config-path=/data/workers/synapse-federation-sender-1.yaml"]
    healthcheck:
      disable: true
    networks:
      - backend
    volumes:
      - ${VOLUME_PATH}/data:/data:rw # Replace VOLUME_PATH with the path to your Synapse volume
    environment:
      SYNAPSE_WORKER: synapse.app.federation_sender
    depends_on:
      - synapse
@@ -1,14 +0,0 @@
worker_app: synapse.app.federation_sender
worker_name: synapse-federation-sender-1

# The replication listener on the main synapse process.
worker_replication_host: synapse
worker_replication_http_port: 9093

worker_listeners:
  - type: http
    port: 8034
    resources:
      - names: [replication]

worker_log_config: /data/federation_sender.log.config
@@ -1,19 +0,0 @@
worker_app: synapse.app.generic_worker
worker_name: synapse-generic-worker-1

# The replication listener on the main synapse process.
worker_replication_host: synapse
worker_replication_http_port: 9093

worker_listeners:
  - type: http
    port: 8034
    resources:
      - names: [replication]
  - type: http
    port: 8081
    x_forwarded: true
    resources:
      - names: [client, federation]

worker_log_config: /data/worker.log.config
contrib/experiments/cursesio.py (new file)
@@ -0,0 +1,165 @@
# Copyright 2014-2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import curses
import curses.wrapper
from curses.ascii import isprint

from twisted.internet import reactor


class CursesStdIO:
    def __init__(self, stdscr, callback=None):
        self.statusText = "Synapse test app -"
        self.searchText = ""
        self.stdscr = stdscr

        self.logLine = ""

        self.callback = callback

        self._setup()

    def _setup(self):
        self.stdscr.nodelay(1)  # Make non blocking

        self.rows, self.cols = self.stdscr.getmaxyx()
        self.lines = []

        curses.use_default_colors()

        self.paintStatus(self.statusText)
        self.stdscr.refresh()

    def set_callback(self, callback):
        self.callback = callback

    def fileno(self):
        """We want to select on FD 0"""
        return 0

    def connectionLost(self, reason):
        self.close()

    def print_line(self, text):
        """add a line to the internal list of lines"""

        self.lines.append(text)
        self.redraw()

    def print_log(self, text):
        self.logLine = text
        self.redraw()

    def redraw(self):
        """method for redisplaying lines based on internal list of lines"""

        self.stdscr.clear()
        self.paintStatus(self.statusText)
        i = 0
        index = len(self.lines) - 1
        while i < (self.rows - 3) and index >= 0:
            self.stdscr.addstr(self.rows - 3 - i, 0, self.lines[index], curses.A_NORMAL)
            i = i + 1
            index = index - 1

        self.printLogLine(self.logLine)

        self.stdscr.refresh()

    def paintStatus(self, text):
        if len(text) > self.cols:
            raise RuntimeError("TextTooLongError")

        self.stdscr.addstr(
            self.rows - 2, 0, text + " " * (self.cols - len(text)), curses.A_STANDOUT
        )

    def printLogLine(self, text):
        self.stdscr.addstr(
            0, 0, text + " " * (self.cols - len(text)), curses.A_STANDOUT
        )

    def doRead(self):
        """Input is ready!"""
        curses.noecho()
        c = self.stdscr.getch()  # read a character

        if c == curses.KEY_BACKSPACE:
            self.searchText = self.searchText[:-1]

        elif c == curses.KEY_ENTER or c == 10:
            text = self.searchText
            self.searchText = ""

            self.print_line(">> %s" % text)

            try:
                if self.callback:
                    self.callback.on_line(text)
            except Exception as e:
                self.print_line(str(e))

            self.stdscr.refresh()

        elif isprint(c):
            if len(self.searchText) == self.cols - 2:
                return
            self.searchText = self.searchText + chr(c)

        self.stdscr.addstr(
            self.rows - 1,
            0,
            self.searchText + (" " * (self.cols - len(self.searchText) - 2)),
        )

        self.paintStatus(self.statusText + " %d" % len(self.searchText))
        self.stdscr.move(self.rows - 1, len(self.searchText))
        self.stdscr.refresh()

    def logPrefix(self):
        return "CursesStdIO"

    def close(self):
        """clean up"""

        curses.nocbreak()
        self.stdscr.keypad(0)
        curses.echo()
        curses.endwin()


class Callback:
    def __init__(self, stdio):
        self.stdio = stdio

    def on_line(self, text):
        self.stdio.print_line(text)


def main(stdscr):
    screen = CursesStdIO(stdscr)  # create Screen object

    callback = Callback(screen)

    screen.set_callback(callback)

    stdscr.refresh()
    reactor.addReader(screen)
    reactor.run()
    screen.close()


if __name__ == "__main__":
    curses.wrapper(main)
367
contrib/experiments/test_messaging.py
Normal file
367
contrib/experiments/test_messaging.py
Normal file
@@ -0,0 +1,367 @@
|
|||||||
|
# Copyright 2014-2016 OpenMarket Ltd
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
# you may not use this file except in compliance with the License.
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
|
||||||
|
|
||||||
|
""" This is an example of using the server to server implementation to do a
|
||||||
|
basic chat style thing. It accepts commands from stdin and outputs to stdout.
|
||||||
|
|
||||||
|
It assumes that ucids are of the form <user>@<domain>, and uses <domain> as
|
||||||
|
the address of the remote home server to hit.
|
||||||
|
|
||||||
|
Usage:
|
||||||
|
python test_messaging.py <port>
|
||||||
|
|
||||||
|
Currently assumes the local address is localhost:<port>
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
|
||||||
|
import argparse
|
||||||
|
import curses.wrapper
|
||||||
|
import json
|
||||||
|
import logging
|
||||||
|
import os
|
||||||
|
import re
|
||||||
|
|
||||||
|
import cursesio
|
||||||
|
|
||||||
|
from twisted.internet import defer, reactor
|
||||||
|
from twisted.python import log
|
||||||
|
|
||||||
|
from synapse.app.homeserver import SynapseHomeServer
|
||||||
|
from synapse.federation import ReplicationHandler
|
||||||
|
from synapse.federation.units import Pdu
|
||||||
|
from synapse.util import origin_from_ucid
|
||||||
|
|
||||||
|
# from synapse.logging.utils import log_function
|
||||||
|
|
||||||
|
|
||||||
|
logger = logging.getLogger("example")
|
||||||
|
|
||||||
|
|
||||||
|
def excpetion_errback(failure):
|
||||||
|
logging.exception(failure)
|
||||||
|
|
||||||
|
|
||||||
|
class InputOutput:
|
||||||
|
"""This is responsible for basic I/O so that a user can interact with
|
||||||
|
the example app.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, screen, user):
|
||||||
|
self.screen = screen
|
||||||
|
self.user = user
|
||||||
|
|
||||||
|
def set_home_server(self, server):
|
||||||
|
self.server = server
|
||||||
|
|
||||||
|
def on_line(self, line):
|
||||||
|
"""This is where we process commands."""
|
||||||
|
|
||||||
|
try:
|
||||||
|
m = re.match(r"^join (\S+)$", line)
|
||||||
|
if m:
|
||||||
|
# The `sender` wants to join a room.
|
||||||
|
(room_name,) = m.groups()
|
||||||
|
self.print_line("%s joining %s" % (self.user, room_name))
|
||||||
|
self.server.join_room(room_name, self.user, self.user)
|
||||||
|
# self.print_line("OK.")
|
||||||
|
return
|
||||||
|
|
||||||
|
m = re.match(r"^invite (\S+) (\S+)$", line)
|
||||||
|
if m:
|
||||||
|
# `sender` wants to invite someone to a room
|
||||||
|
room_name, invitee = m.groups()
|
||||||
|
self.print_line("%s invited to %s" % (invitee, room_name))
|
||||||
|
self.server.invite_to_room(room_name, self.user, invitee)
|
||||||
|
# self.print_line("OK.")
|
||||||
|
return
|
||||||
|
|
||||||
|
m = re.match(r"^send (\S+) (.*)$", line)
|
||||||
|
if m:
|
||||||
|
# `sender` wants to message a room
|
||||||
|
room_name, body = m.groups()
|
||||||
|
self.print_line("%s send to %s" % (self.user, room_name))
|
||||||
|
self.server.send_message(room_name, self.user, body)
|
||||||
|
# self.print_line("OK.")
|
||||||
|
return
|
||||||
|
|
||||||
|
m = re.match(r"^backfill (\S+)$", line)
|
||||||
|
if m:
|
||||||
|
# we want to backfill a room
|
||||||
|
(room_name,) = m.groups()
|
||||||
|
self.print_line("backfill %s" % room_name)
|
||||||
|
self.server.backfill(room_name)
|
||||||
|
return
|
||||||
|
|
||||||
|
self.print_line("Unrecognized command")
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
logger.exception(e)
|
||||||
|
|
||||||
|
def print_line(self, text):
|
||||||
|
self.screen.print_line(text)
|
||||||
|
|
||||||
|
def print_log(self, text):
|
||||||
|
self.screen.print_log(text)
|
||||||
|
|
||||||
|
|
||||||
|
class IOLoggerHandler(logging.Handler):
|
||||||
|
def __init__(self, io):
|
||||||
|
logging.Handler.__init__(self)
|
||||||
|
self.io = io
|
||||||
|
|
||||||
|
def emit(self, record):
|
||||||
|
if record.levelno < logging.WARN:
|
||||||
|
return
|
||||||
|
|
||||||
|
msg = self.format(record)
|
||||||
|
self.io.print_log(msg)
|
||||||
|
|
||||||
|
|
||||||
|
class Room:
|
||||||
|
"""Used to store (in memory) the current membership state of a room, and
|
||||||
|
which home servers we should send PDUs associated with the room to.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, room_name):
|
||||||
|
self.room_name = room_name
|
||||||
|
self.invited = set()
|
||||||
|
self.participants = set()
|
||||||
|
self.servers = set()
|
||||||
|
|
||||||
|
self.oldest_server = None
|
||||||
|
|
||||||
|
self.have_got_metadata = False
|
||||||
|
|
||||||
|
def add_participant(self, participant):
|
||||||
|
"""Someone has joined the room"""
|
||||||
|
self.participants.add(participant)
|
||||||
|
self.invited.discard(participant)
|
||||||
|
|
||||||
|
server = origin_from_ucid(participant)
|
||||||
|
self.servers.add(server)
|
||||||
|
|
||||||
|
if not self.oldest_server:
|
||||||
|
self.oldest_server = server
|
||||||
|
|
||||||
|
def add_invited(self, invitee):
|
||||||
|
"""Someone has been invited to the room"""
|
||||||
|
self.invited.add(invitee)
|
||||||
|
self.servers.add(origin_from_ucid(invitee))
|
||||||
|
|
||||||
|
|
||||||
|
class HomeServer(ReplicationHandler):
|
||||||
|
"""A very basic home server implentation that allows people to join a
|
||||||
|
room and then invite other people.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, server_name, replication_layer, output):
|
||||||
|
self.server_name = server_name
|
||||||
|
self.replication_layer = replication_layer
|
||||||
|
self.replication_layer.set_handler(self)
|
||||||
|
|
||||||
|
self.joined_rooms = {}
|
||||||
|
|
||||||
|
self.output = output
|
||||||
|
|
||||||
|
def on_receive_pdu(self, pdu):
|
||||||
|
"""We just received a PDU"""
|
||||||
|
pdu_type = pdu.pdu_type
|
||||||
|
|
||||||
|
if pdu_type == "sy.room.message":
|
||||||
|
self._on_message(pdu)
|
||||||
|
elif pdu_type == "sy.room.member" and "membership" in pdu.content:
|
||||||
|
if pdu.content["membership"] == "join":
|
||||||
|
self._on_join(pdu.context, pdu.state_key)
|
||||||
|
elif pdu.content["membership"] == "invite":
|
||||||
|
self._on_invite(pdu.origin, pdu.context, pdu.state_key)
|
||||||
|
else:
|
||||||
|
self.output.print_line(
|
||||||
|
"#%s (unrec) %s = %s"
|
||||||
|
% (pdu.context, pdu.pdu_type, json.dumps(pdu.content))
|
||||||
|
)
|
||||||
|
|
||||||
|
def _on_message(self, pdu):
|
||||||
|
"""We received a message"""
|
||||||
|
self.output.print_line(
|
||||||
|
"#%s %s %s" % (pdu.context, pdu.content["sender"], pdu.content["body"])
|
||||||
|
)
|
||||||
|
|
||||||
|
def _on_join(self, context, joinee):
|
||||||
|
"""Someone has joined a room, either a remote user or a local user"""
|
||||||
|
room = self._get_or_create_room(context)
|
||||||
|
room.add_participant(joinee)
|
||||||
|
|
||||||
|
self.output.print_line("#%s %s %s" % (context, joinee, "*** JOINED"))
|
||||||
|
|
||||||
|
def _on_invite(self, origin, context, invitee):
|
||||||
|
"""Someone has been invited"""
|
||||||
|
room = self._get_or_create_room(context)
|
||||||
|
room.add_invited(invitee)
|
||||||
|
|
||||||
|
self.output.print_line("#%s %s %s" % (context, invitee, "*** INVITED"))
|
||||||
|
|
||||||
|
if not room.have_got_metadata and origin is not self.server_name:
|
||||||
|
logger.debug("Get room state")
|
||||||
|
self.replication_layer.get_state_for_context(origin, context)
|
||||||
|
room.have_got_metadata = True
|
||||||
|
|
||||||
|
@defer.inlineCallbacks
|
||||||
|
def send_message(self, room_name, sender, body):
|
||||||
|
"""Send a message to a room!"""
|
||||||
|
destinations = yield self.get_servers_for_context(room_name)
|
||||||
|
|
||||||
|
try:
|
||||||
|
yield self.replication_layer.send_pdu(
|
||||||
|
Pdu.create_new(
|
||||||
|
context=room_name,
|
||||||
|
pdu_type="sy.room.message",
|
||||||
|
content={"sender": sender, "body": body},
|
||||||
|
origin=self.server_name,
|
||||||
|
destinations=destinations,
|
||||||
|
)
|
||||||
|
)
|
||||||
|
except Exception as e:
|
||||||
|
logger.exception(e)
|
||||||
|
|
||||||
|
@defer.inlineCallbacks
|
||||||
|
def join_room(self, room_name, sender, joinee):
|
||||||
|
"""Join a room!"""
|
||||||
|
self._on_join(room_name, joinee)
|
||||||
|
|
||||||
|
destinations = yield self.get_servers_for_context(room_name)
|
||||||
|
|
||||||
|
try:
|
||||||
|
pdu = Pdu.create_new(
|
||||||
|
context=room_name,
|
||||||
|
pdu_type="sy.room.member",
|
||||||
|
is_state=True,
|
||||||
|
state_key=joinee,
|
||||||
|
content={"membership": "join"},
|
||||||
|
origin=self.server_name,
|
||||||
|
destinations=destinations,
|
||||||
|
)
|
||||||
|
yield self.replication_layer.send_pdu(pdu)
|
||||||
|
except Exception as e:
|
||||||
|
logger.exception(e)
|
||||||
|
|
||||||
|
@defer.inlineCallbacks
|
||||||
|
def invite_to_room(self, room_name, sender, invitee):
|
||||||
|
"""Invite someone to a room!"""
|
||||||
|
self._on_invite(self.server_name, room_name, invitee)
|
||||||
|
|
||||||
|
destinations = yield self.get_servers_for_context(room_name)
|
||||||
|
|
||||||
|
try:
|
||||||
|
yield self.replication_layer.send_pdu(
|
||||||
|
Pdu.create_new(
|
||||||
|
context=room_name,
|
||||||
|
is_state=True,
|
||||||
|
pdu_type="sy.room.member",
|
||||||
|
state_key=invitee,
|
||||||
|
content={"membership": "invite"},
|
||||||
|
origin=self.server_name,
|
||||||
|
destinations=destinations,
|
||||||
|
)
|
||||||
|
)
|
||||||
|
except Exception as e:
|
||||||
|
logger.exception(e)
|
||||||
|
|
||||||
|
def backfill(self, room_name, limit=5):
|
||||||
|
room = self.joined_rooms.get(room_name)
|
||||||
|
|
||||||
|
if not room:
|
||||||
|
return
|
||||||
|
|
||||||
|
dest = room.oldest_server
|
||||||
|
|
||||||
|
return self.replication_layer.backfill(dest, room_name, limit)
|
||||||
|
|
||||||
|
def _get_room_remote_servers(self, room_name):
|
||||||
|
return list(self.joined_rooms.setdefault(room_name).servers)
|
||||||
|
|
||||||
|
def _get_or_create_room(self, room_name):
|
||||||
|
return self.joined_rooms.setdefault(room_name, Room(room_name))
|
||||||
|
|
||||||
|
def get_servers_for_context(self, context):
|
||||||
|
return defer.succeed(
|
||||||
|
self.joined_rooms.setdefault(context, Room(context)).servers
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def main(stdscr):
|
||||||
|
parser = argparse.ArgumentParser()
|
||||||
|
parser.add_argument("user", type=str)
|
||||||
|
parser.add_argument("-v", "--verbose", action="count")
|
||||||
|
args = parser.parse_args()
|
||||||
|
|
||||||
|
user = args.user
|
||||||
|
server_name = origin_from_ucid(user)
|
||||||
|
|
||||||
|
# Set up logging
|
||||||
|
|
||||||
|
root_logger = logging.getLogger()
|
||||||
|
|
||||||
|
formatter = logging.Formatter(
|
||||||
|
"%(asctime)s - %(name)s - %(lineno)d - %(levelname)s - %(message)s"
|
||||||
|
)
|
||||||
|
if not os.path.exists("logs"):
|
||||||
|
os.makedirs("logs")
|
||||||
|
fh = logging.FileHandler("logs/%s" % user)
|
||||||
|
fh.setFormatter(formatter)
|
||||||
|
|
||||||
|
root_logger.addHandler(fh)
|
||||||
|
root_logger.setLevel(logging.DEBUG)
|
||||||
|
|
||||||
|
# Hack: The only way to get it to stop logging to sys.stderr :(
|
||||||
|
log.theLogPublisher.observers = []
|
||||||
|
observer = log.PythonLoggingObserver()
|
||||||
|
observer.start()
|
||||||
|
|
||||||
|
# Set up synapse server
|
||||||
|
|
||||||
|
curses_stdio = cursesio.CursesStdIO(stdscr)
|
||||||
|
input_output = InputOutput(curses_stdio, user)
|
||||||
|
|
||||||
|
curses_stdio.set_callback(input_output)
|
||||||
|
|
||||||
|
app_hs = SynapseHomeServer(server_name, db_name="dbs/%s" % user)
|
||||||
|
replication = app_hs.get_replication_layer()
|
||||||
|
|
||||||
|
hs = HomeServer(server_name, replication, curses_stdio)
|
||||||
|
|
||||||
|
input_output.set_home_server(hs)
|
||||||
|
|
||||||
|
# Add input_output logger
|
||||||
|
io_logger = IOLoggerHandler(input_output)
|
||||||
|
io_logger.setFormatter(formatter)
|
||||||
|
root_logger.addHandler(io_logger)
|
||||||
|
|
||||||
|
# Start!
|
||||||
|
|
||||||
|
try:
|
||||||
|
port = int(server_name.split(":")[1])
|
||||||
|
except Exception:
|
||||||
|
port = 12345
|
||||||
|
|
||||||
|
app_hs.get_http_server().start_listening(port)
|
||||||
|
|
||||||
|
reactor.addReader(curses_stdio)
|
||||||
|
|
||||||
|
reactor.run()
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
curses.wrapper(main)
|
||||||
File diff suppressed because it is too large
@@ -1,3 +1,11 @@
|
|||||||
|
import argparse
|
||||||
|
import cgi
|
||||||
|
import datetime
|
||||||
|
import json
|
||||||
|
|
||||||
|
import pydot
|
||||||
|
import urllib2
|
||||||
|
|
||||||
# Copyright 2014-2016 OpenMarket Ltd
|
# Copyright 2014-2016 OpenMarket Ltd
|
||||||
#
|
#
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
@@ -12,25 +20,12 @@
|
|||||||
# See the License for the specific language governing permissions and
|
# See the License for the specific language governing permissions and
|
||||||
# limitations under the License.
|
# limitations under the License.
|
||||||
|
|
||||||
import argparse
|
|
||||||
import cgi
|
|
||||||
import datetime
|
|
||||||
import json
|
|
||||||
import urllib.request
|
|
||||||
from typing import List
|
|
||||||
|
|
||||||
import pydot
|
def make_name(pdu_id, origin):
|
||||||
|
return "%s@%s" % (pdu_id, origin)
|
||||||
|
|
||||||
|
|
||||||
def make_name(pdu_id: str, origin: str) -> str:
|
def make_graph(pdus, room, filename_prefix):
|
||||||
return f"{pdu_id}@{origin}"
|
|
||||||
|
|
||||||
|
|
||||||
def make_graph(pdus: List[dict], filename_prefix: str) -> None:
|
|
||||||
"""
|
|
||||||
Generate a dot and SVG file for a graph of events in the room based on the
|
|
||||||
topological ordering by querying a homeserver.
|
|
||||||
"""
|
|
||||||
pdu_map = {}
|
pdu_map = {}
|
||||||
node_map = {}
|
node_map = {}
|
||||||
|
|
||||||
@@ -116,10 +111,10 @@ def make_graph(pdus: List[dict], filename_prefix: str) -> None:
|
|||||||
graph.write_svg("%s.svg" % filename_prefix, prog="dot")
|
graph.write_svg("%s.svg" % filename_prefix, prog="dot")
|
||||||
|
|
||||||
|
|
||||||
def get_pdus(host: str, room: str) -> List[dict]:
|
def get_pdus(host, room):
|
||||||
transaction = json.loads(
|
transaction = json.loads(
|
||||||
urllib.request.urlopen(
|
urllib2.urlopen(
|
||||||
f"http://{host}/_matrix/federation/v1/context/{room}/"
|
"http://%s/_matrix/federation/v1/context/%s/" % (host, room)
|
||||||
).read()
|
).read()
|
||||||
)
|
)
|
||||||
|
|
||||||
@@ -146,4 +141,4 @@ if __name__ == "__main__":
|
|||||||
|
|
||||||
pdus = get_pdus(host, room)
|
pdus = get_pdus(host, room)
|
||||||
|
|
||||||
make_graph(pdus, prefix)
|
make_graph(pdus, room, prefix)
|
||||||
|
|||||||
@@ -14,31 +14,22 @@
|
|||||||
|
|
||||||
|
|
||||||
import argparse
|
import argparse
|
||||||
|
import cgi
|
||||||
import datetime
|
import datetime
|
||||||
import html
|
|
||||||
import json
|
import json
|
||||||
import sqlite3
|
import sqlite3
|
||||||
|
|
||||||
import pydot
|
import pydot
|
||||||
|
|
||||||
from synapse.api.room_versions import KNOWN_ROOM_VERSIONS
|
from synapse.events import FrozenEvent
|
||||||
from synapse.events import make_event_from_dict
|
|
||||||
from synapse.util.frozenutils import unfreeze
|
from synapse.util.frozenutils import unfreeze
|
||||||
|
|
||||||
|
|
||||||
def make_graph(db_name: str, room_id: str, file_prefix: str, limit: int) -> None:
|
def make_graph(db_name, room_id, file_prefix, limit):
|
||||||
"""
|
|
||||||
Generate a dot and SVG file for a graph of events in the room based on the
|
|
||||||
topological ordering by reading from a Synapse SQLite database.
|
|
||||||
"""
|
|
||||||
conn = sqlite3.connect(db_name)
|
conn = sqlite3.connect(db_name)
|
||||||
|
|
||||||
sql = "SELECT room_version FROM rooms WHERE room_id = ?"
|
|
||||||
c = conn.execute(sql, (room_id,))
|
|
||||||
room_version = KNOWN_ROOM_VERSIONS[c.fetchone()[0]]
|
|
||||||
|
|
||||||
sql = (
|
sql = (
|
||||||
"SELECT json, internal_metadata FROM event_json as j "
|
"SELECT json FROM event_json as j "
|
||||||
"INNER JOIN events as e ON e.event_id = j.event_id "
|
"INNER JOIN events as e ON e.event_id = j.event_id "
|
||||||
"WHERE j.room_id = ?"
|
"WHERE j.room_id = ?"
|
||||||
)
|
)
|
||||||
@@ -52,10 +43,7 @@ def make_graph(db_name: str, room_id: str, file_prefix: str, limit: int) -> None
|
|||||||
|
|
||||||
c = conn.execute(sql, args)
|
c = conn.execute(sql, args)
|
||||||
|
|
||||||
events = [
|
events = [FrozenEvent(json.loads(e[0])) for e in c.fetchall()]
|
||||||
make_event_from_dict(json.loads(e[0]), room_version, json.loads(e[1]))
|
|
||||||
for e in c.fetchall()
|
|
||||||
]
|
|
||||||
|
|
||||||
events.sort(key=lambda e: e.depth)
|
events.sort(key=lambda e: e.depth)
|
||||||
|
|
||||||
@@ -96,7 +84,7 @@ def make_graph(db_name: str, room_id: str, file_prefix: str, limit: int) -> None
|
|||||||
"name": event.event_id,
|
"name": event.event_id,
|
||||||
"type": event.type,
|
"type": event.type,
|
||||||
"state_key": event.get("state_key", None),
|
"state_key": event.get("state_key", None),
|
||||||
"content": html.escape(content, quote=True),
|
"content": cgi.escape(content, quote=True),
|
||||||
"time": t,
|
"time": t,
|
||||||
"depth": event.depth,
|
"depth": event.depth,
|
||||||
"state_group": state_group,
|
"state_group": state_group,
|
||||||
@@ -108,11 +96,11 @@ def make_graph(db_name: str, room_id: str, file_prefix: str, limit: int) -> None
|
|||||||
graph.add_node(node)
|
graph.add_node(node)
|
||||||
|
|
||||||
for event in events:
|
for event in events:
|
||||||
for prev_id in event.prev_event_ids():
|
for prev_id, _ in event.prev_events:
|
||||||
try:
|
try:
|
||||||
end_node = node_map[prev_id]
|
end_node = node_map[prev_id]
|
||||||
except Exception:
|
except Exception:
|
||||||
end_node = pydot.Node(name=prev_id, label=f"<<b>{prev_id}</b>>")
|
end_node = pydot.Node(name=prev_id, label="<<b>%s</b>>" % (prev_id,))
|
||||||
|
|
||||||
node_map[prev_id] = end_node
|
node_map[prev_id] = end_node
|
||||||
graph.add_node(end_node)
|
graph.add_node(end_node)
|
||||||
@@ -124,7 +112,7 @@ def make_graph(db_name: str, room_id: str, file_prefix: str, limit: int) -> None
|
|||||||
if len(event_ids) <= 1:
|
if len(event_ids) <= 1:
|
||||||
continue
|
continue
|
||||||
|
|
||||||
cluster = pydot.Cluster(str(group), label=f"<State Group: {str(group)}>")
|
cluster = pydot.Cluster(str(group), label="<State Group: %s>" % (str(group),))
|
||||||
|
|
||||||
for event_id in event_ids:
|
for event_id in event_ids:
|
||||||
cluster.add_node(node_map[event_id])
|
cluster.add_node(node_map[event_id])
|
||||||
@@ -138,7 +126,7 @@ def make_graph(db_name: str, room_id: str, file_prefix: str, limit: int) -> None
|
|||||||
if __name__ == "__main__":
|
if __name__ == "__main__":
|
||||||
parser = argparse.ArgumentParser(
|
parser = argparse.ArgumentParser(
|
||||||
description="Generate a PDU graph for a given room by talking "
|
description="Generate a PDU graph for a given room by talking "
|
||||||
"to the given Synapse SQLite file to get the list of PDUs. \n"
|
"to the given homeserver to get the list of PDUs. \n"
|
||||||
"Requires pydot."
|
"Requires pydot."
|
||||||
)
|
)
|
||||||
parser.add_argument(
|
parser.add_argument(
|
||||||
|
|||||||
@@ -1,3 +1,13 @@
|
|||||||
|
import argparse
|
||||||
|
import cgi
|
||||||
|
import datetime
|
||||||
|
|
||||||
|
import pydot
|
||||||
|
import simplejson as json
|
||||||
|
|
||||||
|
from synapse.events import FrozenEvent
|
||||||
|
from synapse.util.frozenutils import unfreeze
|
||||||
|
|
||||||
# Copyright 2016 OpenMarket Ltd
|
# Copyright 2016 OpenMarket Ltd
|
||||||
#
|
#
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
@@ -12,35 +22,15 @@
|
|||||||
# See the License for the specific language governing permissions and
|
# See the License for the specific language governing permissions and
|
||||||
# limitations under the License.
|
# limitations under the License.
|
||||||
|
|
||||||
import argparse
|
|
||||||
import datetime
|
|
||||||
import html
|
|
||||||
import json
|
|
||||||
|
|
||||||
import pydot
|
def make_graph(file_name, room_id, file_prefix, limit):
|
||||||
|
|
||||||
from synapse.api.room_versions import KNOWN_ROOM_VERSIONS
|
|
||||||
from synapse.events import make_event_from_dict
|
|
||||||
from synapse.util.frozenutils import unfreeze
|
|
||||||
|
|
||||||
|
|
||||||
def make_graph(file_name: str, file_prefix: str, limit: int) -> None:
|
|
||||||
"""
|
|
||||||
Generate a dot and SVG file for a graph of events in the room based on the
|
|
||||||
topological ordering by reading line-delimited JSON from a file.
|
|
||||||
"""
|
|
||||||
print("Reading lines")
|
print("Reading lines")
|
||||||
with open(file_name) as f:
|
with open(file_name) as f:
|
||||||
lines = f.readlines()
|
lines = f.readlines()
|
||||||
|
|
||||||
print("Read lines")
|
print("Read lines")
|
||||||
|
|
||||||
# Figure out the room version, assume the first line is the create event.
|
events = [FrozenEvent(json.loads(line)) for line in lines]
|
||||||
room_version = KNOWN_ROOM_VERSIONS[
|
|
||||||
json.loads(lines[0]).get("content", {}).get("room_version")
|
|
||||||
]
|
|
||||||
|
|
||||||
events = [make_event_from_dict(json.loads(line), room_version) for line in lines]
|
|
||||||
|
|
||||||
print("Loaded events.")
|
print("Loaded events.")
|
||||||
|
|
||||||
@@ -76,8 +66,8 @@ def make_graph(file_name: str, file_prefix: str, limit: int) -> None:
|
|||||||
content.append(
|
content.append(
|
||||||
"<b>%s</b>: %s,"
|
"<b>%s</b>: %s,"
|
||||||
% (
|
% (
|
||||||
html.escape(key, quote=True).encode("ascii", "xmlcharrefreplace"),
|
cgi.escape(key, quote=True).encode("ascii", "xmlcharrefreplace"),
|
||||||
html.escape(value, quote=True).encode("ascii", "xmlcharrefreplace"),
|
cgi.escape(value, quote=True).encode("ascii", "xmlcharrefreplace"),
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
|
|
||||||
@@ -111,11 +101,11 @@ def make_graph(file_name: str, file_prefix: str, limit: int) -> None:
|
|||||||
print("Created Nodes")
|
print("Created Nodes")
|
||||||
|
|
||||||
for event in events:
|
for event in events:
|
||||||
for prev_id in event.prev_event_ids():
|
for prev_id, _ in event.prev_events:
|
||||||
try:
|
try:
|
||||||
end_node = node_map[prev_id]
|
end_node = node_map[prev_id]
|
||||||
except Exception:
|
except Exception:
|
||||||
end_node = pydot.Node(name=prev_id, label=f"<<b>{prev_id}</b>>")
|
end_node = pydot.Node(name=prev_id, label="<<b>%s</b>>" % (prev_id,))
|
||||||
|
|
||||||
node_map[prev_id] = end_node
|
node_map[prev_id] = end_node
|
||||||
graph.add_node(end_node)
|
graph.add_node(end_node)
|
||||||
@@ -149,7 +139,8 @@ if __name__ == "__main__":
|
|||||||
)
|
)
|
||||||
parser.add_argument("-l", "--limit", help="Only retrieve the last N events.")
|
parser.add_argument("-l", "--limit", help="Only retrieve the last N events.")
|
||||||
parser.add_argument("event_file")
|
parser.add_argument("event_file")
|
||||||
|
parser.add_argument("room")
|
||||||
|
|
||||||
args = parser.parse_args()
|
args = parser.parse_args()
|
||||||
|
|
||||||
make_graph(args.event_file, args.prefix, args.limit)
|
make_graph(args.event_file, args.room, args.prefix, args.limit)
|
||||||
|
|||||||
295
contrib/jitsimeetbridge/jitsimeetbridge.py
Normal file
295
contrib/jitsimeetbridge/jitsimeetbridge.py
Normal file
@@ -0,0 +1,295 @@
|
|||||||
|
#!/usr/bin/env python
|
||||||
|
|
||||||
|
"""
|
||||||
|
This is an attempt at bridging matrix clients into a Jitsi meet room via Matrix
|
||||||
|
video call. It uses hard-coded xml strings over XMPP BOSH. It can display one
|
||||||
|
of the streams from the Jitsi bridge until the second lot of SDP comes down and
|
||||||
|
we set the remote SDP at which point the stream ends. Our video never gets to
|
||||||
|
the bridge.
|
||||||
|
|
||||||
|
Requires:
|
||||||
|
npm install jquery jsdom
|
||||||
|
"""
|
||||||
|
import json
|
||||||
|
import subprocess
|
||||||
|
import time
|
||||||
|
|
||||||
|
import gevent
|
||||||
|
import grequests
|
||||||
|
from BeautifulSoup import BeautifulSoup
|
||||||
|
|
||||||
|
ACCESS_TOKEN = ""
|
||||||
|
|
||||||
|
MATRIXBASE = "https://matrix.org/_matrix/client/api/v1/"
|
||||||
|
MYUSERNAME = "@davetest:matrix.org"
|
||||||
|
|
||||||
|
HTTPBIND = "https://meet.jit.si/http-bind"
|
||||||
|
# HTTPBIND = 'https://jitsi.vuc.me/http-bind'
|
||||||
|
# ROOMNAME = "matrix"
|
||||||
|
ROOMNAME = "pibble"
|
||||||
|
|
||||||
|
HOST = "guest.jit.si"
|
||||||
|
# HOST="jitsi.vuc.me"
|
||||||
|
|
||||||
|
TURNSERVER = "turn.guest.jit.si"
|
||||||
|
# TURNSERVER="turn.jitsi.vuc.me"
|
||||||
|
|
||||||
|
ROOMDOMAIN = "meet.jit.si"
|
||||||
|
# ROOMDOMAIN="conference.jitsi.vuc.me"
|
||||||
|
|
||||||
|
|
||||||
|
class TrivialMatrixClient:
|
||||||
|
def __init__(self, access_token):
|
||||||
|
self.token = None
|
||||||
|
self.access_token = access_token
|
||||||
|
|
||||||
|
def getEvent(self):
|
||||||
|
while True:
|
||||||
|
url = (
|
||||||
|
MATRIXBASE
|
||||||
|
+ "events?access_token="
|
||||||
|
+ self.access_token
|
||||||
|
+ "&timeout=60000"
|
||||||
|
)
|
||||||
|
if self.token:
|
||||||
|
url += "&from=" + self.token
|
||||||
|
req = grequests.get(url)
|
||||||
|
resps = grequests.map([req])
|
||||||
|
obj = json.loads(resps[0].content)
|
||||||
|
print("incoming from matrix", obj)
|
||||||
|
if "end" not in obj:
|
||||||
|
continue
|
||||||
|
self.token = obj["end"]
|
||||||
|
if len(obj["chunk"]):
|
||||||
|
return obj["chunk"][0]
|
||||||
|
|
||||||
|
def joinRoom(self, roomId):
|
||||||
|
url = MATRIXBASE + "rooms/" + roomId + "/join?access_token=" + self.access_token
|
||||||
|
print(url)
|
||||||
|
headers = {"Content-Type": "application/json"}
|
||||||
|
req = grequests.post(url, headers=headers, data="{}")
|
||||||
|
resps = grequests.map([req])
|
||||||
|
obj = json.loads(resps[0].content)
|
||||||
|
print("response: ", obj)
|
||||||
|
|
||||||
|
def sendEvent(self, roomId, evType, event):
|
||||||
|
url = (
|
||||||
|
MATRIXBASE
|
||||||
|
+ "rooms/"
|
||||||
|
+ roomId
|
||||||
|
+ "/send/"
|
||||||
|
+ evType
|
||||||
|
+ "?access_token="
|
||||||
|
+ self.access_token
|
||||||
|
)
|
||||||
|
print(url)
|
||||||
|
print(json.dumps(event))
|
||||||
|
headers = {"Content-Type": "application/json"}
|
||||||
|
req = grequests.post(url, headers=headers, data=json.dumps(event))
|
||||||
|
resps = grequests.map([req])
|
||||||
|
obj = json.loads(resps[0].content)
|
||||||
|
print("response: ", obj)
|
||||||
|
|
||||||
|
|
||||||
|
xmppClients = {}
|
||||||
|
|
||||||
|
|
||||||
|
def matrixLoop():
|
||||||
|
while True:
|
||||||
|
ev = matrixCli.getEvent()
|
||||||
|
print(ev)
|
||||||
|
if ev["type"] == "m.room.member":
|
||||||
|
print("membership event")
|
||||||
|
if ev["membership"] == "invite" and ev["state_key"] == MYUSERNAME:
|
||||||
|
roomId = ev["room_id"]
|
||||||
|
print("joining room %s" % (roomId))
|
||||||
|
matrixCli.joinRoom(roomId)
|
||||||
|
elif ev["type"] == "m.room.message":
|
||||||
|
if ev["room_id"] in xmppClients:
|
||||||
|
print("already have a bridge for that user, ignoring")
|
||||||
|
continue
|
||||||
|
print("got message, connecting")
|
||||||
|
xmppClients[ev["room_id"]] = TrivialXmppClient(ev["room_id"], ev["user_id"])
|
||||||
|
gevent.spawn(xmppClients[ev["room_id"]].xmppLoop)
|
||||||
|
elif ev["type"] == "m.call.invite":
|
||||||
|
print("Incoming call")
|
||||||
|
# sdp = ev['content']['offer']['sdp']
|
||||||
|
# print "sdp: %s" % (sdp)
|
||||||
|
# xmppClients[ev['room_id']] = TrivialXmppClient(ev['room_id'], ev['user_id'])
|
||||||
|
# gevent.spawn(xmppClients[ev['room_id']].xmppLoop)
|
||||||
|
elif ev["type"] == "m.call.answer":
|
||||||
|
print("Call answered")
|
||||||
|
sdp = ev["content"]["answer"]["sdp"]
|
||||||
|
if ev["room_id"] not in xmppClients:
|
||||||
|
print("We didn't have a call for that room")
|
||||||
|
continue
|
||||||
|
# should probably check call ID too
|
||||||
|
xmppCli = xmppClients[ev["room_id"]]
|
||||||
|
xmppCli.sendAnswer(sdp)
|
||||||
|
elif ev["type"] == "m.call.hangup":
|
||||||
|
if ev["room_id"] in xmppClients:
|
||||||
|
xmppClients[ev["room_id"]].stop()
|
||||||
|
del xmppClients[ev["room_id"]]
|
||||||
|
|
||||||
|
|
||||||
|
class TrivialXmppClient:
|
||||||
|
def __init__(self, matrixRoom, userId):
|
||||||
|
self.rid = 0
|
||||||
|
self.matrixRoom = matrixRoom
|
||||||
|
self.userId = userId
|
||||||
|
self.running = True
|
||||||
|
|
||||||
|
def stop(self):
|
||||||
|
self.running = False
|
||||||
|
|
||||||
|
def nextRid(self):
|
||||||
|
self.rid += 1
|
||||||
|
return "%d" % (self.rid)
|
||||||
|
|
||||||
|
def sendIq(self, xml):
|
||||||
|
fullXml = (
|
||||||
|
"<body rid='%s' xmlns='http://jabber.org/protocol/httpbind' sid='%s'>%s</body>"
|
||||||
|
% (self.nextRid(), self.sid, xml)
|
||||||
|
)
|
||||||
|
# print "\t>>>%s" % (fullXml)
|
||||||
|
return self.xmppPoke(fullXml)
|
||||||
|
|
||||||
|
def xmppPoke(self, xml):
|
||||||
|
headers = {"Content-Type": "application/xml"}
|
||||||
|
req = grequests.post(HTTPBIND, verify=False, headers=headers, data=xml)
|
||||||
|
resps = grequests.map([req])
|
||||||
|
obj = BeautifulSoup(resps[0].content)
|
||||||
|
return obj
|
||||||
|
|
||||||
|
def sendAnswer(self, answer):
|
||||||
|
print("sdp from matrix client", answer)
|
||||||
|
p = subprocess.Popen(
|
||||||
|
["node", "unjingle/unjingle.js", "--sdp"],
|
||||||
|
stdin=subprocess.PIPE,
|
||||||
|
stdout=subprocess.PIPE,
|
||||||
|
)
|
||||||
|
jingle, out_err = p.communicate(answer)
|
||||||
|
jingle = jingle % {
|
||||||
|
"tojid": self.callfrom,
|
||||||
|
"action": "session-accept",
|
||||||
|
"initiator": self.callfrom,
|
||||||
|
"responder": self.jid,
|
||||||
|
"sid": self.callsid,
|
||||||
|
}
|
||||||
|
print("answer jingle from sdp", jingle)
|
||||||
|
res = self.sendIq(jingle)
|
||||||
|
print("reply from answer: ", res)
|
||||||
|
|
||||||
|
self.ssrcs = {}
|
||||||
|
jingleSoup = BeautifulSoup(jingle)
|
||||||
|
for cont in jingleSoup.iq.jingle.findAll("content"):
|
||||||
|
if cont.description:
|
||||||
|
self.ssrcs[cont["name"]] = cont.description["ssrc"]
|
||||||
|
print("my ssrcs:", self.ssrcs)
|
||||||
|
|
||||||
|
gevent.joinall([gevent.spawn(self.advertiseSsrcs)])
|
||||||
|
|
||||||
|
def advertiseSsrcs(self):
|
||||||
|
time.sleep(7)
|
||||||
|
print("SSRC spammer started")
|
||||||
|
while self.running:
|
||||||
|
ssrcMsg = "<presence to='%(tojid)s' xmlns='jabber:client'><x xmlns='http://jabber.org/protocol/muc'/><c xmlns='http://jabber.org/protocol/caps' hash='sha-1' node='http://jitsi.org/jitsimeet' ver='0WkSdhFnAUxrz4ImQQLdB80GFlE='/><nick xmlns='http://jabber.org/protocol/nick'>%(nick)s</nick><stats xmlns='http://jitsi.org/jitmeet/stats'><stat name='bitrate_download' value='175'/><stat name='bitrate_upload' value='176'/><stat name='packetLoss_total' value='0'/><stat name='packetLoss_download' value='0'/><stat name='packetLoss_upload' value='0'/></stats><media xmlns='http://estos.de/ns/mjs'><source type='audio' ssrc='%(assrc)s' direction='sendre'/><source type='video' ssrc='%(vssrc)s' direction='sendre'/></media></presence>" % {
|
||||||
|
"tojid": "%s@%s/%s" % (ROOMNAME, ROOMDOMAIN, self.shortJid),
|
||||||
|
"nick": self.userId,
|
||||||
|
"assrc": self.ssrcs["audio"],
|
||||||
|
"vssrc": self.ssrcs["video"],
|
||||||
|
}
|
||||||
|
res = self.sendIq(ssrcMsg)
|
||||||
|
print("reply from ssrc announce: ", res)
|
||||||
|
time.sleep(10)
|
||||||
|
|
||||||
|
def xmppLoop(self):
|
||||||
|
self.matrixCallId = time.time()
|
||||||
|
res = self.xmppPoke(
|
||||||
|
"<body rid='%s' xmlns='http://jabber.org/protocol/httpbind' to='%s' xml:lang='en' wait='60' hold='1' content='text/xml; charset=utf-8' ver='1.6' xmpp:version='1.0' xmlns:xmpp='urn:xmpp:xbosh'/>"
|
||||||
|
% (self.nextRid(), HOST)
|
||||||
|
)
|
||||||
|
|
||||||
|
print(res)
|
||||||
|
self.sid = res.body["sid"]
|
||||||
|
print("sid %s" % (self.sid))
|
||||||
|
|
||||||
|
res = self.sendIq(
|
||||||
|
"<auth xmlns='urn:ietf:params:xml:ns:xmpp-sasl' mechanism='ANONYMOUS'/>"
|
||||||
|
)
|
||||||
|
|
||||||
|
res = self.xmppPoke(
|
||||||
|
"<body rid='%s' xmlns='http://jabber.org/protocol/httpbind' sid='%s' to='%s' xml:lang='en' xmpp:restart='true' xmlns:xmpp='urn:xmpp:xbosh'/>"
|
||||||
|
% (self.nextRid(), self.sid, HOST)
|
||||||
|
)
|
||||||
|
|
||||||
|
res = self.sendIq(
|
||||||
|
"<iq type='set' id='_bind_auth_2' xmlns='jabber:client'><bind xmlns='urn:ietf:params:xml:ns:xmpp-bind'/></iq>"
|
||||||
|
)
|
||||||
|
print(res)
|
||||||
|
|
||||||
|
self.jid = res.body.iq.bind.jid.string
|
||||||
|
print("jid: %s" % (self.jid))
|
||||||
|
self.shortJid = self.jid.split("-")[0]
|
||||||
|
|
||||||
|
res = self.sendIq(
|
||||||
|
"<iq type='set' id='_session_auth_2' xmlns='jabber:client'><session xmlns='urn:ietf:params:xml:ns:xmpp-session'/></iq>"
|
||||||
|
)
|
||||||
|
|
||||||
|
# randomthing = res.body.iq['to']
|
||||||
|
# whatsitpart = randomthing.split('-')[0]
|
||||||
|
|
||||||
|
# print "other random bind thing: %s" % (randomthing)
|
||||||
|
|
||||||
|
# advertise preence to the jitsi room, with our nick
|
||||||
|
res = self.sendIq(
|
||||||
|
"<iq type='get' to='%s' xmlns='jabber:client' id='1:sendIQ'><services xmlns='urn:xmpp:extdisco:1'><service host='%s'/></services></iq><presence to='%s@%s/d98f6c40' xmlns='jabber:client'><x xmlns='http://jabber.org/protocol/muc'/><c xmlns='http://jabber.org/protocol/caps' hash='sha-1' node='http://jitsi.org/jitsimeet' ver='0WkSdhFnAUxrz4ImQQLdB80GFlE='/><nick xmlns='http://jabber.org/protocol/nick'>%s</nick></presence>"
|
||||||
|
% (HOST, TURNSERVER, ROOMNAME, ROOMDOMAIN, self.userId)
|
||||||
|
)
|
||||||
|
self.muc = {"users": []}
|
||||||
|
for p in res.body.findAll("presence"):
|
||||||
|
u = {}
|
||||||
|
u["shortJid"] = p["from"].split("/")[1]
|
||||||
|
if p.c and p.c.nick:
|
||||||
|
u["nick"] = p.c.nick.string
|
||||||
|
self.muc["users"].append(u)
|
||||||
|
print("muc: ", self.muc)
|
||||||
|
|
||||||
|
# wait for stuff
|
||||||
|
while True:
|
||||||
|
print("waiting...")
|
||||||
|
res = self.sendIq("")
|
||||||
|
print("got from stream: ", res)
|
||||||
|
if res.body.iq:
|
||||||
|
jingles = res.body.iq.findAll("jingle")
|
||||||
|
if len(jingles):
|
||||||
|
self.callfrom = res.body.iq["from"]
|
||||||
|
self.handleInvite(jingles[0])
|
||||||
|
elif "type" in res.body and res.body["type"] == "terminate":
|
||||||
|
self.running = False
|
||||||
|
del xmppClients[self.matrixRoom]
|
||||||
|
return
|
||||||
|
|
||||||
|
def handleInvite(self, jingle):
|
||||||
|
self.initiator = jingle["initiator"]
|
||||||
|
self.callsid = jingle["sid"]
|
||||||
|
p = subprocess.Popen(
|
||||||
|
["node", "unjingle/unjingle.js", "--jingle"],
|
||||||
|
stdin=subprocess.PIPE,
|
||||||
|
stdout=subprocess.PIPE,
|
||||||
|
)
|
||||||
|
print("raw jingle invite", str(jingle))
|
||||||
|
sdp, out_err = p.communicate(str(jingle))
|
||||||
|
print("transformed remote offer sdp", sdp)
|
||||||
|
inviteEvent = {
|
||||||
|
"offer": {"type": "offer", "sdp": sdp},
|
||||||
|
"call_id": self.matrixCallId,
|
||||||
|
"version": 0,
|
||||||
|
"lifetime": 30000,
|
||||||
|
}
|
||||||
|
matrixCli.sendEvent(self.matrixRoom, "m.call.invite", inviteEvent)
|
||||||
|
|
||||||
|
|
||||||
|
matrixCli = TrivialMatrixClient(ACCESS_TOKEN) # Undefined name
|
||||||
|
|
||||||
|
gevent.joinall([gevent.spawn(matrixLoop)])
|
||||||
188
contrib/jitsimeetbridge/syweb-jitsi-conference.patch
Normal file
188
contrib/jitsimeetbridge/syweb-jitsi-conference.patch
Normal file
@@ -0,0 +1,188 @@
|
|||||||
|
diff --git a/syweb/webclient/app/components/matrix/matrix-call.js b/syweb/webclient/app/components/matrix/matrix-call.js
|
||||||
|
index 9fbfff0..dc68077 100644
|
||||||
|
--- a/syweb/webclient/app/components/matrix/matrix-call.js
|
||||||
|
+++ b/syweb/webclient/app/components/matrix/matrix-call.js
|
||||||
|
@@ -16,6 +16,45 @@ limitations under the License.
|
||||||
|
|
||||||
|
'use strict';
|
||||||
|
|
||||||
|
+
|
||||||
|
+function sendKeyframe(pc) {
|
||||||
|
+ console.log('sendkeyframe', pc.iceConnectionState);
|
||||||
|
+ if (pc.iceConnectionState !== 'connected') return; // safe...
|
||||||
|
+ pc.setRemoteDescription(
|
||||||
|
+ pc.remoteDescription,
|
||||||
|
+ function () {
|
||||||
|
+ pc.createAnswer(
|
||||||
|
+ function (modifiedAnswer) {
|
||||||
|
+ pc.setLocalDescription(
|
||||||
|
+ modifiedAnswer,
|
||||||
|
+ function () {
|
||||||
|
+ // noop
|
||||||
|
+ },
|
||||||
|
+ function (error) {
|
||||||
|
+ console.log('triggerKeyframe setLocalDescription failed', error);
|
||||||
|
+ messageHandler.showError();
|
||||||
|
+ }
|
||||||
|
+ );
|
||||||
|
+ },
|
||||||
|
+ function (error) {
|
||||||
|
+ console.log('triggerKeyframe createAnswer failed', error);
|
||||||
|
+ messageHandler.showError();
|
||||||
|
+ }
|
||||||
|
+ );
|
||||||
|
+ },
|
||||||
|
+ function (error) {
|
||||||
|
+ console.log('triggerKeyframe setRemoteDescription failed', error);
|
||||||
|
+ messageHandler.showError();
|
||||||
|
+ }
|
||||||
|
+ );
|
||||||
|
+}
|
||||||
|
+
|
||||||
|
+
|
||||||
|
+
|
||||||
|
+
|
||||||
|
+
|
||||||
|
+
|
||||||
|
+
|
||||||
|
var forAllVideoTracksOnStream = function(s, f) {
|
||||||
|
var tracks = s.getVideoTracks();
|
||||||
|
for (var i = 0; i < tracks.length; i++) {
|
||||||
|
@@ -83,7 +122,7 @@ angular.module('MatrixCall', [])
|
||||||
|
}
|
||||||
|
|
||||||
|
// FIXME: we should prevent any calls from being placed or accepted before this has finished
|
||||||
|
- MatrixCall.getTurnServer();
|
||||||
|
+ //MatrixCall.getTurnServer();
|
||||||
|
|
||||||
|
MatrixCall.CALL_TIMEOUT = 60000;
|
||||||
|
MatrixCall.FALLBACK_STUN_SERVER = 'stun:stun.l.google.com:19302';
|
||||||
|
@@ -132,6 +171,22 @@ angular.module('MatrixCall', [])
|
||||||
|
pc.onsignalingstatechange = function() { self.onSignallingStateChanged(); };
|
||||||
|
pc.onicecandidate = function(c) { self.gotLocalIceCandidate(c); };
|
||||||
|
pc.onaddstream = function(s) { self.onAddStream(s); };
|
||||||
|
+
|
||||||
|
+ var datachan = pc.createDataChannel('RTCDataChannel', {
|
||||||
|
+ reliable: false
|
||||||
|
+ });
|
||||||
|
+ console.log("data chan: "+datachan);
|
||||||
|
+ datachan.onopen = function() {
|
||||||
|
+ console.log("data channel open");
|
||||||
|
+ };
|
||||||
|
+ datachan.onmessage = function() {
|
||||||
|
+ console.log("data channel message");
|
||||||
|
+ };
|
||||||
|
+ pc.ondatachannel = function(event) {
|
||||||
|
+ console.log("have data channel");
|
||||||
|
+ event.channel.binaryType = 'blob';
|
||||||
|
+ };
|
||||||
|
+
|
||||||
|
return pc;
|
||||||
|
}
|
||||||
|
|
||||||
|
@@ -200,6 +255,12 @@ angular.module('MatrixCall', [])
|
||||||
|
}, this.msg.lifetime - event.age);
|
||||||
|
};
|
||||||
|
|
||||||
|
+ MatrixCall.prototype.receivedInvite = function(event) {
|
||||||
|
+ console.log("Got second invite for call "+this.call_id);
|
||||||
|
+ this.peerConn.setRemoteDescription(new RTCSessionDescription(this.msg.offer), this.onSetRemoteDescriptionSuccess, this.onSetRemoteDescriptionError);
|
||||||
|
+ };
|
||||||
|
+
|
||||||
|
+
|
||||||
|
// perverse as it may seem, sometimes we want to instantiate a call with a hangup message
|
||||||
|
// (because when getting the state of the room on load, events come in reverse order and
|
||||||
|
// we want to remember that a call has been hung up)
|
||||||
|
@@ -349,7 +410,7 @@ angular.module('MatrixCall', [])
|
||||||
|
'mandatory': {
|
||||||
|
'OfferToReceiveAudio': true,
|
||||||
|
'OfferToReceiveVideo': this.type == 'video'
|
||||||
|
- },
|
||||||
|
+ }
|
||||||
|
};
|
||||||
|
this.peerConn.createAnswer(function(d) { self.createdAnswer(d); }, function(e) {}, constraints);
|
||||||
|
// This can't be in an apply() because it's called by a predecessor call under glare conditions :(
|
||||||
|
@@ -359,8 +420,20 @@ angular.module('MatrixCall', [])
|
||||||
|
MatrixCall.prototype.gotLocalIceCandidate = function(event) {
|
||||||
|
if (event.candidate) {
|
||||||
|
console.log("Got local ICE "+event.candidate.sdpMid+" candidate: "+event.candidate.candidate);
|
||||||
|
- this.sendCandidate(event.candidate);
|
||||||
|
- }
|
||||||
|
+ //this.sendCandidate(event.candidate);
|
||||||
|
+ } else {
|
||||||
|
+ console.log("have all candidates, sending answer");
|
||||||
|
+ var content = {
|
||||||
|
+ version: 0,
|
||||||
|
+ call_id: this.call_id,
|
||||||
|
+ answer: this.peerConn.localDescription
|
||||||
|
+ };
|
||||||
|
+ this.sendEventWithRetry('m.call.answer', content);
|
||||||
|
+ var self = this;
|
||||||
|
+ $rootScope.$apply(function() {
|
||||||
|
+ self.state = 'connecting';
|
||||||
|
+ });
|
||||||
|
+ }
|
||||||
|
}
|
||||||
|
|
||||||
|
MatrixCall.prototype.gotRemoteIceCandidate = function(cand) {
|
||||||
|
@@ -418,15 +491,6 @@ angular.module('MatrixCall', [])
|
||||||
|
console.log("Created answer: "+description);
|
||||||
|
var self = this;
|
||||||
|
this.peerConn.setLocalDescription(description, function() {
|
||||||
|
- var content = {
|
||||||
|
- version: 0,
|
||||||
|
- call_id: self.call_id,
|
||||||
|
- answer: self.peerConn.localDescription
|
||||||
|
- };
|
||||||
|
- self.sendEventWithRetry('m.call.answer', content);
|
||||||
|
- $rootScope.$apply(function() {
|
||||||
|
- self.state = 'connecting';
|
||||||
|
- });
|
||||||
|
}, function() { console.log("Error setting local description!"); } );
|
||||||
|
};
|
||||||
|
|
||||||
|
@@ -448,6 +512,9 @@ angular.module('MatrixCall', [])
|
||||||
|
$rootScope.$apply(function() {
|
||||||
|
self.state = 'connected';
|
||||||
|
self.didConnect = true;
|
||||||
|
+ /*$timeout(function() {
|
||||||
|
+ sendKeyframe(self.peerConn);
|
||||||
|
+ }, 1000);*/
|
||||||
|
});
|
||||||
|
} else if (this.peerConn.iceConnectionState == 'failed') {
|
||||||
|
this.hangup('ice_failed');
|
||||||
|
@@ -518,6 +585,7 @@ angular.module('MatrixCall', [])
|
||||||
|
|
||||||
|
MatrixCall.prototype.onRemoteStreamEnded = function(event) {
|
||||||
|
console.log("Remote stream ended");
|
||||||
|
+ return;
|
||||||
|
var self = this;
|
||||||
|
$rootScope.$apply(function() {
|
||||||
|
self.state = 'ended';
|
||||||
|
diff --git a/syweb/webclient/app/components/matrix/matrix-phone-service.js b/syweb/webclient/app/components/matrix/matrix-phone-service.js
|
||||||
|
index 55dbbf5..272fa27 100644
|
||||||
|
--- a/syweb/webclient/app/components/matrix/matrix-phone-service.js
|
||||||
|
+++ b/syweb/webclient/app/components/matrix/matrix-phone-service.js
|
||||||
|
@@ -48,6 +48,13 @@ angular.module('matrixPhoneService', [])
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
+ // do we already have an entry for this call ID?
|
||||||
|
+ var existingEntry = matrixPhoneService.allCalls[msg.call_id];
|
||||||
|
+ if (existingEntry) {
|
||||||
|
+ existingEntry.receivedInvite(msg);
|
||||||
|
+ return;
|
||||||
|
+ }
|
||||||
|
+
|
||||||
|
var call = undefined;
|
||||||
|
if (!isLive) {
|
||||||
|
// if this event wasn't live then this call may already be over
|
||||||
|
@@ -108,7 +115,7 @@ angular.module('matrixPhoneService', [])
|
||||||
|
call.hangup();
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
- $rootScope.$broadcast(matrixPhoneService.INCOMING_CALL_EVENT, call);
|
||||||
|
+ $rootScope.$broadcast(matrixPhoneService.INCOMING_CALL_EVENT, call);
|
||||||
|
}
|
||||||
|
} else if (event.type == 'm.call.answer') {
|
||||||
|
var call = matrixPhoneService.allCalls[msg.call_id];
|
||||||
712
contrib/jitsimeetbridge/unjingle/strophe.jingle.sdp.js
Normal file
712
contrib/jitsimeetbridge/unjingle/strophe.jingle.sdp.js
Normal file
@@ -0,0 +1,712 @@
|
|||||||
|
/* jshint -W117 */
|
||||||
|
// SDP STUFF
|
||||||
|
function SDP(sdp) {
|
||||||
|
this.media = sdp.split('\r\nm=');
|
||||||
|
for (var i = 1; i < this.media.length; i++) {
|
||||||
|
this.media[i] = 'm=' + this.media[i];
|
||||||
|
if (i != this.media.length - 1) {
|
||||||
|
this.media[i] += '\r\n';
|
||||||
|
}
|
||||||
|
}
|
||||||
|
this.session = this.media.shift() + '\r\n';
|
||||||
|
this.raw = this.session + this.media.join('');
|
||||||
|
}
|
||||||
|
|
||||||
|
exports.SDP = SDP;
|
||||||
|
|
||||||
|
var jsdom = require("jsdom");
|
||||||
|
var window = jsdom.jsdom().parentWindow;
|
||||||
|
var $ = require('jquery')(window);
|
||||||
|
|
||||||
|
var SDPUtil = require('./strophe.jingle.sdp.util.js').SDPUtil;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Returns map of MediaChannel mapped per channel idx.
|
||||||
|
*/
|
||||||
|
SDP.prototype.getMediaSsrcMap = function() {
|
||||||
|
var self = this;
|
||||||
|
var media_ssrcs = {};
|
||||||
|
for (channelNum = 0; channelNum < self.media.length; channelNum++) {
|
||||||
|
modified = true;
|
||||||
|
tmp = SDPUtil.find_lines(self.media[channelNum], 'a=ssrc:');
|
||||||
|
var type = SDPUtil.parse_mid(SDPUtil.find_line(self.media[channelNum], 'a=mid:'));
|
||||||
|
var channel = new MediaChannel(channelNum, type);
|
||||||
|
media_ssrcs[channelNum] = channel;
|
||||||
|
tmp.forEach(function (line) {
|
||||||
|
var linessrc = line.substring(7).split(' ')[0];
|
||||||
|
// allocate new ChannelSsrc
|
||||||
|
if(!channel.ssrcs[linessrc]) {
|
||||||
|
channel.ssrcs[linessrc] = new ChannelSsrc(linessrc, type);
|
||||||
|
}
|
||||||
|
channel.ssrcs[linessrc].lines.push(line);
|
||||||
|
});
|
||||||
|
tmp = SDPUtil.find_lines(self.media[channelNum], 'a=ssrc-group:');
|
||||||
|
tmp.forEach(function(line){
|
||||||
|
var semantics = line.substr(0, idx).substr(13);
|
||||||
|
var ssrcs = line.substr(14 + semantics.length).split(' ');
|
||||||
|
if (ssrcs.length != 0) {
|
||||||
|
var ssrcGroup = new ChannelSsrcGroup(semantics, ssrcs);
|
||||||
|
channel.ssrcGroups.push(ssrcGroup);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
return media_ssrcs;
|
||||||
|
};
|
||||||
|
/**
|
||||||
|
* Returns <tt>true</tt> if this SDP contains given SSRC.
|
||||||
|
* @param ssrc the ssrc to check.
|
||||||
|
* @returns {boolean} <tt>true</tt> if this SDP contains given SSRC.
|
||||||
|
*/
|
||||||
|
SDP.prototype.containsSSRC = function(ssrc) {
|
||||||
|
var channels = this.getMediaSsrcMap();
|
||||||
|
var contains = false;
|
||||||
|
Object.keys(channels).forEach(function(chNumber){
|
||||||
|
var channel = channels[chNumber];
|
||||||
|
//console.log("Check", channel, ssrc);
|
||||||
|
if(Object.keys(channel.ssrcs).indexOf(ssrc) != -1){
|
||||||
|
contains = true;
|
||||||
|
}
|
||||||
|
});
|
||||||
|
return contains;
|
||||||
|
};
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Returns map of MediaChannel that contains only media not contained in <tt>otherSdp</tt>. Mapped by channel idx.
|
||||||
|
* @param otherSdp the other SDP to check ssrc with.
|
||||||
|
*/
|
||||||
|
SDP.prototype.getNewMedia = function(otherSdp) {
|
||||||
|
|
||||||
|
// this could be useful in Array.prototype.
|
||||||
|
function arrayEquals(array) {
|
||||||
|
// if the other array is a falsy value, return
|
||||||
|
if (!array)
|
||||||
|
return false;
|
||||||
|
|
||||||
|
// compare lengths - can save a lot of time
|
||||||
|
if (this.length != array.length)
|
||||||
|
return false;
|
||||||
|
|
||||||
|
for (var i = 0, l=this.length; i < l; i++) {
|
||||||
|
// Check if we have nested arrays
|
||||||
|
if (this[i] instanceof Array && array[i] instanceof Array) {
|
||||||
|
// recurse into the nested arrays
|
||||||
|
if (!this[i].equals(array[i]))
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
else if (this[i] != array[i]) {
|
||||||
|
// Warning - two different object instances will never be equal: {x:20} != {x:20}
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
var myMedia = this.getMediaSsrcMap();
|
||||||
|
var othersMedia = otherSdp.getMediaSsrcMap();
|
||||||
|
var newMedia = {};
|
||||||
|
Object.keys(othersMedia).forEach(function(channelNum) {
|
||||||
|
var myChannel = myMedia[channelNum];
|
||||||
|
var othersChannel = othersMedia[channelNum];
|
||||||
|
if(!myChannel && othersChannel) {
|
||||||
|
// Add whole channel
|
||||||
|
newMedia[channelNum] = othersChannel;
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
// Look for new ssrcs accross the channel
|
||||||
|
Object.keys(othersChannel.ssrcs).forEach(function(ssrc) {
|
||||||
|
if(Object.keys(myChannel.ssrcs).indexOf(ssrc) === -1) {
|
||||||
|
// Allocate channel if we've found ssrc that doesn't exist in our channel
|
||||||
|
if(!newMedia[channelNum]){
|
||||||
|
newMedia[channelNum] = new MediaChannel(othersChannel.chNumber, othersChannel.mediaType);
|
||||||
|
}
|
||||||
|
newMedia[channelNum].ssrcs[ssrc] = othersChannel.ssrcs[ssrc];
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
// Look for new ssrc groups across the channels
|
||||||
|
othersChannel.ssrcGroups.forEach(function(otherSsrcGroup){
|
||||||
|
|
||||||
|
// try to match the other ssrc-group with an ssrc-group of ours
|
||||||
|
var matched = false;
|
||||||
|
for (var i = 0; i < myChannel.ssrcGroups.length; i++) {
|
||||||
|
var mySsrcGroup = myChannel.ssrcGroups[i];
|
||||||
|
if (otherSsrcGroup.semantics == mySsrcGroup.semantics
|
||||||
|
&& arrayEquals.apply(otherSsrcGroup.ssrcs, [mySsrcGroup.ssrcs])) {
|
||||||
|
|
||||||
|
matched = true;
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!matched) {
|
||||||
|
// Allocate channel if we've found an ssrc-group that doesn't
|
||||||
|
// exist in our channel
|
||||||
|
|
||||||
|
if(!newMedia[channelNum]){
|
||||||
|
newMedia[channelNum] = new MediaChannel(othersChannel.chNumber, othersChannel.mediaType);
|
||||||
|
}
|
||||||
|
newMedia[channelNum].ssrcGroups.push(otherSsrcGroup);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
});
|
||||||
|
return newMedia;
|
||||||
|
};
|
||||||
|
|
||||||
|
// remove iSAC and CN from SDP
|
||||||
|
SDP.prototype.mangle = function () {
|
||||||
|
var i, j, mline, lines, rtpmap, newdesc;
|
||||||
|
for (i = 0; i < this.media.length; i++) {
|
||||||
|
lines = this.media[i].split('\r\n');
|
||||||
|
lines.pop(); // remove empty last element
|
||||||
|
mline = SDPUtil.parse_mline(lines.shift());
|
||||||
|
if (mline.media != 'audio')
|
||||||
|
continue;
|
||||||
|
newdesc = '';
|
||||||
|
mline.fmt.length = 0;
|
||||||
|
for (j = 0; j < lines.length; j++) {
|
||||||
|
if (lines[j].substr(0, 9) == 'a=rtpmap:') {
|
||||||
|
rtpmap = SDPUtil.parse_rtpmap(lines[j]);
|
||||||
|
if (rtpmap.name == 'CN' || rtpmap.name == 'ISAC')
|
||||||
|
continue;
|
||||||
|
mline.fmt.push(rtpmap.id);
|
||||||
|
newdesc += lines[j] + '\r\n';
|
||||||
|
} else {
|
||||||
|
newdesc += lines[j] + '\r\n';
|
||||||
|
}
|
||||||
|
}
|
||||||
|
this.media[i] = SDPUtil.build_mline(mline) + '\r\n';
|
||||||
|
this.media[i] += newdesc;
|
||||||
|
}
|
||||||
|
this.raw = this.session + this.media.join('');
|
||||||
|
};
|
||||||
|
|
||||||
|
// remove lines matching prefix from session section
|
||||||
|
SDP.prototype.removeSessionLines = function(prefix) {
|
||||||
|
var self = this;
|
||||||
|
var lines = SDPUtil.find_lines(this.session, prefix);
|
||||||
|
lines.forEach(function(line) {
|
||||||
|
self.session = self.session.replace(line + '\r\n', '');
|
||||||
|
});
|
||||||
|
this.raw = this.session + this.media.join('');
|
||||||
|
return lines;
|
||||||
|
}
|
||||||
|
// remove lines matching prefix from a media section specified by mediaindex
|
||||||
|
// TODO: non-numeric mediaindex could match mid
|
||||||
|
SDP.prototype.removeMediaLines = function(mediaindex, prefix) {
|
||||||
|
var self = this;
|
||||||
|
var lines = SDPUtil.find_lines(this.media[mediaindex], prefix);
|
||||||
|
lines.forEach(function(line) {
|
||||||
|
self.media[mediaindex] = self.media[mediaindex].replace(line + '\r\n', '');
|
||||||
|
});
|
||||||
|
this.raw = this.session + this.media.join('');
|
||||||
|
return lines;
|
||||||
|
}
|
||||||
|
|
||||||
|
// add content's to a jingle element
|
||||||
|
SDP.prototype.toJingle = function (elem, thecreator) {
|
||||||
|
var i, j, k, mline, ssrc, rtpmap, tmp, line, lines;
|
||||||
|
var self = this;
|
||||||
|
// new bundle plan
|
||||||
|
if (SDPUtil.find_line(this.session, 'a=group:')) {
|
||||||
|
lines = SDPUtil.find_lines(this.session, 'a=group:');
|
||||||
|
for (i = 0; i < lines.length; i++) {
|
||||||
|
tmp = lines[i].split(' ');
|
||||||
|
var semantics = tmp.shift().substr(8);
|
||||||
|
elem.c('group', {xmlns: 'urn:xmpp:jingle:apps:grouping:0', semantics:semantics});
|
||||||
|
for (j = 0; j < tmp.length; j++) {
|
||||||
|
elem.c('content', {name: tmp[j]}).up();
|
||||||
|
}
|
||||||
|
elem.up();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// old bundle plan, to be removed
|
||||||
|
var bundle = [];
|
||||||
|
if (SDPUtil.find_line(this.session, 'a=group:BUNDLE')) {
|
||||||
|
bundle = SDPUtil.find_line(this.session, 'a=group:BUNDLE ').split(' ');
|
||||||
|
bundle.shift();
|
||||||
|
}
|
||||||
|
for (i = 0; i < this.media.length; i++) {
|
||||||
|
mline = SDPUtil.parse_mline(this.media[i].split('\r\n')[0]);
|
||||||
|
if (!(mline.media === 'audio' ||
|
||||||
|
mline.media === 'video' ||
|
||||||
|
mline.media === 'application'))
|
||||||
|
{
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
if (SDPUtil.find_line(this.media[i], 'a=ssrc:')) {
|
||||||
|
ssrc = SDPUtil.find_line(this.media[i], 'a=ssrc:').substring(7).split(' ')[0]; // take the first
|
||||||
|
} else {
|
||||||
|
ssrc = false;
|
||||||
|
}
|
||||||
|
|
||||||
|
elem.c('content', {creator: thecreator, name: mline.media});
|
||||||
|
if (SDPUtil.find_line(this.media[i], 'a=mid:')) {
|
||||||
|
// prefer identifier from a=mid if present
|
||||||
|
var mid = SDPUtil.parse_mid(SDPUtil.find_line(this.media[i], 'a=mid:'));
|
||||||
|
elem.attrs({ name: mid });
|
||||||
|
|
||||||
|
// old BUNDLE plan, to be removed
|
||||||
|
if (bundle.indexOf(mid) !== -1) {
|
||||||
|
elem.c('bundle', {xmlns: 'http://estos.de/ns/bundle'}).up();
|
||||||
|
bundle.splice(bundle.indexOf(mid), 1);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (SDPUtil.find_line(this.media[i], 'a=rtpmap:').length)
|
||||||
|
{
|
||||||
|
elem.c('description',
|
||||||
|
{xmlns: 'urn:xmpp:jingle:apps:rtp:1',
|
||||||
|
media: mline.media });
|
||||||
|
if (ssrc) {
|
||||||
|
elem.attrs({ssrc: ssrc});
|
||||||
|
}
|
||||||
|
for (j = 0; j < mline.fmt.length; j++) {
|
||||||
|
rtpmap = SDPUtil.find_line(this.media[i], 'a=rtpmap:' + mline.fmt[j]);
|
||||||
|
elem.c('payload-type', SDPUtil.parse_rtpmap(rtpmap));
|
||||||
|
// put any 'a=fmtp:' + mline.fmt[j] lines into <param name=foo value=bar/>
|
||||||
|
if (SDPUtil.find_line(this.media[i], 'a=fmtp:' + mline.fmt[j])) {
|
||||||
|
tmp = SDPUtil.parse_fmtp(SDPUtil.find_line(this.media[i], 'a=fmtp:' + mline.fmt[j]));
|
||||||
|
for (k = 0; k < tmp.length; k++) {
|
||||||
|
elem.c('parameter', tmp[k]).up();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
this.RtcpFbToJingle(i, elem, mline.fmt[j]); // XEP-0293 -- map a=rtcp-fb
|
||||||
|
|
||||||
|
elem.up();
|
||||||
|
}
|
||||||
|
if (SDPUtil.find_line(this.media[i], 'a=crypto:', this.session)) {
|
||||||
|
elem.c('encryption', {required: 1});
|
||||||
|
var crypto = SDPUtil.find_lines(this.media[i], 'a=crypto:', this.session);
|
||||||
|
crypto.forEach(function(line) {
|
||||||
|
elem.c('crypto', SDPUtil.parse_crypto(line)).up();
|
||||||
|
});
|
||||||
|
elem.up(); // end of encryption
|
||||||
|
}
|
||||||
|
|
||||||
|
if (ssrc) {
|
||||||
|
// new style mapping
|
||||||
|
elem.c('source', { ssrc: ssrc, xmlns: 'urn:xmpp:jingle:apps:rtp:ssma:0' });
|
||||||
|
// FIXME: group by ssrc and support multiple different ssrcs
|
||||||
|
var ssrclines = SDPUtil.find_lines(this.media[i], 'a=ssrc:');
|
||||||
|
ssrclines.forEach(function(line) {
|
||||||
|
idx = line.indexOf(' ');
|
||||||
|
var linessrc = line.substr(0, idx).substr(7);
|
||||||
|
if (linessrc != ssrc) {
|
||||||
|
elem.up();
|
||||||
|
ssrc = linessrc;
|
||||||
|
elem.c('source', { ssrc: ssrc, xmlns: 'urn:xmpp:jingle:apps:rtp:ssma:0' });
|
||||||
|
}
|
||||||
|
var kv = line.substr(idx + 1);
|
||||||
|
elem.c('parameter');
|
||||||
|
if (kv.indexOf(':') == -1) {
|
||||||
|
elem.attrs({ name: kv });
|
||||||
|
} else {
|
||||||
|
elem.attrs({ name: kv.split(':', 2)[0] });
|
||||||
|
elem.attrs({ value: kv.split(':', 2)[1] });
|
||||||
|
}
|
||||||
|
elem.up();
|
||||||
|
});
|
||||||
|
elem.up();
|
||||||
|
|
||||||
|
// old proprietary mapping, to be removed at some point
|
||||||
|
tmp = SDPUtil.parse_ssrc(this.media[i]);
|
||||||
|
tmp.xmlns = 'http://estos.de/ns/ssrc';
|
||||||
|
tmp.ssrc = ssrc;
|
||||||
|
elem.c('ssrc', tmp).up(); // ssrc is part of description
|
||||||
|
|
||||||
|
// XEP-0339 handle ssrc-group attributes
|
||||||
|
var ssrc_group_lines = SDPUtil.find_lines(this.media[i], 'a=ssrc-group:');
|
||||||
|
ssrc_group_lines.forEach(function(line) {
|
||||||
|
idx = line.indexOf(' ');
|
||||||
|
var semantics = line.substr(0, idx).substr(13);
|
||||||
|
var ssrcs = line.substr(14 + semantics.length).split(' ');
|
||||||
|
if (ssrcs.length != 0) {
|
||||||
|
elem.c('ssrc-group', { semantics: semantics, xmlns: 'urn:xmpp:jingle:apps:rtp:ssma:0' });
|
||||||
|
ssrcs.forEach(function(ssrc) {
|
||||||
|
elem.c('source', { ssrc: ssrc })
|
||||||
|
.up();
|
||||||
|
});
|
||||||
|
elem.up();
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
if (SDPUtil.find_line(this.media[i], 'a=rtcp-mux')) {
|
||||||
|
elem.c('rtcp-mux').up();
|
||||||
|
}
|
||||||
|
|
||||||
|
// XEP-0293 -- map a=rtcp-fb:*
|
||||||
|
this.RtcpFbToJingle(i, elem, '*');
|
||||||
|
|
||||||
|
// XEP-0294
|
||||||
|
if (SDPUtil.find_line(this.media[i], 'a=extmap:')) {
|
||||||
|
lines = SDPUtil.find_lines(this.media[i], 'a=extmap:');
|
||||||
|
for (j = 0; j < lines.length; j++) {
|
||||||
|
tmp = SDPUtil.parse_extmap(lines[j]);
|
||||||
|
elem.c('rtp-hdrext', { xmlns: 'urn:xmpp:jingle:apps:rtp:rtp-hdrext:0',
|
||||||
|
uri: tmp.uri,
|
||||||
|
id: tmp.value });
|
||||||
|
if (tmp.hasOwnProperty('direction')) {
|
||||||
|
switch (tmp.direction) {
|
||||||
|
case 'sendonly':
|
||||||
|
elem.attrs({senders: 'responder'});
|
||||||
|
break;
|
||||||
|
case 'recvonly':
|
||||||
|
elem.attrs({senders: 'initiator'});
|
||||||
|
break;
|
||||||
|
case 'sendrecv':
|
||||||
|
elem.attrs({senders: 'both'});
|
||||||
|
break;
|
||||||
|
case 'inactive':
|
||||||
|
elem.attrs({senders: 'none'});
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// TODO: handle params
|
||||||
|
elem.up();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
elem.up(); // end of description
|
||||||
|
}
|
||||||
|
|
||||||
|
// map ice-ufrag/pwd, dtls fingerprint, candidates
|
||||||
|
this.TransportToJingle(i, elem);
|
||||||
|
|
||||||
|
if (SDPUtil.find_line(this.media[i], 'a=sendrecv', this.session)) {
|
||||||
|
elem.attrs({senders: 'both'});
|
||||||
|
} else if (SDPUtil.find_line(this.media[i], 'a=sendonly', this.session)) {
|
||||||
|
elem.attrs({senders: 'initiator'});
|
||||||
|
} else if (SDPUtil.find_line(this.media[i], 'a=recvonly', this.session)) {
|
||||||
|
elem.attrs({senders: 'responder'});
|
||||||
|
} else if (SDPUtil.find_line(this.media[i], 'a=inactive', this.session)) {
|
||||||
|
elem.attrs({senders: 'none'});
|
||||||
|
}
|
||||||
|
if (mline.port == '0') {
|
||||||
|
// estos hack to reject an m-line
|
||||||
|
elem.attrs({senders: 'rejected'});
|
||||||
|
}
|
||||||
|
elem.up(); // end of content
|
||||||
|
}
|
||||||
|
elem.up();
|
||||||
|
return elem;
|
||||||
|
};
|
||||||
|
|
||||||
|
SDP.prototype.TransportToJingle = function (mediaindex, elem) {
|
||||||
|
var i = mediaindex;
|
||||||
|
var tmp;
|
||||||
|
var self = this;
|
||||||
|
elem.c('transport');
|
||||||
|
|
||||||
|
// XEP-0343 DTLS/SCTP
|
||||||
|
if (SDPUtil.find_line(this.media[mediaindex], 'a=sctpmap:').length)
|
||||||
|
{
|
||||||
|
var sctpmap = SDPUtil.find_line(
|
||||||
|
this.media[i], 'a=sctpmap:', self.session);
|
||||||
|
if (sctpmap)
|
||||||
|
{
|
||||||
|
var sctpAttrs = SDPUtil.parse_sctpmap(sctpmap);
|
||||||
|
elem.c('sctpmap',
|
||||||
|
{
|
||||||
|
xmlns: 'urn:xmpp:jingle:transports:dtls-sctp:1',
|
||||||
|
number: sctpAttrs[0], /* SCTP port */
|
||||||
|
protocol: sctpAttrs[1], /* protocol */
|
||||||
|
});
|
||||||
|
// Optional stream count attribute
|
||||||
|
if (sctpAttrs.length > 2)
|
||||||
|
elem.attrs({ streams: sctpAttrs[2]});
|
||||||
|
elem.up();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// XEP-0320
|
||||||
|
var fingerprints = SDPUtil.find_lines(this.media[mediaindex], 'a=fingerprint:', this.session);
|
||||||
|
fingerprints.forEach(function(line) {
|
||||||
|
tmp = SDPUtil.parse_fingerprint(line);
|
||||||
|
tmp.xmlns = 'urn:xmpp:jingle:apps:dtls:0';
|
||||||
|
elem.c('fingerprint').t(tmp.fingerprint);
|
||||||
|
delete tmp.fingerprint;
|
||||||
|
line = SDPUtil.find_line(self.media[mediaindex], 'a=setup:', self.session);
|
||||||
|
if (line) {
|
||||||
|
tmp.setup = line.substr(8);
|
||||||
|
}
|
||||||
|
elem.attrs(tmp);
|
||||||
|
elem.up(); // end of fingerprint
|
||||||
|
});
|
||||||
|
tmp = SDPUtil.iceparams(this.media[mediaindex], this.session);
|
||||||
|
if (tmp) {
|
||||||
|
tmp.xmlns = 'urn:xmpp:jingle:transports:ice-udp:1';
|
||||||
|
elem.attrs(tmp);
|
||||||
|
// XEP-0176
|
||||||
|
if (SDPUtil.find_line(this.media[mediaindex], 'a=candidate:', this.session)) { // add any a=candidate lines
|
||||||
|
var lines = SDPUtil.find_lines(this.media[mediaindex], 'a=candidate:', this.session);
|
||||||
|
lines.forEach(function (line) {
|
||||||
|
elem.c('candidate', SDPUtil.candidateToJingle(line)).up();
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
elem.up(); // end of transport
|
||||||
|
}
|
||||||
|
|
||||||
|
SDP.prototype.RtcpFbToJingle = function (mediaindex, elem, payloadtype) { // XEP-0293
|
||||||
|
var lines = SDPUtil.find_lines(this.media[mediaindex], 'a=rtcp-fb:' + payloadtype);
|
||||||
|
lines.forEach(function (line) {
|
||||||
|
var tmp = SDPUtil.parse_rtcpfb(line);
|
||||||
|
if (tmp.type == 'trr-int') {
|
||||||
|
elem.c('rtcp-fb-trr-int', {xmlns: 'urn:xmpp:jingle:apps:rtp:rtcp-fb:0', value: tmp.params[0]});
|
||||||
|
elem.up();
|
||||||
|
} else {
|
||||||
|
elem.c('rtcp-fb', {xmlns: 'urn:xmpp:jingle:apps:rtp:rtcp-fb:0', type: tmp.type});
|
||||||
|
if (tmp.params.length > 0) {
|
||||||
|
elem.attrs({'subtype': tmp.params[0]});
|
||||||
|
}
|
||||||
|
elem.up();
|
||||||
|
}
|
||||||
|
});
|
||||||
|
};
|
||||||
|
|
||||||
|
SDP.prototype.RtcpFbFromJingle = function (elem, payloadtype) { // XEP-0293
|
||||||
|
var media = '';
|
||||||
|
var tmp = elem.find('>rtcp-fb-trr-int[xmlns="urn:xmpp:jingle:apps:rtp:rtcp-fb:0"]');
|
||||||
|
if (tmp.length) {
|
||||||
|
media += 'a=rtcp-fb:' + '*' + ' ' + 'trr-int' + ' ';
|
||||||
|
if (tmp.attr('value')) {
|
||||||
|
media += tmp.attr('value');
|
||||||
|
} else {
|
||||||
|
media += '0';
|
||||||
|
}
|
||||||
|
media += '\r\n';
|
||||||
|
}
|
||||||
|
tmp = elem.find('>rtcp-fb[xmlns="urn:xmpp:jingle:apps:rtp:rtcp-fb:0"]');
|
||||||
|
tmp.each(function () {
|
||||||
|
media += 'a=rtcp-fb:' + payloadtype + ' ' + $(this).attr('type');
|
||||||
|
if ($(this).attr('subtype')) {
|
||||||
|
media += ' ' + $(this).attr('subtype');
|
||||||
|
}
|
||||||
|
media += '\r\n';
|
||||||
|
});
|
||||||
|
return media;
|
||||||
|
};
|
||||||
|
|
||||||
|
// construct an SDP from a jingle stanza
|
||||||
|
SDP.prototype.fromJingle = function (jingle) {
|
||||||
|
var self = this;
|
||||||
|
this.raw = 'v=0\r\n' +
|
||||||
|
'o=- ' + '1923518516' + ' 2 IN IP4 0.0.0.0\r\n' +// FIXME
|
||||||
|
's=-\r\n' +
|
||||||
|
't=0 0\r\n';
|
||||||
|
// http://tools.ietf.org/html/draft-ietf-mmusic-sdp-bundle-negotiation-04#section-8
|
||||||
|
if ($(jingle).find('>group[xmlns="urn:xmpp:jingle:apps:grouping:0"]').length) {
|
||||||
|
$(jingle).find('>group[xmlns="urn:xmpp:jingle:apps:grouping:0"]').each(function (idx, group) {
|
||||||
|
var contents = $(group).find('>content').map(function (idx, content) {
|
||||||
|
return content.getAttribute('name');
|
||||||
|
}).get();
|
||||||
|
if (contents.length > 0) {
|
||||||
|
self.raw += 'a=group:' + (group.getAttribute('semantics') || group.getAttribute('type')) + ' ' + contents.join(' ') + '\r\n';
|
||||||
|
}
|
||||||
|
});
|
||||||
|
} else if ($(jingle).find('>group[xmlns="urn:ietf:rfc:5888"]').length) {
|
||||||
|
// temporary namespace, not to be used. to be removed soon.
|
||||||
|
$(jingle).find('>group[xmlns="urn:ietf:rfc:5888"]').each(function (idx, group) {
|
||||||
|
var contents = $(group).find('>content').map(function (idx, content) {
|
||||||
|
return content.getAttribute('name');
|
||||||
|
}).get();
|
||||||
|
if (group.getAttribute('type') !== null && contents.length > 0) {
|
||||||
|
self.raw += 'a=group:' + group.getAttribute('type') + ' ' + contents.join(' ') + '\r\n';
|
||||||
|
}
|
||||||
|
});
|
||||||
|
} else {
|
||||||
|
// for backward compability, to be removed soon
|
||||||
|
// assume all contents are in the same bundle group, can be improved upon later
|
||||||
|
var bundle = $(jingle).find('>content').filter(function (idx, content) {
|
||||||
|
//elem.c('bundle', {xmlns:'http://estos.de/ns/bundle'});
|
||||||
|
return $(content).find('>bundle').length > 0;
|
||||||
|
}).map(function (idx, content) {
|
||||||
|
return content.getAttribute('name');
|
||||||
|
}).get();
|
||||||
|
if (bundle.length) {
|
||||||
|
this.raw += 'a=group:BUNDLE ' + bundle.join(' ') + '\r\n';
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
this.session = this.raw;
|
||||||
|
jingle.find('>content').each(function () {
|
||||||
|
var m = self.jingle2media($(this));
|
||||||
|
self.media.push(m);
|
||||||
|
});
|
||||||
|
|
||||||
|
// reconstruct msid-semantic -- apparently not necessary
|
||||||
|
/*
|
||||||
|
var msid = SDPUtil.parse_ssrc(this.raw);
|
||||||
|
if (msid.hasOwnProperty('mslabel')) {
|
||||||
|
this.session += "a=msid-semantic: WMS " + msid.mslabel + "\r\n";
|
||||||
|
}
|
||||||
|
*/
|
||||||
|
|
||||||
|
this.raw = this.session + this.media.join('');
|
||||||
|
};
|
||||||
|
|
||||||
|
// translate a jingle content element into an an SDP media part
|
||||||
|
SDP.prototype.jingle2media = function (content) {
|
||||||
|
var media = '',
|
||||||
|
desc = content.find('description'),
|
||||||
|
ssrc = desc.attr('ssrc'),
|
||||||
|
self = this,
|
||||||
|
tmp;
|
||||||
|
var sctp = content.find(
|
||||||
|
'>transport>sctpmap[xmlns="urn:xmpp:jingle:transports:dtls-sctp:1"]');
|
||||||
|
|
||||||
|
tmp = { media: desc.attr('media') };
|
||||||
|
tmp.port = '1';
|
||||||
|
if (content.attr('senders') == 'rejected') {
|
||||||
|
// estos hack to reject an m-line.
|
||||||
|
tmp.port = '0';
|
||||||
|
}
|
||||||
|
if (content.find('>transport>fingerprint').length || desc.find('encryption').length) {
|
||||||
|
if (sctp.length)
|
||||||
|
tmp.proto = 'DTLS/SCTP';
|
||||||
|
else
|
||||||
|
tmp.proto = 'RTP/SAVPF';
|
||||||
|
} else {
|
||||||
|
tmp.proto = 'RTP/AVPF';
|
||||||
|
}
|
||||||
|
if (!sctp.length)
|
||||||
|
{
|
||||||
|
tmp.fmt = desc.find('payload-type').map(
|
||||||
|
function () { return this.getAttribute('id'); }).get();
|
||||||
|
media += SDPUtil.build_mline(tmp) + '\r\n';
|
||||||
|
}
|
||||||
|
else
|
||||||
|
{
|
||||||
|
media += 'm=application 1 DTLS/SCTP ' + sctp.attr('number') + '\r\n';
|
||||||
|
media += 'a=sctpmap:' + sctp.attr('number') +
|
||||||
|
' ' + sctp.attr('protocol');
|
||||||
|
|
||||||
|
var streamCount = sctp.attr('streams');
|
||||||
|
if (streamCount)
|
||||||
|
media += ' ' + streamCount + '\r\n';
|
||||||
|
else
|
||||||
|
media += '\r\n';
|
||||||
|
}
|
||||||
|
|
||||||
|
media += 'c=IN IP4 0.0.0.0\r\n';
|
||||||
|
if (!sctp.length)
|
||||||
|
media += 'a=rtcp:1 IN IP4 0.0.0.0\r\n';
|
||||||
|
//tmp = content.find('>transport[xmlns="urn:xmpp:jingle:transports:ice-udp:1"]');
|
||||||
|
tmp = content.find('>bundle>transport[xmlns="urn:xmpp:jingle:transports:ice-udp:1"]');
|
||||||
|
//console.log('transports: '+content.find('>transport[xmlns="urn:xmpp:jingle:transports:ice-udp:1"]').length);
|
||||||
|
//console.log('bundle.transports: '+content.find('>bundle>transport[xmlns="urn:xmpp:jingle:transports:ice-udp:1"]').length);
|
||||||
|
//console.log("tmp fingerprint: "+tmp.find('>fingerprint').innerHTML);
|
||||||
|
if (tmp.length) {
|
||||||
|
if (tmp.attr('ufrag')) {
|
||||||
|
media += SDPUtil.build_iceufrag(tmp.attr('ufrag')) + '\r\n';
|
||||||
|
}
|
||||||
|
if (tmp.attr('pwd')) {
|
||||||
|
media += SDPUtil.build_icepwd(tmp.attr('pwd')) + '\r\n';
|
||||||
|
}
|
||||||
|
tmp.find('>fingerprint').each(function () {
|
||||||
|
// FIXME: check namespace at some point
|
||||||
|
media += 'a=fingerprint:' + this.getAttribute('hash');
|
||||||
|
media += ' ' + $(this).text();
|
||||||
|
media += '\r\n';
|
||||||
|
//console.log("mline "+media);
|
||||||
|
if (this.getAttribute('setup')) {
|
||||||
|
media += 'a=setup:' + this.getAttribute('setup') + '\r\n';
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
switch (content.attr('senders')) {
|
||||||
|
case 'initiator':
|
||||||
|
media += 'a=sendonly\r\n';
|
||||||
|
break;
|
||||||
|
case 'responder':
|
||||||
|
media += 'a=recvonly\r\n';
|
||||||
|
break;
|
||||||
|
case 'none':
|
||||||
|
media += 'a=inactive\r\n';
|
||||||
|
break;
|
||||||
|
case 'both':
|
||||||
|
media += 'a=sendrecv\r\n';
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
media += 'a=mid:' + content.attr('name') + '\r\n';
|
||||||
|
/*if (content.attr('name') == 'video') {
|
||||||
|
media += 'a=x-google-flag:conference' + '\r\n';
|
||||||
|
}*/
|
||||||
|
|
||||||
|
// <description><rtcp-mux/></description>
|
||||||
|
// see http://code.google.com/p/libjingle/issues/detail?id=309 -- no spec though
|
||||||
|
// and http://mail.jabber.org/pipermail/jingle/2011-December/001761.html
|
||||||
|
if (desc.find('rtcp-mux').length) {
|
||||||
|
media += 'a=rtcp-mux\r\n';
|
||||||
|
}
|
||||||
|
|
||||||
|
if (desc.find('encryption').length) {
|
||||||
|
desc.find('encryption>crypto').each(function () {
|
||||||
|
media += 'a=crypto:' + this.getAttribute('tag');
|
||||||
|
media += ' ' + this.getAttribute('crypto-suite');
|
||||||
|
media += ' ' + this.getAttribute('key-params');
|
||||||
|
if (this.getAttribute('session-params')) {
|
||||||
|
media += ' ' + this.getAttribute('session-params');
|
||||||
|
}
|
||||||
|
media += '\r\n';
|
||||||
|
});
|
||||||
|
}
|
||||||
|
desc.find('payload-type').each(function () {
|
||||||
|
media += SDPUtil.build_rtpmap(this) + '\r\n';
|
||||||
|
if ($(this).find('>parameter').length) {
|
||||||
|
media += 'a=fmtp:' + this.getAttribute('id') + ' ';
|
||||||
|
media += $(this).find('parameter').map(function () { return (this.getAttribute('name') ? (this.getAttribute('name') + '=') : '') + this.getAttribute('value'); }).get().join('; ');
|
||||||
|
media += '\r\n';
|
||||||
|
}
|
||||||
|
// xep-0293
|
||||||
|
media += self.RtcpFbFromJingle($(this), this.getAttribute('id'));
|
||||||
|
});
|
||||||
|
|
||||||
|
// xep-0293
|
||||||
|
media += self.RtcpFbFromJingle(desc, '*');
|
||||||
|
|
||||||
|
// xep-0294
|
||||||
|
tmp = desc.find('>rtp-hdrext[xmlns="urn:xmpp:jingle:apps:rtp:rtp-hdrext:0"]');
|
||||||
|
tmp.each(function () {
|
||||||
|
media += 'a=extmap:' + this.getAttribute('id') + ' ' + this.getAttribute('uri') + '\r\n';
|
||||||
|
});
|
||||||
|
|
||||||
|
content.find('>bundle>transport[xmlns="urn:xmpp:jingle:transports:ice-udp:1"]>candidate').each(function () {
|
||||||
|
media += SDPUtil.candidateFromJingle(this);
|
||||||
|
});
|
||||||
|
|
||||||
|
// XEP-0339 handle ssrc-group attributes
|
||||||
|
tmp = content.find('description>ssrc-group[xmlns="urn:xmpp:jingle:apps:rtp:ssma:0"]').each(function() {
|
||||||
|
var semantics = this.getAttribute('semantics');
|
||||||
|
var ssrcs = $(this).find('>source').map(function() {
|
||||||
|
return this.getAttribute('ssrc');
|
||||||
|
}).get();
|
||||||
|
|
||||||
|
if (ssrcs.length != 0) {
|
||||||
|
media += 'a=ssrc-group:' + semantics + ' ' + ssrcs.join(' ') + '\r\n';
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
tmp = content.find('description>source[xmlns="urn:xmpp:jingle:apps:rtp:ssma:0"]');
|
||||||
|
tmp.each(function () {
|
||||||
|
var ssrc = this.getAttribute('ssrc');
|
||||||
|
$(this).find('>parameter').each(function () {
|
||||||
|
media += 'a=ssrc:' + ssrc + ' ' + this.getAttribute('name');
|
||||||
|
if (this.getAttribute('value') && this.getAttribute('value').length)
|
||||||
|
media += ':' + this.getAttribute('value');
|
||||||
|
media += '\r\n';
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
if (tmp.length === 0) {
|
||||||
|
// fallback to proprietary mapping of a=ssrc lines
|
||||||
|
tmp = content.find('description>ssrc[xmlns="http://estos.de/ns/ssrc"]');
|
||||||
|
if (tmp.length) {
|
||||||
|
media += 'a=ssrc:' + ssrc + ' cname:' + tmp.attr('cname') + '\r\n';
|
||||||
|
media += 'a=ssrc:' + ssrc + ' msid:' + tmp.attr('msid') + '\r\n';
|
||||||
|
media += 'a=ssrc:' + ssrc + ' mslabel:' + tmp.attr('mslabel') + '\r\n';
|
||||||
|
media += 'a=ssrc:' + ssrc + ' label:' + tmp.attr('label') + '\r\n';
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return media;
|
||||||
|
};
|
||||||
|
|
||||||
408
contrib/jitsimeetbridge/unjingle/strophe.jingle.sdp.util.js
Normal file
408
contrib/jitsimeetbridge/unjingle/strophe.jingle.sdp.util.js
Normal file
@@ -0,0 +1,408 @@
|
|||||||
|
/**
|
||||||
|
* Contains utility classes used in SDP class.
|
||||||
|
*
|
||||||
|
*/
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Class holds a=ssrc lines and media type a=mid
|
||||||
|
* @param ssrc synchronization source identifier number(a=ssrc lines from SDP)
|
||||||
|
* @param type media type eg. "audio" or "video"(a=mid frm SDP)
|
||||||
|
* @constructor
|
||||||
|
*/
|
||||||
|
function ChannelSsrc(ssrc, type) {
|
||||||
|
this.ssrc = ssrc;
|
||||||
|
this.type = type;
|
||||||
|
this.lines = [];
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Class holds a=ssrc-group: lines
|
||||||
|
* @param semantics
|
||||||
|
* @param ssrcs
|
||||||
|
* @constructor
|
||||||
|
*/
|
||||||
|
function ChannelSsrcGroup(semantics, ssrcs, line) {
|
||||||
|
this.semantics = semantics;
|
||||||
|
this.ssrcs = ssrcs;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Helper class represents media channel. Is a container for ChannelSsrc, holds channel idx and media type.
|
||||||
|
* @param channelNumber channel idx in SDP media array.
|
||||||
|
* @param mediaType media type(a=mid)
|
||||||
|
* @constructor
|
||||||
|
*/
|
||||||
|
function MediaChannel(channelNumber, mediaType) {
|
||||||
|
/**
|
||||||
|
* SDP channel number
|
||||||
|
* @type {*}
|
||||||
|
*/
|
||||||
|
this.chNumber = channelNumber;
|
||||||
|
/**
|
||||||
|
* Channel media type(a=mid)
|
||||||
|
* @type {*}
|
||||||
|
*/
|
||||||
|
this.mediaType = mediaType;
|
||||||
|
/**
|
||||||
|
* The maps of ssrc numbers to ChannelSsrc objects.
|
||||||
|
*/
|
||||||
|
this.ssrcs = {};
|
||||||
|
|
||||||
|
/**
|
||||||
|
* The array of ChannelSsrcGroup objects.
|
||||||
|
* @type {Array}
|
||||||
|
*/
|
||||||
|
this.ssrcGroups = [];
|
||||||
|
}
|
||||||
|
|
||||||
|
SDPUtil = {
|
||||||
|
iceparams: function (mediadesc, sessiondesc) {
|
||||||
|
var data = null;
|
||||||
|
if (SDPUtil.find_line(mediadesc, 'a=ice-ufrag:', sessiondesc) &&
|
||||||
|
SDPUtil.find_line(mediadesc, 'a=ice-pwd:', sessiondesc)) {
|
||||||
|
data = {
|
||||||
|
ufrag: SDPUtil.parse_iceufrag(SDPUtil.find_line(mediadesc, 'a=ice-ufrag:', sessiondesc)),
|
||||||
|
pwd: SDPUtil.parse_icepwd(SDPUtil.find_line(mediadesc, 'a=ice-pwd:', sessiondesc))
|
||||||
|
};
|
||||||
|
}
|
||||||
|
return data;
|
||||||
|
},
|
||||||
|
parse_iceufrag: function (line) {
|
||||||
|
return line.substring(12);
|
||||||
|
},
|
||||||
|
build_iceufrag: function (frag) {
|
||||||
|
return 'a=ice-ufrag:' + frag;
|
||||||
|
},
|
||||||
|
parse_icepwd: function (line) {
|
||||||
|
return line.substring(10);
|
||||||
|
},
|
||||||
|
build_icepwd: function (pwd) {
|
||||||
|
return 'a=ice-pwd:' + pwd;
|
||||||
|
},
|
||||||
|
parse_mid: function (line) {
|
||||||
|
return line.substring(6);
|
||||||
|
},
|
||||||
|
parse_mline: function (line) {
|
||||||
|
var parts = line.substring(2).split(' '),
|
||||||
|
data = {};
|
||||||
|
data.media = parts.shift();
|
||||||
|
data.port = parts.shift();
|
||||||
|
data.proto = parts.shift();
|
||||||
|
if (parts[parts.length - 1] === '') { // trailing whitespace
|
||||||
|
parts.pop();
|
||||||
|
}
|
||||||
|
data.fmt = parts;
|
||||||
|
return data;
|
||||||
|
},
|
||||||
|
build_mline: function (mline) {
|
||||||
|
return 'm=' + mline.media + ' ' + mline.port + ' ' + mline.proto + ' ' + mline.fmt.join(' ');
|
||||||
|
},
|
||||||
|
parse_rtpmap: function (line) {
|
||||||
|
var parts = line.substring(9).split(' '),
|
||||||
|
data = {};
|
||||||
|
data.id = parts.shift();
|
||||||
|
parts = parts[0].split('/');
|
||||||
|
data.name = parts.shift();
|
||||||
|
data.clockrate = parts.shift();
|
||||||
|
data.channels = parts.length ? parts.shift() : '1';
|
||||||
|
return data;
|
||||||
|
},
|
||||||
|
/**
|
||||||
|
* Parses SDP line "a=sctpmap:..." and extracts SCTP port from it.
|
||||||
|
* @param line eg. "a=sctpmap:5000 webrtc-datachannel"
|
||||||
|
* @returns [SCTP port number, protocol, streams]
|
||||||
|
*/
|
||||||
|
parse_sctpmap: function (line)
|
||||||
|
{
|
||||||
|
var parts = line.substring(10).split(' ');
|
||||||
|
var sctpPort = parts[0];
|
||||||
|
var protocol = parts[1];
|
||||||
|
// Stream count is optional
|
||||||
|
var streamCount = parts.length > 2 ? parts[2] : null;
|
||||||
|
return [sctpPort, protocol, streamCount];// SCTP port
|
||||||
|
},
|
||||||
|
build_rtpmap: function (el) {
|
||||||
|
var line = 'a=rtpmap:' + el.getAttribute('id') + ' ' + el.getAttribute('name') + '/' + el.getAttribute('clockrate');
|
||||||
|
if (el.getAttribute('channels') && el.getAttribute('channels') != '1') {
|
||||||
|
line += '/' + el.getAttribute('channels');
|
||||||
|
}
|
||||||
|
return line;
|
||||||
|
},
|
||||||
|
parse_crypto: function (line) {
|
||||||
|
var parts = line.substring(9).split(' '),
|
||||||
|
data = {};
|
||||||
|
data.tag = parts.shift();
|
||||||
|
data['crypto-suite'] = parts.shift();
|
||||||
|
data['key-params'] = parts.shift();
|
||||||
|
if (parts.length) {
|
||||||
|
data['session-params'] = parts.join(' ');
|
||||||
|
}
|
||||||
|
return data;
|
||||||
|
},
|
||||||
|
parse_fingerprint: function (line) { // RFC 4572
|
||||||
|
var parts = line.substring(14).split(' '),
|
||||||
|
data = {};
|
||||||
|
data.hash = parts.shift();
|
||||||
|
data.fingerprint = parts.shift();
|
||||||
|
// TODO assert that fingerprint satisfies 2UHEX *(":" 2UHEX) ?
|
||||||
|
return data;
|
||||||
|
},
|
||||||
|
parse_fmtp: function (line) {
|
||||||
|
var parts = line.split(' '),
|
||||||
|
i, key, value,
|
||||||
|
data = [];
|
||||||
|
parts.shift();
|
||||||
|
parts = parts.join(' ').split(';');
|
||||||
|
for (i = 0; i < parts.length; i++) {
|
||||||
|
key = parts[i].split('=')[0];
|
||||||
|
while (key.length && key[0] == ' ') {
|
||||||
|
key = key.substring(1);
|
||||||
|
}
|
||||||
|
value = parts[i].split('=')[1];
|
||||||
|
if (key && value) {
|
||||||
|
data.push({name: key, value: value});
|
||||||
|
} else if (key) {
|
||||||
|
// rfc 4733 (DTMF) style stuff
|
||||||
|
data.push({name: '', value: key});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return data;
|
||||||
|
},
|
||||||
|
parse_icecandidate: function (line) {
|
||||||
|
var candidate = {},
|
||||||
|
elems = line.split(' ');
|
||||||
|
candidate.foundation = elems[0].substring(12);
|
||||||
|
candidate.component = elems[1];
|
||||||
|
candidate.protocol = elems[2].toLowerCase();
|
||||||
|
candidate.priority = elems[3];
|
||||||
|
candidate.ip = elems[4];
|
||||||
|
candidate.port = elems[5];
|
||||||
|
// elems[6] => "typ"
|
||||||
|
candidate.type = elems[7];
|
||||||
|
candidate.generation = 0; // default value, may be overwritten below
|
||||||
|
for (var i = 8; i < elems.length; i += 2) {
|
||||||
|
switch (elems[i]) {
|
||||||
|
case 'raddr':
|
||||||
|
candidate['rel-addr'] = elems[i + 1];
|
||||||
|
break;
|
||||||
|
case 'rport':
|
||||||
|
candidate['rel-port'] = elems[i + 1];
|
||||||
|
break;
|
||||||
|
case 'generation':
|
||||||
|
candidate.generation = elems[i + 1];
|
||||||
|
break;
|
||||||
|
case 'tcptype':
|
||||||
|
candidate.tcptype = elems[i + 1];
|
||||||
|
break;
|
||||||
|
default: // TODO
|
||||||
|
console.log('parse_icecandidate not translating "' + elems[i] + '" = "' + elems[i + 1] + '"');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
candidate.network = '1';
|
||||||
|
candidate.id = Math.random().toString(36).substr(2, 10); // not applicable to SDP -- FIXME: should be unique, not just random
|
||||||
|
return candidate;
|
||||||
|
},
|
||||||
|
build_icecandidate: function (cand) {
|
||||||
|
var line = ['a=candidate:' + cand.foundation, cand.component, cand.protocol, cand.priority, cand.ip, cand.port, 'typ', cand.type].join(' ');
|
||||||
|
line += ' ';
|
||||||
|
switch (cand.type) {
|
||||||
|
case 'srflx':
|
||||||
|
case 'prflx':
|
||||||
|
case 'relay':
|
||||||
|
if (cand.hasOwnAttribute('rel-addr') && cand.hasOwnAttribute('rel-port')) {
|
||||||
|
line += 'raddr';
|
||||||
|
line += ' ';
|
||||||
|
line += cand['rel-addr'];
|
||||||
|
line += ' ';
|
||||||
|
line += 'rport';
|
||||||
|
line += ' ';
|
||||||
|
line += cand['rel-port'];
|
||||||
|
line += ' ';
|
||||||
|
}
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
if (cand.hasOwnAttribute('tcptype')) {
|
||||||
|
line += 'tcptype';
|
||||||
|
line += ' ';
|
||||||
|
line += cand.tcptype;
|
||||||
|
line += ' ';
|
||||||
|
}
|
||||||
|
line += 'generation';
|
||||||
|
line += ' ';
|
||||||
|
line += cand.hasOwnAttribute('generation') ? cand.generation : '0';
|
||||||
|
return line;
|
||||||
|
},
|
||||||
|
parse_ssrc: function (desc) {
|
||||||
|
// proprietary mapping of a=ssrc lines
|
||||||
|
// TODO: see "Jingle RTP Source Description" by Juberti and P. Thatcher on google docs
|
||||||
|
// and parse according to that
|
||||||
|
var lines = desc.split('\r\n'),
|
||||||
|
data = {};
|
||||||
|
for (var i = 0; i < lines.length; i++) {
|
||||||
|
if (lines[i].substring(0, 7) == 'a=ssrc:') {
|
||||||
|
var idx = lines[i].indexOf(' ');
|
                data[lines[i].substr(idx + 1).split(':', 2)[0]] = lines[i].substr(idx + 1).split(':', 2)[1];
            }
        }
        return data;
    },
    parse_rtcpfb: function (line) {
        var parts = line.substr(10).split(' ');
        var data = {};
        data.pt = parts.shift();
        data.type = parts.shift();
        data.params = parts;
        return data;
    },
    parse_extmap: function (line) {
        var parts = line.substr(9).split(' ');
        var data = {};
        data.value = parts.shift();
        if (data.value.indexOf('/') != -1) {
            data.direction = data.value.substr(data.value.indexOf('/') + 1);
            data.value = data.value.substr(0, data.value.indexOf('/'));
        } else {
            data.direction = 'both';
        }
        data.uri = parts.shift();
        data.params = parts;
        return data;
    },
    find_line: function (haystack, needle, sessionpart) {
        var lines = haystack.split('\r\n');
        for (var i = 0; i < lines.length; i++) {
            if (lines[i].substring(0, needle.length) == needle) {
                return lines[i];
            }
        }
        if (!sessionpart) {
            return false;
        }
        // search session part
        lines = sessionpart.split('\r\n');
        for (var j = 0; j < lines.length; j++) {
            if (lines[j].substring(0, needle.length) == needle) {
                return lines[j];
            }
        }
        return false;
    },
    find_lines: function (haystack, needle, sessionpart) {
        var lines = haystack.split('\r\n'),
            needles = [];
        for (var i = 0; i < lines.length; i++) {
            if (lines[i].substring(0, needle.length) == needle)
                needles.push(lines[i]);
        }
        if (needles.length || !sessionpart) {
            return needles;
        }
        // search session part
        lines = sessionpart.split('\r\n');
        for (var j = 0; j < lines.length; j++) {
            if (lines[j].substring(0, needle.length) == needle) {
                needles.push(lines[j]);
            }
        }
        return needles;
    },
    candidateToJingle: function (line) {
        // a=candidate:2979166662 1 udp 2113937151 192.168.2.100 57698 typ host generation 0
        // <candidate component=... foundation=... generation=... id=... ip=... network=... port=... priority=... protocol=... type=.../>
        if (line.indexOf('candidate:') === 0) {
            line = 'a=' + line;
        } else if (line.substring(0, 12) != 'a=candidate:') {
            console.log('parseCandidate called with a line that is not a candidate line');
            console.log(line);
            return null;
        }
        if (line.substring(line.length - 2) == '\r\n') // chomp it
            line = line.substring(0, line.length - 2);
        var candidate = {},
            elems = line.split(' '),
            i;
        if (elems[6] != 'typ') {
            console.log('did not find typ in the right place');
            console.log(line);
            return null;
        }
        candidate.foundation = elems[0].substring(12);
        candidate.component = elems[1];
        candidate.protocol = elems[2].toLowerCase();
        candidate.priority = elems[3];
        candidate.ip = elems[4];
        candidate.port = elems[5];
        // elems[6] => "typ"
        candidate.type = elems[7];

        candidate.generation = '0'; // default, may be overwritten below
        for (i = 8; i < elems.length; i += 2) {
            switch (elems[i]) {
            case 'raddr':
                candidate['rel-addr'] = elems[i + 1];
                break;
            case 'rport':
                candidate['rel-port'] = elems[i + 1];
                break;
            case 'generation':
                candidate.generation = elems[i + 1];
                break;
            case 'tcptype':
                candidate.tcptype = elems[i + 1];
                break;
            default: // TODO
                console.log('not translating "' + elems[i] + '" = "' + elems[i + 1] + '"');
            }
        }
        candidate.network = '1';
        candidate.id = Math.random().toString(36).substr(2, 10); // not applicable to SDP -- FIXME: should be unique, not just random
        return candidate;
    },
    candidateFromJingle: function (cand) {
        var line = 'a=candidate:';
        line += cand.getAttribute('foundation');
        line += ' ';
        line += cand.getAttribute('component');
        line += ' ';
        line += cand.getAttribute('protocol'); //.toUpperCase(); // chrome M23 doesn't like this
        line += ' ';
        line += cand.getAttribute('priority');
        line += ' ';
        line += cand.getAttribute('ip');
        line += ' ';
        line += cand.getAttribute('port');
        line += ' ';
        line += 'typ';
        line += ' ' + cand.getAttribute('type');
        line += ' ';
        switch (cand.getAttribute('type')) {
        case 'srflx':
        case 'prflx':
        case 'relay':
            if (cand.getAttribute('rel-addr') && cand.getAttribute('rel-port')) {
                line += 'raddr';
                line += ' ';
                line += cand.getAttribute('rel-addr');
                line += ' ';
                line += 'rport';
                line += ' ';
                line += cand.getAttribute('rel-port');
                line += ' ';
            }
            break;
        }
        if (cand.getAttribute('protocol').toLowerCase() == 'tcp') {
            line += 'tcptype';
            line += ' ';
            line += cand.getAttribute('tcptype');
            line += ' ';
        }
        line += 'generation';
        line += ' ';
        line += cand.getAttribute('generation') || '0';
        return line + '\r\n';
    }
};

exports.SDPUtil = SDPUtil;
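The two candidate helpers above are inverses of each other: candidateToJingle parses an SDP "a=candidate:" line into a plain object whose keys mirror the attributes of a Jingle <candidate/> element, and candidateFromJingle turns such an element back into an SDP line. A minimal usage sketch, assuming the file is loaded as a Node module (the require path is hypothetical; only the exported SDPUtil object and the example candidate line come from the file above):

    // Hypothetical require path; adjust to wherever this file lives in the tree.
    var SDPUtil = require('./strophe.jingle.sdp.util.js').SDPUtil;

    // The candidate line is the example quoted in candidateToJingle's own comment.
    var line = 'candidate:2979166662 1 udp 2113937151 192.168.2.100 57698 typ host generation 0';
    var cand = SDPUtil.candidateToJingle(line);
    // cand => { foundation: '2979166662', component: '1', protocol: 'udp',
    //           priority: '2113937151', ip: '192.168.2.100', port: '57698',
    //           type: 'host', generation: '0', network: '1', id: <random> }
    console.log(cand);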
254  contrib/jitsimeetbridge/unjingle/strophe/XMLHttpRequest.js  Normal file
@@ -0,0 +1,254 @@
/**
 * Wrapper for built-in http.js to emulate the browser XMLHttpRequest object.
 *
 * This can be used with JS designed for browsers to improve reuse of code and
 * allow the use of existing libraries.
 *
 * Usage: include("XMLHttpRequest.js") and use XMLHttpRequest per W3C specs.
 *
 * @todo SSL Support
 * @author Dan DeFelippi <dan@driverdan.com>
 * @license MIT
 */

var Url = require("url")
    ,sys = require("util");

exports.XMLHttpRequest = function() {
    /**
     * Private variables
     */
    var self = this;
    var http = require('http');
    var https = require('https');

    // Holds http.js objects
    var client;
    var request;
    var response;

    // Request settings
    var settings = {};

    // Set some default headers
    var defaultHeaders = {
        "User-Agent": "node.js",
        "Accept": "*/*",
    };

    var headers = defaultHeaders;

    /**
     * Constants
     */
    this.UNSENT = 0;
    this.OPENED = 1;
    this.HEADERS_RECEIVED = 2;
    this.LOADING = 3;
    this.DONE = 4;

    /**
     * Public vars
     */
    // Current state
    this.readyState = this.UNSENT;

    // default ready state change handler in case one is not set or is set late
    this.onreadystatechange = function() {};

    // Result & response
    this.responseText = "";
    this.responseXML = "";
    this.status = null;
    this.statusText = null;

    /**
     * Open the connection. Currently supports local server requests.
     *
     * @param string method Connection method (eg GET, POST)
     * @param string url URL for the connection.
     * @param boolean async Asynchronous connection. Default is true.
     * @param string user Username for basic authentication (optional)
     * @param string password Password for basic authentication (optional)
     */
    this.open = function(method, url, async, user, password) {
        settings = {
            "method": method,
            "url": url,
            "async": async || null,
            "user": user || null,
            "password": password || null
        };

        this.abort();

        setState(this.OPENED);
    };

    /**
     * Sets a header for the request.
     *
     * @param string header Header name
     * @param string value Header value
     */
    this.setRequestHeader = function(header, value) {
        headers[header] = value;
    };

    /**
     * Gets a header from the server response.
     *
     * @param string header Name of header to get.
     * @return string Text of the header or null if it doesn't exist.
     */
    this.getResponseHeader = function(header) {
        if (this.readyState > this.OPENED && response.headers[header]) {
            return header + ": " + response.headers[header];
        }

        return null;
    };

    /**
     * Gets all the response headers.
     *
     * @return string
     */
    this.getAllResponseHeaders = function() {
        if (this.readyState < this.HEADERS_RECEIVED) {
            throw "INVALID_STATE_ERR: Headers have not been received.";
        }
        var result = "";

        for (var i in response.headers) {
            result += i + ": " + response.headers[i] + "\r\n";
        }
        return result.substr(0, result.length - 2);
    };

    /**
     * Sends the request to the server.
     *
     * @param string data Optional data to send as request body.
     */
    this.send = function(data) {
        if (this.readyState != this.OPENED) {
            throw "INVALID_STATE_ERR: connection must be opened before send() is called";
        }

        var ssl = false;
        var url = Url.parse(settings.url);

        // Determine the server
        switch (url.protocol) {
        case 'https:':
            ssl = true;
            // SSL & non-SSL both need host, no break here.
        case 'http:':
            var host = url.hostname;
            break;

        case undefined:
        case '':
            var host = "localhost";
            break;

        default:
            throw "Protocol not supported.";
        }

        // Default to port 80. If accessing localhost on another port be sure
        // to use http://localhost:port/path
        var port = url.port || (ssl ? 443 : 80);
        // Add query string if one is used
        var uri = url.pathname + (url.search ? url.search : '');

        // Set the Host header or the server may reject the request
        this.setRequestHeader("Host", host);

        // Set content length header
        if (settings.method == "GET" || settings.method == "HEAD") {
            data = null;
        } else if (data) {
            this.setRequestHeader("Content-Length", Buffer.byteLength(data));

            if (!headers["Content-Type"]) {
                this.setRequestHeader("Content-Type", "text/plain;charset=UTF-8");
            }
        }

        // Use the proper protocol
        var doRequest = ssl ? https.request : http.request;

        var options = {
            host: host,
            port: port,
            path: uri,
            method: settings.method,
            headers: headers,
            agent: false
        };

        var req = doRequest(options, function(res) {
            response = res;
            response.setEncoding("utf8");

            setState(self.HEADERS_RECEIVED);
            self.status = response.statusCode;

            response.on('data', function(chunk) {
                // Make sure there's some data
                if (chunk) {
                    self.responseText += chunk;
                }
                setState(self.LOADING);
            });

            response.on('end', function() {
                setState(self.DONE);
            });

            response.on('error', function(error) {
                self.handleError(error);
            });
        }).on('error', function(error) {
            self.handleError(error);
        });

        req.setHeader("Connection", "Close");

        // Node 0.4 and later won't accept empty data. Make sure it's needed.
        if (data) {
            req.write(data);
        }

        req.end();
    };

    this.handleError = function(error) {
        this.status = 503;
        this.statusText = error;
        this.responseText = error.stack;
        setState(this.DONE);
    };

    /**
     * Aborts a request.
     */
    this.abort = function() {
        headers = defaultHeaders;
        this.readyState = this.UNSENT;
        this.responseText = "";
        this.responseXML = "";
    };

    /**
     * Changes readyState and calls onreadystatechange.
     *
     * @param int state New state
     */
    var setState = function(state) {
        self.readyState = state;
        self.onreadystatechange();
    }
};
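The wrapper above follows the usage note in its own header: require the module and drive the returned object like a browser XMLHttpRequest. A minimal sketch, assuming some HTTP endpoint is reachable (the URL below is a placeholder):

    var XMLHttpRequest = require("./XMLHttpRequest.js").XMLHttpRequest;

    var xhr = new XMLHttpRequest();
    xhr.onreadystatechange = function() {
        // DONE (4) is defined on the instance above.
        if (xhr.readyState == xhr.DONE) {
            console.log(xhr.status, xhr.responseText);
        }
    };
    xhr.open("GET", "http://localhost:8080/", true); // placeholder URL
    xhr.send();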
83  contrib/jitsimeetbridge/unjingle/strophe/base64.js  Normal file
@@ -0,0 +1,83 @@
// This code was written by Tyler Akins and has been placed in the
// public domain. It would be nice if you left this header intact.
// Base64 code from Tyler Akins -- http://rumkin.com

var Base64 = (function () {
    var keyStr = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/=";

    var obj = {
        /**
         * Encodes a string in base64
         * @param {String} input The string to encode in base64.
         */
        encode: function (input) {
            var output = "";
            var chr1, chr2, chr3;
            var enc1, enc2, enc3, enc4;
            var i = 0;

            do {
                chr1 = input.charCodeAt(i++);
                chr2 = input.charCodeAt(i++);
                chr3 = input.charCodeAt(i++);

                enc1 = chr1 >> 2;
                enc2 = ((chr1 & 3) << 4) | (chr2 >> 4);
                enc3 = ((chr2 & 15) << 2) | (chr3 >> 6);
                enc4 = chr3 & 63;

                if (isNaN(chr2)) {
                    enc3 = enc4 = 64;
                } else if (isNaN(chr3)) {
                    enc4 = 64;
                }

                output = output + keyStr.charAt(enc1) + keyStr.charAt(enc2) +
                    keyStr.charAt(enc3) + keyStr.charAt(enc4);
            } while (i < input.length);

            return output;
        },

        /**
         * Decodes a base64 string.
         * @param {String} input The string to decode.
         */
        decode: function (input) {
            var output = "";
            var chr1, chr2, chr3;
            var enc1, enc2, enc3, enc4;
            var i = 0;

            // remove all characters that are not A-Z, a-z, 0-9, +, /, or =
            input = input.replace(/[^A-Za-z0-9\+\/\=]/g, '');

            do {
                enc1 = keyStr.indexOf(input.charAt(i++));
                enc2 = keyStr.indexOf(input.charAt(i++));
                enc3 = keyStr.indexOf(input.charAt(i++));
                enc4 = keyStr.indexOf(input.charAt(i++));

                chr1 = (enc1 << 2) | (enc2 >> 4);
                chr2 = ((enc2 & 15) << 4) | (enc3 >> 2);
                chr3 = ((enc3 & 3) << 6) | enc4;

                output = output + String.fromCharCode(chr1);

                if (enc3 != 64) {
                    output = output + String.fromCharCode(chr2);
                }
                if (enc4 != 64) {
                    output = output + String.fromCharCode(chr3);
                }
            } while (i < input.length);

            return output;
        }
    };

    return obj;
})();

// Nodify
exports.Base64 = Base64;
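As the "Nodify" line shows, the Base64 object is exported for use from Node as well as the browser. A quick sketch of the two calls it provides (the expected strings follow from standard base64 of "hello"):

    var Base64 = require("./base64.js").Base64;

    console.log(Base64.encode("hello"));    // "aGVsbG8="
    console.log(Base64.decode("aGVsbG8=")); // "hello"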
279  contrib/jitsimeetbridge/unjingle/strophe/md5.js  Normal file
@@ -0,0 +1,279 @@
/*
 * A JavaScript implementation of the RSA Data Security, Inc. MD5 Message
 * Digest Algorithm, as defined in RFC 1321.
 * Version 2.1 Copyright (C) Paul Johnston 1999 - 2002.
 * Other contributors: Greg Holt, Andrew Kepert, Ydnar, Lostinet
 * Distributed under the BSD License
 * See http://pajhome.org.uk/crypt/md5 for more info.
 */

var MD5 = (function () {
    /*
     * Configurable variables. You may need to tweak these to be compatible with
     * the server-side, but the defaults work in most cases.
     */
    var hexcase = 0; /* hex output format. 0 - lowercase; 1 - uppercase */
    var b64pad = ""; /* base-64 pad character. "=" for strict RFC compliance */
    var chrsz = 8; /* bits per input character. 8 - ASCII; 16 - Unicode */

    /*
     * Add integers, wrapping at 2^32. This uses 16-bit operations internally
     * to work around bugs in some JS interpreters.
     */
    var safe_add = function (x, y) {
        var lsw = (x & 0xFFFF) + (y & 0xFFFF);
        var msw = (x >> 16) + (y >> 16) + (lsw >> 16);
        return (msw << 16) | (lsw & 0xFFFF);
    };

    /*
     * Bitwise rotate a 32-bit number to the left.
     */
    var bit_rol = function (num, cnt) {
        return (num << cnt) | (num >>> (32 - cnt));
    };

    /*
     * Convert a string to an array of little-endian words
     * If chrsz is ASCII, characters >255 have their hi-byte silently ignored.
     */
    var str2binl = function (str) {
        var bin = [];
        var mask = (1 << chrsz) - 1;
        for(var i = 0; i < str.length * chrsz; i += chrsz)
        {
            bin[i>>5] |= (str.charCodeAt(i / chrsz) & mask) << (i%32);
        }
        return bin;
    };

    /*
     * Convert an array of little-endian words to a string
     */
    var binl2str = function (bin) {
        var str = "";
        var mask = (1 << chrsz) - 1;
        for(var i = 0; i < bin.length * 32; i += chrsz)
        {
            str += String.fromCharCode((bin[i>>5] >>> (i % 32)) & mask);
        }
        return str;
    };

    /*
     * Convert an array of little-endian words to a hex string.
     */
    var binl2hex = function (binarray) {
        var hex_tab = hexcase ? "0123456789ABCDEF" : "0123456789abcdef";
        var str = "";
        for(var i = 0; i < binarray.length * 4; i++)
        {
            str += hex_tab.charAt((binarray[i>>2] >> ((i%4)*8+4)) & 0xF) +
                hex_tab.charAt((binarray[i>>2] >> ((i%4)*8 )) & 0xF);
        }
        return str;
    };

    /*
     * Convert an array of little-endian words to a base-64 string
     */
    var binl2b64 = function (binarray) {
        var tab = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/";
        var str = "";
        var triplet, j;
        for(var i = 0; i < binarray.length * 4; i += 3)
        {
            triplet = (((binarray[i >> 2] >> 8 * ( i %4)) & 0xFF) << 16) |
                (((binarray[i+1 >> 2] >> 8 * ((i+1)%4)) & 0xFF) << 8 ) |
                ((binarray[i+2 >> 2] >> 8 * ((i+2)%4)) & 0xFF);
            for(j = 0; j < 4; j++)
            {
                if(i * 8 + j * 6 > binarray.length * 32) { str += b64pad; }
                else { str += tab.charAt((triplet >> 6*(3-j)) & 0x3F); }
            }
        }
        return str;
    };

    /*
     * These functions implement the four basic operations the algorithm uses.
     */
    var md5_cmn = function (q, a, b, x, s, t) {
        return safe_add(bit_rol(safe_add(safe_add(a, q),safe_add(x, t)), s),b);
    };

    var md5_ff = function (a, b, c, d, x, s, t) {
        return md5_cmn((b & c) | ((~b) & d), a, b, x, s, t);
    };

    var md5_gg = function (a, b, c, d, x, s, t) {
        return md5_cmn((b & d) | (c & (~d)), a, b, x, s, t);
    };

    var md5_hh = function (a, b, c, d, x, s, t) {
        return md5_cmn(b ^ c ^ d, a, b, x, s, t);
    };

    var md5_ii = function (a, b, c, d, x, s, t) {
        return md5_cmn(c ^ (b | (~d)), a, b, x, s, t);
    };

    /*
     * Calculate the MD5 of an array of little-endian words, and a bit length
     */
    var core_md5 = function (x, len) {
        /* append padding */
        x[len >> 5] |= 0x80 << ((len) % 32);
        x[(((len + 64) >>> 9) << 4) + 14] = len;

        var a = 1732584193;
        var b = -271733879;
        var c = -1732584194;
        var d = 271733878;

        var olda, oldb, oldc, oldd;
        for (var i = 0; i < x.length; i += 16)
        {
            olda = a;
            oldb = b;
            oldc = c;
            oldd = d;

            a = md5_ff(a, b, c, d, x[i+ 0], 7 , -680876936);
            d = md5_ff(d, a, b, c, x[i+ 1], 12, -389564586);
            c = md5_ff(c, d, a, b, x[i+ 2], 17, 606105819);
            b = md5_ff(b, c, d, a, x[i+ 3], 22, -1044525330);
            a = md5_ff(a, b, c, d, x[i+ 4], 7 , -176418897);
            d = md5_ff(d, a, b, c, x[i+ 5], 12, 1200080426);
            c = md5_ff(c, d, a, b, x[i+ 6], 17, -1473231341);
            b = md5_ff(b, c, d, a, x[i+ 7], 22, -45705983);
            a = md5_ff(a, b, c, d, x[i+ 8], 7 , 1770035416);
            d = md5_ff(d, a, b, c, x[i+ 9], 12, -1958414417);
            c = md5_ff(c, d, a, b, x[i+10], 17, -42063);
            b = md5_ff(b, c, d, a, x[i+11], 22, -1990404162);
            a = md5_ff(a, b, c, d, x[i+12], 7 , 1804603682);
            d = md5_ff(d, a, b, c, x[i+13], 12, -40341101);
            c = md5_ff(c, d, a, b, x[i+14], 17, -1502002290);
            b = md5_ff(b, c, d, a, x[i+15], 22, 1236535329);

            a = md5_gg(a, b, c, d, x[i+ 1], 5 , -165796510);
            d = md5_gg(d, a, b, c, x[i+ 6], 9 , -1069501632);
            c = md5_gg(c, d, a, b, x[i+11], 14, 643717713);
            b = md5_gg(b, c, d, a, x[i+ 0], 20, -373897302);
            a = md5_gg(a, b, c, d, x[i+ 5], 5 , -701558691);
            d = md5_gg(d, a, b, c, x[i+10], 9 , 38016083);
            c = md5_gg(c, d, a, b, x[i+15], 14, -660478335);
            b = md5_gg(b, c, d, a, x[i+ 4], 20, -405537848);
            a = md5_gg(a, b, c, d, x[i+ 9], 5 , 568446438);
            d = md5_gg(d, a, b, c, x[i+14], 9 , -1019803690);
            c = md5_gg(c, d, a, b, x[i+ 3], 14, -187363961);
            b = md5_gg(b, c, d, a, x[i+ 8], 20, 1163531501);
            a = md5_gg(a, b, c, d, x[i+13], 5 , -1444681467);
            d = md5_gg(d, a, b, c, x[i+ 2], 9 , -51403784);
            c = md5_gg(c, d, a, b, x[i+ 7], 14, 1735328473);
            b = md5_gg(b, c, d, a, x[i+12], 20, -1926607734);

            a = md5_hh(a, b, c, d, x[i+ 5], 4 , -378558);
            d = md5_hh(d, a, b, c, x[i+ 8], 11, -2022574463);
            c = md5_hh(c, d, a, b, x[i+11], 16, 1839030562);
            b = md5_hh(b, c, d, a, x[i+14], 23, -35309556);
            a = md5_hh(a, b, c, d, x[i+ 1], 4 , -1530992060);
            d = md5_hh(d, a, b, c, x[i+ 4], 11, 1272893353);
            c = md5_hh(c, d, a, b, x[i+ 7], 16, -155497632);
            b = md5_hh(b, c, d, a, x[i+10], 23, -1094730640);
            a = md5_hh(a, b, c, d, x[i+13], 4 , 681279174);
            d = md5_hh(d, a, b, c, x[i+ 0], 11, -358537222);
            c = md5_hh(c, d, a, b, x[i+ 3], 16, -722521979);
            b = md5_hh(b, c, d, a, x[i+ 6], 23, 76029189);
            a = md5_hh(a, b, c, d, x[i+ 9], 4 , -640364487);
            d = md5_hh(d, a, b, c, x[i+12], 11, -421815835);
            c = md5_hh(c, d, a, b, x[i+15], 16, 530742520);
            b = md5_hh(b, c, d, a, x[i+ 2], 23, -995338651);

            a = md5_ii(a, b, c, d, x[i+ 0], 6 , -198630844);
            d = md5_ii(d, a, b, c, x[i+ 7], 10, 1126891415);
            c = md5_ii(c, d, a, b, x[i+14], 15, -1416354905);
            b = md5_ii(b, c, d, a, x[i+ 5], 21, -57434055);
            a = md5_ii(a, b, c, d, x[i+12], 6 , 1700485571);
            d = md5_ii(d, a, b, c, x[i+ 3], 10, -1894986606);
            c = md5_ii(c, d, a, b, x[i+10], 15, -1051523);
            b = md5_ii(b, c, d, a, x[i+ 1], 21, -2054922799);
            a = md5_ii(a, b, c, d, x[i+ 8], 6 , 1873313359);
            d = md5_ii(d, a, b, c, x[i+15], 10, -30611744);
            c = md5_ii(c, d, a, b, x[i+ 6], 15, -1560198380);
            b = md5_ii(b, c, d, a, x[i+13], 21, 1309151649);
            a = md5_ii(a, b, c, d, x[i+ 4], 6 , -145523070);
            d = md5_ii(d, a, b, c, x[i+11], 10, -1120210379);
            c = md5_ii(c, d, a, b, x[i+ 2], 15, 718787259);
            b = md5_ii(b, c, d, a, x[i+ 9], 21, -343485551);

            a = safe_add(a, olda);
            b = safe_add(b, oldb);
            c = safe_add(c, oldc);
            d = safe_add(d, oldd);
        }
        return [a, b, c, d];
    };


    /*
     * Calculate the HMAC-MD5, of a key and some data
     */
    var core_hmac_md5 = function (key, data) {
        var bkey = str2binl(key);
        if(bkey.length > 16) { bkey = core_md5(bkey, key.length * chrsz); }

        var ipad = new Array(16), opad = new Array(16);
        for(var i = 0; i < 16; i++)
        {
            ipad[i] = bkey[i] ^ 0x36363636;
            opad[i] = bkey[i] ^ 0x5C5C5C5C;
        }

        var hash = core_md5(ipad.concat(str2binl(data)), 512 + data.length * chrsz);
        return core_md5(opad.concat(hash), 512 + 128);
    };

    var obj = {
        /*
         * These are the functions you'll usually want to call.
         * They take string arguments and return either hex or base-64 encoded
         * strings.
         */
        hexdigest: function (s) {
            return binl2hex(core_md5(str2binl(s), s.length * chrsz));
        },

        b64digest: function (s) {
            return binl2b64(core_md5(str2binl(s), s.length * chrsz));
        },

        hash: function (s) {
            return binl2str(core_md5(str2binl(s), s.length * chrsz));
        },

        hmac_hexdigest: function (key, data) {
            return binl2hex(core_hmac_md5(key, data));
        },

        hmac_b64digest: function (key, data) {
            return binl2b64(core_hmac_md5(key, data));
        },

        hmac_hash: function (key, data) {
            return binl2str(core_hmac_md5(key, data));
        },

        /*
         * Perform a simple self-test to see if the VM is working
         */
        test: function () {
            return MD5.hexdigest("abc") === "900150983cd24fb0d6963f7d28e17f72";
        }
    };

    return obj;
})();

// Nodify
exports.MD5 = MD5;
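The exported object carries its own self-test, MD5.hexdigest("abc"), which doubles as a usage example; the hmac_* variants take a key as the first argument. A short sketch:

    var MD5 = require("./md5.js").MD5;

    console.log(MD5.test());                        // true if hexdigest("abc") matches the known value
    console.log(MD5.hexdigest("abc"));              // "900150983cd24fb0d6963f7d28e17f72"
    console.log(MD5.hmac_hexdigest("key", "data")); // HMAC-MD5 as a lowercase hex string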
3256  contrib/jitsimeetbridge/unjingle/strophe/strophe.js  Normal file
File diff suppressed because it is too large
48  contrib/jitsimeetbridge/unjingle/unjingle.js  Normal file
@@ -0,0 +1,48 @@
var strophe = require("./strophe/strophe.js").Strophe;

var Strophe = strophe.Strophe;
var $iq = strophe.$iq;
var $msg = strophe.$msg;
var $build = strophe.$build;
var $pres = strophe.$pres;

var jsdom = require("jsdom");
var window = jsdom.jsdom().parentWindow;
var $ = require('jquery')(window);

var stropheJingle = require("./strophe.jingle.sdp.js");


var input = '';

process.stdin.on('readable', function() {
    var chunk = process.stdin.read();
    if (chunk !== null) {
        input += chunk;
    }
});

process.stdin.on('end', function() {
    if (process.argv[2] == '--jingle') {
        var elem = $(input);
        // app does:
        // sess.setRemoteDescription($(iq).find('>jingle'), 'offer');
        //console.log(elem.find('>content'));
        var sdp = new stropheJingle.SDP('');
        sdp.fromJingle(elem);
        console.log(sdp.raw);
    } else if (process.argv[2] == '--sdp') {
        var sdp = new stropheJingle.SDP(input);
        var accept = $iq({to: '%(tojid)s',
                          type: 'set'})
            .c('jingle', {xmlns: 'urn:xmpp:jingle:1',
                          //action: 'session-accept',
                          action: '%(action)s',
                          initiator: '%(initiator)s',
                          responder: '%(responder)s',
                          sid: '%(sid)s' });
        sdp.toJingle(accept, 'responder');
        console.log(Strophe.serialize(accept));
    }
});
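Taken together, the script reads a whole stanza or SDP blob from stdin and converts it in whichever direction the flag selects: invoking it as "node unjingle.js --jingle" with a Jingle offer on stdin prints the derived SDP, while "node unjingle.js --sdp" with an SDP blob on stdin prints a serialized Jingle IQ whose %(...)s fields are left as placeholders, presumably for the calling bridge script to substitute.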
Some files were not shown because too many files have changed in this diff.