
Compare commits


9 Commits

Author         SHA1        Message                                                                     Date
Bruno Windels  c5e1de677d  Merge branch 'develop' into bwindels/registerasregularuser                  2018-09-27 19:18:38 +01:00
Bruno Windels  a776ccd8f5  add changelog                                                               2018-09-21 12:51:55 +02:00
Bruno Windels  48957d2c8c  didnt mean to commit this, obviously                                        2018-09-19 20:57:00 +02:00
Bruno Windels  bd37c4a444  fix logic, store_true/false doesn't set None by default but opposite bool  2018-09-19 20:55:26 +02:00
Bruno Windels  77f1de141d  Merge branch 'develop' into bwindels/registerasregularuser                  2018-09-19 18:18:40 +02:00
Bruno Windels  81b56f8078  make clear in help text when will be prompted                               2018-09-19 18:18:08 +02:00
Bruno Windels  d4c389f0e4  remove not as the flags is set to false                                     2018-09-19 13:22:19 +02:00
Bruno Windels  4311f861a3  fix PR review                                                               2018-09-19 12:30:32 +02:00
Bruno Windels  ceec6ce5f4  add --regular-user flag to registration script                              2018-09-10 19:32:56 +02:00
206 changed files with 2659 additions and 6546 deletions
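Two of the commits above (d4c389f0e4 and bd37c4a444) revolve around an argparse subtlety: a store_true/store_false flag pair defaults to the opposite boolean rather than None, so "flag not given" cannot be detected unless default=None is set explicitly. A minimal sketch of the pitfall, with illustrative flag names rather than the registration script's actual ones:

    import argparse

    parser = argparse.ArgumentParser()
    # Without default=None an omitted --admin silently becomes False
    # (store_true's opposite), indistinguishable from an explicit --no-admin.
    parser.add_argument("--admin", dest="admin", action="store_true", default=None)
    parser.add_argument("--no-admin", dest="admin", action="store_false", default=None)

    print(parser.parse_args([]).admin)              # None -> prompt the user
    print(parser.parse_args(["--admin"]).admin)     # True
    print(parser.parse_args(["--no-admin"]).admin)  # False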


@@ -4,125 +4,112 @@ jobs:
machine: true
steps:
- checkout
- run: docker build -f docker/Dockerfile -t matrixdotorg/synapse:${CIRCLE_TAG} .
- run: docker build -f docker/Dockerfile -t matrixdotorg/synapse:${CIRCLE_TAG}-py3 --build-arg PYTHON_VERSION=3.6 .
- run: docker build -f docker/Dockerfile -t matrixdotorg/synapse:$CIRCLE_TAG .
- run: docker login --username $DOCKER_HUB_USERNAME --password $DOCKER_HUB_PASSWORD
- run: docker push matrixdotorg/synapse:${CIRCLE_TAG}
- run: docker push matrixdotorg/synapse:${CIRCLE_TAG}-py3
- run: docker push matrixdotorg/synapse:$CIRCLE_TAG
dockerhubuploadlatest:
machine: true
steps:
- checkout
- run: docker build -f docker/Dockerfile -t matrixdotorg/synapse:${CIRCLE_SHA1} .
- run: docker build -f docker/Dockerfile -t matrixdotorg/synapse:${CIRCLE_SHA1}-py3 --build-arg PYTHON_VERSION=3.6 .
- run: docker build -f docker/Dockerfile -t matrixdotorg/synapse:$CIRCLE_SHA1 .
- run: docker login --username $DOCKER_HUB_USERNAME --password $DOCKER_HUB_PASSWORD
- run: docker tag matrixdotorg/synapse:${CIRCLE_SHA1} matrixdotorg/synapse:latest
- run: docker tag matrixdotorg/synapse:${CIRCLE_SHA1}-py3 matrixdotorg/synapse:latest-py3
- run: docker push matrixdotorg/synapse:${CIRCLE_SHA1}
- run: docker push matrixdotorg/synapse:${CIRCLE_SHA1}-py3
- run: docker tag matrixdotorg/synapse:$CIRCLE_SHA1 matrixdotorg/synapse:latest
- run: docker push matrixdotorg/synapse:$CIRCLE_SHA1
- run: docker push matrixdotorg/synapse:latest
- run: docker push matrixdotorg/synapse:latest-py3
sytestpy2:
docker:
- image: matrixdotorg/sytest-synapsepy2
working_directory: /src
machine: true
steps:
- checkout
- run: /synapse_sytest.sh
- run: docker pull matrixdotorg/sytest-synapsepy2
- run: docker run --rm -it -v $(pwd)\:/src -v $(pwd)/logs\:/logs matrixdotorg/sytest-synapsepy2
- store_artifacts:
path: /logs
path: ~/project/logs
destination: logs
- store_test_results:
path: /logs
path: logs
sytestpy2postgres:
docker:
- image: matrixdotorg/sytest-synapsepy2
working_directory: /src
machine: true
steps:
- checkout
- run: POSTGRES=1 /synapse_sytest.sh
- run: docker pull matrixdotorg/sytest-synapsepy2
- run: docker run --rm -it -v $(pwd)\:/src -v $(pwd)/logs\:/logs -e POSTGRES=1 matrixdotorg/sytest-synapsepy2
- store_artifacts:
path: /logs
path: ~/project/logs
destination: logs
- store_test_results:
path: /logs
path: logs
sytestpy2merged:
docker:
- image: matrixdotorg/sytest-synapsepy2
working_directory: /src
machine: true
steps:
- checkout
- run: bash .circleci/merge_base_branch.sh
- run: /synapse_sytest.sh
- run: docker pull matrixdotorg/sytest-synapsepy2
- run: docker run --rm -it -v $(pwd)\:/src -v $(pwd)/logs\:/logs matrixdotorg/sytest-synapsepy2
- store_artifacts:
path: /logs
path: ~/project/logs
destination: logs
- store_test_results:
path: /logs
path: logs
sytestpy2postgresmerged:
docker:
- image: matrixdotorg/sytest-synapsepy2
working_directory: /src
machine: true
steps:
- checkout
- run: bash .circleci/merge_base_branch.sh
- run: POSTGRES=1 /synapse_sytest.sh
- run: docker pull matrixdotorg/sytest-synapsepy2
- run: docker run --rm -it -v $(pwd)\:/src -v $(pwd)/logs\:/logs -e POSTGRES=1 matrixdotorg/sytest-synapsepy2
- store_artifacts:
path: /logs
path: ~/project/logs
destination: logs
- store_test_results:
path: /logs
path: logs
sytestpy3:
docker:
- image: matrixdotorg/sytest-synapsepy3
working_directory: /src
machine: true
steps:
- checkout
- run: /synapse_sytest.sh
- run: docker pull matrixdotorg/sytest-synapsepy3
- run: docker run --rm -it -v $(pwd)\:/src -v $(pwd)/logs\:/logs matrixdotorg/sytest-synapsepy3
- store_artifacts:
path: /logs
path: ~/project/logs
destination: logs
- store_test_results:
path: /logs
path: logs
sytestpy3postgres:
docker:
- image: matrixdotorg/sytest-synapsepy3
working_directory: /src
machine: true
steps:
- checkout
- run: POSTGRES=1 /synapse_sytest.sh
- run: docker pull matrixdotorg/sytest-synapsepy3
- run: docker run --rm -it -v $(pwd)\:/src -v $(pwd)/logs\:/logs -e POSTGRES=1 matrixdotorg/sytest-synapsepy3
- store_artifacts:
path: /logs
path: ~/project/logs
destination: logs
- store_test_results:
path: /logs
path: logs
sytestpy3merged:
docker:
- image: matrixdotorg/sytest-synapsepy3
working_directory: /src
machine: true
steps:
- checkout
- run: bash .circleci/merge_base_branch.sh
- run: /synapse_sytest.sh
- run: docker pull matrixdotorg/sytest-synapsepy3
- run: docker run --rm -it -v $(pwd)\:/src -v $(pwd)/logs\:/logs matrixdotorg/sytest-synapsepy3
- store_artifacts:
path: /logs
path: ~/project/logs
destination: logs
- store_test_results:
path: /logs
path: logs
sytestpy3postgresmerged:
docker:
- image: matrixdotorg/sytest-synapsepy3
working_directory: /src
machine: true
steps:
- checkout
- run: bash .circleci/merge_base_branch.sh
- run: POSTGRES=1 /synapse_sytest.sh
- run: docker pull matrixdotorg/sytest-synapsepy3
- run: docker run --rm -it -v $(pwd)\:/src -v $(pwd)/logs\:/logs -e POSTGRES=1 matrixdotorg/sytest-synapsepy3
- store_artifacts:
path: /logs
path: ~/project/logs
destination: logs
- store_test_results:
path: /logs
path: logs
workflows:
version: 2
@@ -163,7 +150,7 @@ workflows:
- dockerhubuploadrelease:
filters:
tags:
only: /v[0-9].[0-9]+.[0-9]+.*/
only: /^v[0-9].[0-9]+.[0-9]+(.[0-9]+)?/
branches:
ignore: /.*/
- dockerhubuploadlatest:
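The tightened tag filter anchors the version at the start of the ref and allows an optional fourth component (for point releases such as v0.33.5.1). Checked here with Python's re module, assuming CircleCI matches tag names comparably:

    import re

    new_filter = re.compile(r"^v[0-9].[0-9]+.[0-9]+(.[0-9]+)?")

    for tag in ["v0.33.8", "v0.33.5.1", "v0.33.8rc2", "release-v0.33.8"]:
        print(tag, bool(new_filter.match(tag)))
    # The dots are unescaped and the tail unanchored, so rc tags still match:
    # v0.33.8 True, v0.33.5.1 True, v0.33.8rc2 True, release-v0.33.8 False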


@@ -9,16 +9,13 @@ source $BASH_ENV
if [[ -z "${CIRCLE_PR_NUMBER}" ]]
then
echo "Can't figure out what the PR number is! Assuming merge target is develop."
# It probably hasn't had a PR opened yet. Since all PRs land on develop, we
# can probably assume it's based on it and will be merged into it.
GITBASE="develop"
else
# Get the reference, using the GitHub API
GITBASE=`wget -O- https://api.github.com/repos/matrix-org/synapse/pulls/${CIRCLE_PR_NUMBER} | jq -r '.base.ref'`
echo "Can't figure out what the PR number is!"
exit 1
fi
# Get the reference, using the GitHub API
GITBASE=`curl -q https://api.github.com/repos/matrix-org/synapse/pulls/${CIRCLE_PR_NUMBER} | jq -r '.base.ref'`
# Show what we are before
git show -s
@@ -31,4 +28,4 @@ git fetch -u origin $GITBASE
git merge --no-edit origin/$GITBASE
# Show what we are after.
git show -s
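The same base-branch lookup, sketched in Python (CIRCLE_PR_NUMBER is supplied by CircleCI; the GitHub API endpoint is the one the script itself calls):

    import json
    import os
    from urllib.request import urlopen

    pr_number = os.environ.get("CIRCLE_PR_NUMBER")
    if not pr_number:
        # No PR opened yet; all PRs land on develop, so assume that target.
        gitbase = "develop"
    else:
        url = "https://api.github.com/repos/matrix-org/synapse/pulls/" + pr_number
        gitbase = json.load(urlopen(url))["base"]["ref"]
    print("merge target:", gitbase)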


@@ -1,27 +1,12 @@
sudo: false
language: python
cache:
directories:
# we only bother to cache the wheels; parts of the http cache get
# invalidated every build (because they get served with a max-age of 600
# seconds), which means that we end up re-uploading the whole cache for
# every build, which is time-consuming In any case, it's not obvious that
# downloading the cache from S3 would be much faster than downloading the
# originals from pypi.
#
- $HOME/.cache/pip/wheels
# tell travis to cache ~/.cache/pip
cache: pip
# don't clone the whole repo history, one commit will do
git:
depth: 1
# only build branches we care about (PRs are built separately)
branches:
only:
- master
- develop
- /^release-v/
before_script:
- git remote set-branches --add origin develop
- git fetch origin develop
matrix:
fast_finish: true
@@ -29,8 +14,8 @@ matrix:
- python: 2.7
env: TOX_ENV=packaging
- python: 3.6
env: TOX_ENV="pep8,check_isort"
- python: 2.7
env: TOX_ENV=pep8
- python: 2.7
env: TOX_ENV=py27
@@ -54,14 +39,11 @@ matrix:
services:
- postgresql
- # we only need to check for the newsfragment if it's a PR build
if: type = pull_request
python: 3.6
- python: 3.6
env: TOX_ENV=check_isort
- python: 3.6
env: TOX_ENV=check-newsfragment
script:
- git remote set-branches --add origin develop
- git fetch origin develop
- tox -e $TOX_ENV
install:
- pip install tox
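The check-newsfragment environment is why origin/develop is fetched first: the check needs a merge base to diff against. Roughly this idea (a sketch of the gist, not the actual tox environment):

    import subprocess

    merge_base = subprocess.check_output(
        ["git", "merge-base", "origin/develop", "HEAD"]
    ).strip().decode()
    changed = subprocess.check_output(
        ["git", "diff", "--name-only", merge_base, "HEAD"]
    ).decode().splitlines()
    if not any(f.startswith("changelog.d/") for f in changed):
        raise SystemExit("no changelog.d newsfragment in this branch")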


@@ -1,203 +1,11 @@
Synapse 0.33.8rc2 (2018-10-31)
==============================
Bugfixes
--------
- Searches that request profile info now no longer fail with a 500. Fixes
a regression in 0.33.8rc1. ([\#4122](https://github.com/matrix-org/synapse/issues/4122))
Synapse 0.33.8rc1 (2018-10-29)
==============================
Features
--------
- Servers with auto-join rooms will now automatically create those rooms when the first user registers ([\#3975](https://github.com/matrix-org/synapse/issues/3975))
- Add config option to control alias creation ([\#4051](https://github.com/matrix-org/synapse/issues/4051))
- The register_new_matrix_user script is now ported to Python 3. ([\#4085](https://github.com/matrix-org/synapse/issues/4085))
- Configure Docker image to listen on both ipv4 and ipv6. ([\#4089](https://github.com/matrix-org/synapse/issues/4089))
Bugfixes
--------
- Fix HTTP error response codes for federated group requests. ([\#3969](https://github.com/matrix-org/synapse/issues/3969))
- Fix issue where Python 3 users couldn't paginate /publicRooms ([\#4046](https://github.com/matrix-org/synapse/issues/4046))
- Fix URL previewing to work in Python 3.7 ([\#4050](https://github.com/matrix-org/synapse/issues/4050))
- synctl will use the right python executable to run worker processes ([\#4057](https://github.com/matrix-org/synapse/issues/4057))
- Manhole now works again on Python 3, instead of failing with a "couldn't match all kex parts" when connecting. ([\#4060](https://github.com/matrix-org/synapse/issues/4060), [\#4067](https://github.com/matrix-org/synapse/issues/4067))
- Fix some metrics being racy and causing exceptions when polled by Prometheus. ([\#4061](https://github.com/matrix-org/synapse/issues/4061))
- Fix bug which prevented email notifications from being sent unless an absolute path was given for `email_templates`. ([\#4068](https://github.com/matrix-org/synapse/issues/4068))
- Correctly account for cpu usage by background threads ([\#4074](https://github.com/matrix-org/synapse/issues/4074))
- Fix race condition where config defined reserved users were not being added to
the monthly active user list prior to the homeserver reactor firing up ([\#4081](https://github.com/matrix-org/synapse/issues/4081))
- Fix bug which prevented backslashes being used in event field filters ([\#4083](https://github.com/matrix-org/synapse/issues/4083))
Internal Changes
----------------
- Add information about the [matrix-docker-ansible-deploy](https://github.com/spantaleev/matrix-docker-ansible-deploy) playbook ([\#3698](https://github.com/matrix-org/synapse/issues/3698))
- Add initial implementation of new state resolution algorithm ([\#3786](https://github.com/matrix-org/synapse/issues/3786))
- Reduce database load when fetching state groups ([\#4011](https://github.com/matrix-org/synapse/issues/4011))
- Various cleanups in the federation client code ([\#4031](https://github.com/matrix-org/synapse/issues/4031))
- Run the CircleCI builds in docker containers ([\#4041](https://github.com/matrix-org/synapse/issues/4041))
- Only colourise synctl output when attached to tty ([\#4049](https://github.com/matrix-org/synapse/issues/4049))
- Refactor room alias creation code ([\#4063](https://github.com/matrix-org/synapse/issues/4063))
- Make the Python scripts in the top-level scripts folders meet pep8 and pass flake8. ([\#4068](https://github.com/matrix-org/synapse/issues/4068))
- The README now contains example for the Caddy web server. Contributed by steamp0rt. ([\#4072](https://github.com/matrix-org/synapse/issues/4072))
- Add psutil as an explicit dependency ([\#4073](https://github.com/matrix-org/synapse/issues/4073))
- Clean up threading and logcontexts in pushers ([\#4075](https://github.com/matrix-org/synapse/issues/4075))
- Correctly manage logcontexts during startup to fix some "Unexpected logging context" warnings ([\#4076](https://github.com/matrix-org/synapse/issues/4076))
- Give some more things logcontexts ([\#4077](https://github.com/matrix-org/synapse/issues/4077))
- Clean up some bits of code which were flagged by the linter ([\#4082](https://github.com/matrix-org/synapse/issues/4082))
Synapse 0.33.7 (2018-10-18)
===========================
**Warning**: This release removes the example email notification templates from
`res/templates` (they are now internal to the python package). This should only
affect you if you (a) deploy your Synapse instance from a git checkout or a
github snapshot URL, and (b) have email notifications enabled.
If you have email notifications enabled, you should ensure that
`email.template_dir` is either configured to point at a directory where you
have installed customised templates, or leave it unset to use the default
templates.
Synapse 0.33.7rc2 (2018-10-17)
==============================
Features
--------
- Ship the example email templates as part of the package ([\#4052](https://github.com/matrix-org/synapse/issues/4052))
Bugfixes
--------
- Fix bug which made get_missing_events return too few events ([\#4045](https://github.com/matrix-org/synapse/issues/4045))
Synapse 0.33.7rc1 (2018-10-15)
==============================
Features
--------
- Add support for end-to-end key backup (MSC1687) ([\#4019](https://github.com/matrix-org/synapse/issues/4019))
Bugfixes
--------
- Fix bug in event persistence logic which caused 'NoneType is not iterable' ([\#3995](https://github.com/matrix-org/synapse/issues/3995))
- Fix exception in background metrics collection ([\#3996](https://github.com/matrix-org/synapse/issues/3996))
- Fix exception handling in fetching remote profiles ([\#3997](https://github.com/matrix-org/synapse/issues/3997))
- Fix handling of rejected threepid invites ([\#3999](https://github.com/matrix-org/synapse/issues/3999))
- Workers now start on Python 3. ([\#4027](https://github.com/matrix-org/synapse/issues/4027))
- Synapse now starts on Python 3.7. ([\#4033](https://github.com/matrix-org/synapse/issues/4033))
Internal Changes
----------------
- Log exceptions in looping calls ([\#4008](https://github.com/matrix-org/synapse/issues/4008))
- Optimisation for serving federation requests ([\#4017](https://github.com/matrix-org/synapse/issues/4017))
- Add metric to count number of non-empty sync responses ([\#4022](https://github.com/matrix-org/synapse/issues/4022))
Synapse 0.33.6 (2018-10-04)
===========================
Internal Changes
----------------
- Pin to prometheus_client<0.4 to avoid renaming all of our metrics ([\#4002](https://github.com/matrix-org/synapse/issues/4002))
Synapse 0.33.6rc1 (2018-10-03)
==============================
Features
--------
- Adding the ability to change MAX_UPLOAD_SIZE for the docker container variables. ([\#3883](https://github.com/matrix-org/synapse/issues/3883))
- Report "python_version" in the phone home stats ([\#3894](https://github.com/matrix-org/synapse/issues/3894))
- Always LL ourselves if we're in a room ([\#3916](https://github.com/matrix-org/synapse/issues/3916))
- Include eventid in log lines when processing incoming federation transactions ([\#3959](https://github.com/matrix-org/synapse/issues/3959))
- Remove spurious check which made 'localhost' servers not work ([\#3964](https://github.com/matrix-org/synapse/issues/3964))
Bugfixes
--------
- Fix problem when playing media from Chrome using direct URL (thanks @remjey!) ([\#3578](https://github.com/matrix-org/synapse/issues/3578))
- support registering regular users non-interactively with register_new_matrix_user script ([\#3836](https://github.com/matrix-org/synapse/issues/3836))
- Fix broken invite email links for self hosted riots ([\#3868](https://github.com/matrix-org/synapse/issues/3868))
- Don't ratelimit autojoins ([\#3879](https://github.com/matrix-org/synapse/issues/3879))
- Fix 500 error when deleting unknown room alias ([\#3889](https://github.com/matrix-org/synapse/issues/3889))
- Fix some b'abcd' noise in logs and metrics ([\#3892](https://github.com/matrix-org/synapse/issues/3892), [\#3895](https://github.com/matrix-org/synapse/issues/3895))
- When we join a room, always try the server we used for the alias lookup first, to avoid unresponsive and out-of-date servers. ([\#3899](https://github.com/matrix-org/synapse/issues/3899))
- Fix incorrect server-name indication for outgoing federation requests ([\#3907](https://github.com/matrix-org/synapse/issues/3907))
- Fix adding client IPs to the database failing on Python 3. ([\#3908](https://github.com/matrix-org/synapse/issues/3908))
- Fix bug where things occasionally were not being timed out correctly. ([\#3910](https://github.com/matrix-org/synapse/issues/3910))
- Fix bug where outbound federation would stop talking to some servers when using workers ([\#3914](https://github.com/matrix-org/synapse/issues/3914))
- Fix some instances of ExpiringCache not expiring cache items ([\#3932](https://github.com/matrix-org/synapse/issues/3932), [\#3980](https://github.com/matrix-org/synapse/issues/3980))
- Fix out-of-bounds error when LLing yourself ([\#3936](https://github.com/matrix-org/synapse/issues/3936))
- Sending server notices regarding user consent now works on Python 3. ([\#3938](https://github.com/matrix-org/synapse/issues/3938))
- Fix exceptions from metrics handler ([\#3956](https://github.com/matrix-org/synapse/issues/3956))
- Fix error message for events with m.room.create missing from auth_events ([\#3960](https://github.com/matrix-org/synapse/issues/3960))
- Fix errors due to concurrent monthly_active_user upserts ([\#3961](https://github.com/matrix-org/synapse/issues/3961))
- Fix exceptions when processing incoming events over federation ([\#3968](https://github.com/matrix-org/synapse/issues/3968))
- Replaced all occurrences of e.message with str(e). Contributed by Schnuffle ([\#3970](https://github.com/matrix-org/synapse/issues/3970))
- Fix lazy loaded sync in the presence of rejected state events ([\#3986](https://github.com/matrix-org/synapse/issues/3986))
- Fix error when logging incomplete HTTP requests ([\#3990](https://github.com/matrix-org/synapse/issues/3990))
Internal Changes
----------------
- Unit tests can now be run under PostgreSQL in Docker using ``test_postgresql.sh``. ([\#3699](https://github.com/matrix-org/synapse/issues/3699))
- Speed up calculation of typing updates for replication ([\#3794](https://github.com/matrix-org/synapse/issues/3794))
- Remove documentation regarding installation on Cygwin, the use of WSL is recommended instead. ([\#3873](https://github.com/matrix-org/synapse/issues/3873))
- Fix typo in README, synaspse -> synapse ([\#3897](https://github.com/matrix-org/synapse/issues/3897))
- Increase the timeout when filling missing events in federation requests ([\#3903](https://github.com/matrix-org/synapse/issues/3903))
- Improve the logging when handling a federation transaction ([\#3904](https://github.com/matrix-org/synapse/issues/3904), [\#3966](https://github.com/matrix-org/synapse/issues/3966))
- Improve logging of outbound federation requests ([\#3906](https://github.com/matrix-org/synapse/issues/3906), [\#3909](https://github.com/matrix-org/synapse/issues/3909))
- Fix the docker image building on python 3 ([\#3911](https://github.com/matrix-org/synapse/issues/3911))
- Add a regression test for logging failed HTTP requests on Python 3. ([\#3912](https://github.com/matrix-org/synapse/issues/3912))
- Comments and interface cleanup for on_receive_pdu ([\#3924](https://github.com/matrix-org/synapse/issues/3924))
- Fix spurious exceptions when remote http client closes connection ([\#3925](https://github.com/matrix-org/synapse/issues/3925))
- Log exceptions thrown by background tasks ([\#3927](https://github.com/matrix-org/synapse/issues/3927))
- Add a cache to get_destination_retry_timings ([\#3933](https://github.com/matrix-org/synapse/issues/3933), [\#3991](https://github.com/matrix-org/synapse/issues/3991))
- Automate pushes to docker hub ([\#3946](https://github.com/matrix-org/synapse/issues/3946))
- Require attrs 16.0.0 or later ([\#3947](https://github.com/matrix-org/synapse/issues/3947))
- Fix incompatibility with python3 on alpine ([\#3948](https://github.com/matrix-org/synapse/issues/3948))
- Run the test suite on the oldest supported versions of our dependencies in CI. ([\#3952](https://github.com/matrix-org/synapse/issues/3952))
- CircleCI now only runs merged jobs on PRs, and commit jobs on develop, master, and release branches. ([\#3957](https://github.com/matrix-org/synapse/issues/3957))
- Fix docstrings and add tests for state store methods ([\#3958](https://github.com/matrix-org/synapse/issues/3958))
- fix docstring for FederationClient.get_state_for_room ([\#3963](https://github.com/matrix-org/synapse/issues/3963))
- Run notify_app_services as a bg process ([\#3965](https://github.com/matrix-org/synapse/issues/3965))
- Clarifications in FederationHandler ([\#3967](https://github.com/matrix-org/synapse/issues/3967))
- Further reduce the docker image size ([\#3972](https://github.com/matrix-org/synapse/issues/3972))
- Build py3 docker images for docker hub too ([\#3976](https://github.com/matrix-org/synapse/issues/3976))
- Updated the installation instructions to point to the matrix-synapse package on PyPI. ([\#3985](https://github.com/matrix-org/synapse/issues/3985))
- Disable USE_FROZEN_DICTS for unittests by default. ([\#3987](https://github.com/matrix-org/synapse/issues/3987))
- Remove unused Jenkins and development related files from the repo. ([\#3988](https://github.com/matrix-org/synapse/issues/3988))
- Improve stacktraces in certain exceptions in the logs ([\#3989](https://github.com/matrix-org/synapse/issues/3989))
Synapse 0.33.5.1 (2018-09-25)
=============================
Internal Changes
----------------
- Fix incompatibility with older Twisted version in tests. Thanks @OlegGirko! ([\#3940](https://github.com/matrix-org/synapse/issues/3940))
Synapse 0.33.5 (2018-09-24)


@@ -12,20 +12,24 @@ recursive-include synapse/storage/schema *.sql
recursive-include synapse/storage/schema *.py
recursive-include docs *
recursive-include res *
recursive-include scripts *
recursive-include scripts-dev *
recursive-include synapse *.pyi
recursive-include tests *.py
recursive-include synapse/res *
recursive-include synapse/static *.css
recursive-include synapse/static *.gif
recursive-include synapse/static *.html
recursive-include synapse/static *.js
exclude jenkins.sh
exclude jenkins*.sh
exclude jenkins*
exclude Dockerfile
exclude .dockerignore
exclude test_postgresql.sh
recursive-exclude jenkins *.sh
include pyproject.toml
recursive-include changelog.d *
@@ -34,6 +38,3 @@ prune .github
prune demo/etc
prune docker
prune .circleci
exclude jenkins*
recursive-exclude jenkins *.sh
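After reshuffling includes and excludes like this, it is worth confirming what actually ships. A quick check, assuming an sdist has already been built (the tarball path is an example):

    import tarfile

    # List packaged files matching the newly included patterns.
    with tarfile.open("dist/matrix-synapse-0.33.8.tar.gz") as sdist:
        for name in sdist.getnames():
            if "/res/" in name or name.endswith(".pyi"):
                print(name)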

MAP.rst (new file)

@@ -0,0 +1,35 @@
Directory Structure
===================
Warning: this may be a bit stale...
::
.
├── cmdclient Basic CLI python Matrix client
├── demo Scripts for running standalone Matrix demos
├── docs All doc, including the draft Matrix API spec
│   ├── client-server The client-server Matrix API spec
│   ├── model Domain-specific elements of the Matrix API spec
│   ├── server-server The server-server model of the Matrix API spec
│   └── sphinx The internal API doc of the Synapse homeserver
├── experiments Early experiments of using Synapse's internal APIs
├── graph Visualisation of Matrix's distributed message store
├── synapse The reference Matrix homeserver implementation
│   ├── api Common building blocks for the APIs
│   │   ├── events Definition of state representation Events
│   │   └── streams Definition of streamable Event objects
│   ├── app The __main__ entry point for the homeserver
│   ├── crypto The PKI client/server used for secure federation
│   │   └── resource PKI helper objects (e.g. keys)
│   ├── federation Server-server state replication logic
│   ├── handlers The main business logic of the homeserver
│   ├── http Wrappers around Twisted's HTTP server & client
│   ├── rest Servlet-style RESTful API
│   ├── storage Persistence subsystem (currently only sqlite3)
│   │   └── schema sqlite persistence schema
│   └── util Synapse-specific utilities
├── tests Unit tests for the Synapse homeserver
└── webclient Basic AngularJS Matrix web client


@@ -81,7 +81,7 @@ Thanks for using Matrix!
Synapse Installation
====================
Synapse is the reference Python/Twisted Matrix homeserver implementation.
Synapse is the reference python/twisted Matrix homeserver implementation.
System requirements:
@@ -91,13 +91,12 @@ System requirements:
Installing from source
----------------------
(Prebuilt packages are available for some platforms - see `Platform-Specific
Instructions`_.)
Synapse is written in Python but some of the libraries it uses are written in
C. So before we can install Synapse itself we need a working C compiler and the
header files for Python C extensions.
Synapse is written in python but some of the libraries it uses are written in
C. So before we can install synapse itself we need a working C compiler and the
header files for python C extensions.
Installing prerequisites on Ubuntu or Debian::
@@ -144,24 +143,18 @@ Installing prerequisites on OpenBSD::
doas pkg_add python libffi py-pip py-setuptools sqlite3 py-virtualenv \
libxslt
To install the Synapse homeserver run::
To install the synapse homeserver run::
virtualenv -p python2.7 ~/.synapse
source ~/.synapse/bin/activate
pip install --upgrade pip
pip install --upgrade setuptools
pip install matrix-synapse
pip install https://github.com/matrix-org/synapse/tarball/master
This installs Synapse, along with the libraries it uses, into a virtual
This installs synapse, along with the libraries it uses, into a virtual
environment under ``~/.synapse``. Feel free to pick a different directory
if you prefer.
This Synapse installation can then be later upgraded by using pip again with the
update flag::
source ~/.synapse/bin/activate
pip install -U matrix-synapse
In case of problems, please see the _`Troubleshooting` section below.
There is an official synapse image available at
@@ -174,13 +167,7 @@ Alternatively, Andreas Peters (previously Silvio Fricke) has contributed a
Dockerfile to automate a synapse server in a single Docker image, at
https://hub.docker.com/r/avhost/docker-matrix/tags/
Slavi Pantaleev has created an Ansible playbook,
which installs the official Docker image of Matrix Synapse
along with many other Matrix-related services (Postgres database, riot-web, coturn, mxisd, SSL support, etc.).
For more details, see
https://github.com/spantaleev/matrix-docker-ansible-deploy
Configuring Synapse
Configuring synapse
-------------------
Before you can start Synapse, you will need to generate a configuration
@@ -262,6 +249,26 @@ Setting up a TURN server
For reliable VoIP calls to be routed via this homeserver, you MUST configure
a TURN server. See `<docs/turn-howto.rst>`_ for details.
IPv6
----
As of Synapse 0.19 we finally support IPv6, many thanks to @kyrias and @glyph
for providing PR #1696.
However, for federation to work on hosts with IPv6 DNS servers you **must**
be running Twisted 17.1.0 or later - see https://github.com/matrix-org/synapse/issues/1002
for details. We can't make Synapse depend on Twisted 17.1 by default
yet as it will break most older distributions (see https://github.com/matrix-org/synapse/pull/1909)
so if you are using operating system dependencies you'll have to install your
own Twisted 17.1 package via pip or backports etc.
If you're running in a virtualenv then pip should have installed the newest
Twisted automatically, but if your virtualenv is old you will need to manually
upgrade to a newer Twisted dependency via:
pip install Twisted>=17.1.0
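A quick sanity check from inside the virtualenv (a sketch; twisted.version carries the installed version):

    import twisted

    major, minor = twisted.version.major, twisted.version.minor
    if (major, minor) < (17, 1):
        raise SystemExit("Twisted %d.%d is too old for IPv6 federation"
                         % (major, minor))
    print("Twisted %d.%d is new enough" % (major, minor))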
Running Synapse
===============
@@ -437,7 +444,8 @@ settings require a slightly more difficult installation process.
using the ``.`` command, rather than ``bash``'s ``source``.
5) Optionally, use ``pip`` to install ``lxml``, which Synapse needs to parse
webpages for their titles.
6) Use ``pip`` to install this repository: ``pip install matrix-synapse``
6) Use ``pip`` to install this repository: ``pip install
https://github.com/matrix-org/synapse/tarball/master``
7) Optionally, change ``_synapse``'s shell to ``/bin/false`` to reduce the
chance of a compromised Synapse server being used to take over your box.
@@ -465,7 +473,7 @@ Troubleshooting
Troubleshooting Installation
----------------------------
Synapse requires pip 8 or later, so if your OS provides too old a version you
Synapse requires pip 1.7 or later, so if your OS provides too old a version you
may need to manually upgrade it::
sudo pip install --upgrade pip
@@ -500,6 +508,28 @@ failing, e.g.::
pip install twisted
On OS X, if you encounter clang: error: unknown argument: '-mno-fused-madd' you
will need to export CFLAGS=-Qunused-arguments.
Troubleshooting Running
-----------------------
If synapse fails with ``missing "sodium.h"`` crypto errors, you may need
to manually upgrade PyNaCL, as synapse uses NaCl (https://nacl.cr.yp.to/) for
encryption and digital signatures.
Unfortunately PyNACL currently has a few issues
(https://github.com/pyca/pynacl/issues/53) and
(https://github.com/pyca/pynacl/issues/79) that mean it may not install
correctly, causing all tests to fail with errors about missing "sodium.h". To
fix try re-installing from PyPI or directly from
(https://github.com/pyca/pynacl)::
# Install from PyPI
pip install --user --upgrade --force pynacl
# Install from github
pip install --user https://github.com/pyca/pynacl/tarball/master
Running out of File Handles
~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -657,8 +687,7 @@ Using a reverse proxy with Synapse
It is recommended to put a reverse proxy such as
`nginx <https://nginx.org/en/docs/http/ngx_http_proxy_module.html>`_,
`Apache <https://httpd.apache.org/docs/current/mod/mod_proxy_http.html>`_,
`Caddy <https://caddyserver.com/docs/proxy>`_ or
`Apache <https://httpd.apache.org/docs/current/mod/mod_proxy_http.html>`_ or
`HAProxy <https://www.haproxy.org/>`_ in front of Synapse. One advantage of
doing so is that it means that you can expose the default https port (443) to
Matrix clients without needing to run Synapse with root privileges.
@@ -689,15 +718,7 @@ so an example nginx configuration might look like::
}
}
an example Caddy configuration might look like::
matrix.example.com {
proxy /_matrix http://localhost:8008 {
transparent
}
}
and an example Apache configuration might look like::
and an example apache configuration may look like::
<VirtualHost *:443>
SSLEngine on


@@ -18,7 +18,7 @@ instructions that may be required are listed later in this document.
.. code:: bash
pip install --upgrade --process-dependency-links matrix-synapse
pip install --upgrade --process-dependency-links https://github.com/matrix-org/synapse/tarball/master
# restart synapse
synctl restart
@@ -48,24 +48,11 @@ returned by the Client-Server API:
# configured on port 443.
curl -kv https://<host.name>/_matrix/client/versions 2>&1 | grep "Server:"
Upgrading to v0.33.7
====================
This release removes the example email notification templates from
``res/templates`` (they are now internal to the python package). This should
only affect you if you (a) deploy your Synapse instance from a git checkout or
a github snapshot URL, and (b) have email notifications enabled.
If you have email notifications enabled, you should ensure that
``email.template_dir`` is either configured to point at a directory where you
have installed customised templates, or leave it unset to use the default
templates.
Upgrading to v0.27.3
Upgrading to $NEXT_VERSION
====================
This release expands the anonymous usage stats sent if the opt-in
``report_stats`` configuration is set to ``true``. We now capture RSS memory
and cpu use at a very coarse level. This requires administrators to install
the optional ``psutil`` python module.
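The coarse RSS/CPU figures come from psutil; the measurement is roughly of this kind (a sketch, not Synapse's actual reporting code):

    import os
    import psutil

    proc = psutil.Process(os.getpid())
    print("RSS bytes:", proc.memory_info().rss)  # resident set size
    print("CPU times:", proc.cpu_times())        # user/system seconds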

changelog.d/3578.bugfix (new file)

@@ -0,0 +1 @@
Fix problem when playing media from Chrome using direct URL (thanks @remjey!)

changelog.d/3699.misc (new file)

@@ -0,0 +1,2 @@
Unit tests can now be run under PostgreSQL in Docker using
``test_postgresql.sh``.

changelog.d/3794.misc (new file)

@@ -0,0 +1 @@
Speed up calculation of typing updates for replication

changelog.d/3836.bugfix (new file)

@@ -0,0 +1 @@
support registering regular users non-interactively with register_new_matrix_user script

changelog.d/3868.bugfix (new file)

@@ -0,0 +1 @@
Fix broken invite email links for self hosted riots

changelog.d/3873.misc (new file)

@@ -0,0 +1,2 @@
Remove documentation regarding installation on Cygwin, the use of WSL is
recommended instead.

changelog.d/3879.bugfix (new file)

@@ -0,0 +1 @@
Don't ratelimit autojoins

changelog.d/3883.feature (new file)

@@ -0,0 +1 @@
Adding the ability to change MAX_UPLOAD_SIZE for the docker container variables.

changelog.d/3889.bugfix (new file)

@@ -0,0 +1 @@
Fix 500 error when deleting unknown room alias

changelog.d/3892.bugfix (new file)

@@ -0,0 +1 @@
Fix some b'abcd' noise in logs and metrics

changelog.d/3894.feature (new file)

@@ -0,0 +1 @@
Report "python_version" in the phone home stats

changelog.d/3895.bugfix (new file)

@@ -0,0 +1 @@
Fix some b'abcd' noise in logs and metrics

changelog.d/3897.misc (new file)

@@ -0,0 +1 @@
Fix typo in README, synaspse -> synapse

changelog.d/3899.bugfix (new file)

@@ -0,0 +1 @@
When we join a room, always try the server we used for the alias lookup first, to avoid unresponsive and out-of-date servers.

changelog.d/3903.misc (new file)

@@ -0,0 +1 @@
Increase the timeout when filling missing events in federation requests

changelog.d/3904.misc (new file)

@@ -0,0 +1 @@
Improve the logging when handling a federation transaction

changelog.d/3906.misc (new file)

@@ -0,0 +1 @@
Improve logging of outbound federation requests

changelog.d/3907.bugfix (new file)

@@ -0,0 +1 @@
Fix incorrect server-name indication for outgoing federation requests

changelog.d/3908.bugfix (new file)

@@ -0,0 +1 @@
Fix adding client IPs to the database failing on Python 3.

changelog.d/3909.misc (new file)

@@ -0,0 +1 @@
Improve logging of outbound federation requests

changelog.d/3910.bugfix (new file)

@@ -0,0 +1 @@
Fix bug where things occasionally were not being timed out correctly.

changelog.d/3911.misc (new file)

@@ -0,0 +1 @@
Fix the docker image building on python 3

changelog.d/3912.misc (new file)

@@ -0,0 +1 @@
Add a regression test for logging failed HTTP requests on Python 3.

changelog.d/3914.bugfix (new file)

@@ -0,0 +1 @@
Fix bug where outbound federation would stop talking to some servers when using workers

changelog.d/3916.feature (new file)

@@ -0,0 +1 @@
Always LL ourselves if we're in a room

changelog.d/3924.misc (new file)

@@ -0,0 +1 @@
Comments and interface cleanup for on_receive_pdu

changelog.d/3925.misc (new file)

@@ -0,0 +1 @@
Fix spurious exceptions when remote http client closes connection

changelog.d/3927.misc (new file)

@@ -0,0 +1 @@
Log exceptions thrown by background tasks

changelog.d/3932.bugfix (new file)

@@ -0,0 +1 @@
Fix some instances of ExpiringCache not expiring cache items

changelog.d/3936.bugfix (new file)

@@ -0,0 +1 @@
Fix out-of-bounds error when LLing yourself

changelog.d/3946.misc (new file)

@@ -0,0 +1 @@
Automate pushes to docker hub

changelog.d/3947.misc (new file)

@@ -0,0 +1 @@
Require attrs 16.0.0 or later

changelog.d/3948.misc (new file)

@@ -0,0 +1 @@
Fix incompatibility with python3 on alpine

changelog.d/3952.misc (new file)

@@ -0,0 +1 @@
Run the test suite on the oldest supported versions of our dependencies in CI.

changelog.d/3956.bugfix (new file)

@@ -0,0 +1 @@
Fix exceptions from metrics handler

changelog.d/3957.misc (new file)

@@ -0,0 +1 @@
CircleCI now only runs merged jobs on PRs, and commit jobs on develop, master, and release branches.

changelog.d/3958.misc (new file)

@@ -0,0 +1 @@
Fix docstrings and add tests for state store methods

changelog.d/3959.feature (new file)

@@ -0,0 +1 @@
Include eventid in log lines when processing incoming federation transactions

changelog.d/3961.bugfix (new file)

@@ -0,0 +1 @@
Fix errors due to concurrent monthly_active_user upserts

changelog.d/3963.misc (new file)

@@ -0,0 +1 @@
fix docstring for FederationClient.get_state_for_room

changelog.d/3965.misc (new file)

@@ -0,0 +1 @@
Run notify_app_services as a bg process

changelog.d/3966.misc (new file)

@@ -0,0 +1 @@
Improve the logging when handling a federation transaction

changelog.d/3967.misc (new file)

@@ -0,0 +1 @@
Clarifications in FederationHandler

changelog.d/3970.bugfix (new file)

@@ -0,0 +1 @@
Replaced all occurrences of e.message with str(e). Contributed by Schnuffle


@@ -1,13 +1,9 @@
ARG PYTHON_VERSION=2
FROM docker.io/python:${PYTHON_VERSION}-alpine3.8
###
### Stage 0: builder
###
FROM docker.io/python:${PYTHON_VERSION}-alpine3.8 as builder
COPY . /synapse
# install the OS build deps
RUN apk add \
RUN apk add --no-cache --virtual .build_deps \
build-base \
libffi-dev \
libjpeg-turbo-dev \
@@ -15,47 +11,30 @@ RUN apk add \
libxslt-dev \
linux-headers \
postgresql-dev \
zlib-dev
# build things which have slow build steps, before we copy synapse, so that
# the layer can be cached.
#
# (we really just care about caching a wheel here, as the "pip install" below
# will install them again.)
RUN pip install --prefix="/install" --no-warn-script-location \
cryptography \
msgpack-python \
pillow \
pynacl
# now install synapse and all of the python deps to /install.
COPY . /synapse
RUN pip install --prefix="/install" --no-warn-script-location \
lxml \
psycopg2 \
/synapse
###
### Stage 1: runtime
###
FROM docker.io/python:${PYTHON_VERSION}-alpine3.8
RUN apk add --no-cache --virtual .runtime_deps \
libffi \
zlib-dev \
&& cd /synapse \
&& apk add --no-cache --virtual .runtime_deps \
libffi \
libjpeg-turbo \
libressl \
libxslt \
libpq \
zlib \
su-exec
COPY --from=builder /install /usr/local
COPY ./docker/start.py /start.py
COPY ./docker/conf /conf
libressl \
libxslt \
libpq \
zlib \
su-exec \
&& pip install --upgrade \
lxml \
pip \
psycopg2 \
setuptools \
&& mkdir -p /synapse/cache \
&& pip install -f /synapse/cache --upgrade --process-dependency-links . \
&& mv /synapse/docker/start.py /synapse/docker/conf / \
&& rm -rf \
setup.cfg \
setup.py \
synapse \
&& apk del .build_deps
VOLUME ["/data"]
EXPOSE 8008/tcp 8448/tcp
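The builder stage is keyed off the PYTHON_VERSION build-arg, which is how the CircleCI config above produces both the plain and -py3 images from one Dockerfile. A sketch of driving both builds from Python (assumes the docker CLI on PATH):

    import subprocess

    for version, suffix in [("2", ""), ("3.6", "-py3")]:
        subprocess.check_call([
            "docker", "build", "-f", "docker/Dockerfile",
            "--build-arg", "PYTHON_VERSION=" + version,
            "-t", "matrixdotorg/synapse:latest" + suffix, ".",
        ])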


@@ -21,7 +21,7 @@ listeners:
{% if not SYNAPSE_NO_TLS %}
-
port: 8448
bind_addresses: ['::']
bind_addresses: ['0.0.0.0']
type: http
tls: true
x_forwarded: false
@@ -34,7 +34,7 @@ listeners:
- port: 8008
tls: false
bind_addresses: ['::']
bind_addresses: ['0.0.0.0']
type: http
x_forwarded: false
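The listener hunks swap between '0.0.0.0' (IPv4 only) and the IPv6 wildcard '::'. With IPV6_V6ONLY disabled on a dual-stack host, a single '::' socket also accepts IPv4 clients as v4-mapped addresses, which is the "listen on both ipv4 and ipv6" behaviour from #4089 above. A bare-sockets illustration:

    import socket

    sock = socket.socket(socket.AF_INET6, socket.SOCK_STREAM)
    # With V6ONLY off, the IPv6 wildcard also accepts IPv4 connections.
    sock.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_V6ONLY, 0)
    sock.bind(("::", 8008))
    sock.listen(1)
    print("listening on", sock.getsockname())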
@@ -211,9 +211,7 @@ email:
require_transport_security: False
notif_from: "{{ SYNAPSE_SMTP_FROM or "hostmaster@" + SYNAPSE_SERVER_NAME }}"
app_name: Matrix
# if template_dir is unset, uses the example templates that are part of
# the Synapse distribution.
#template_dir: res/templates
template_dir: res/templates
notif_template_html: notif_mail.html
notif_template_text: notif_mail.txt
notif_for_new_users: True


@@ -0,0 +1,23 @@
#!/bin/bash
set -eux
: ${WORKSPACE:="$(pwd)"}
export WORKSPACE
export PYTHONDONTWRITEBYTECODE=yep
export SYNAPSE_CACHE_FACTOR=1
export HAPROXY_BIN=/home/haproxy/haproxy-1.6.11/haproxy
./jenkins/prepare_synapse.sh
./jenkins/clone.sh sytest https://github.com/matrix-org/sytest.git
./jenkins/clone.sh dendron https://github.com/matrix-org/dendron.git
./dendron/jenkins/build_dendron.sh
./sytest/jenkins/prep_sytest_for_postgres.sh
./sytest/jenkins/install_and_run.sh \
--python $WORKSPACE/.tox/py27/bin/python \
--synapse-directory $WORKSPACE \
--dendron $WORKSPACE/dendron/bin/dendron \
--haproxy \

jenkins-dendron-postgres.sh (new executable file)

@@ -0,0 +1,20 @@
#!/bin/bash
set -eux
: ${WORKSPACE:="$(pwd)"}
export WORKSPACE
export PYTHONDONTWRITEBYTECODE=yep
export SYNAPSE_CACHE_FACTOR=1
./jenkins/prepare_synapse.sh
./jenkins/clone.sh sytest https://github.com/matrix-org/sytest.git
./jenkins/clone.sh dendron https://github.com/matrix-org/dendron.git
./dendron/jenkins/build_dendron.sh
./sytest/jenkins/prep_sytest_for_postgres.sh
./sytest/jenkins/install_and_run.sh \
--python $WORKSPACE/.tox/py27/bin/python \
--synapse-directory $WORKSPACE \
--dendron $WORKSPACE/dendron/bin/dendron \

jenkins-flake8.sh (new executable file)

@@ -0,0 +1,22 @@
#!/bin/bash
set -eux
: ${WORKSPACE:="$(pwd)"}
export PYTHONDONTWRITEBYTECODE=yep
export SYNAPSE_CACHE_FACTOR=1
# Output test results as junit xml
export TRIAL_FLAGS="--reporter=subunit"
export TOXSUFFIX="| subunit-1to2 | subunit2junitxml --no-passthrough --output-to=results.xml"
# Write coverage reports to a separate file for each process
export COVERAGE_OPTS="-p"
export DUMP_COVERAGE_COMMAND="coverage help"
# Output flake8 violations to violations.flake8.log
export PEP8SUFFIX="--output-file=violations.flake8.log"
rm .coverage* || echo "No coverage files to remove"
tox -e packaging -e pep8

jenkins-postgres.sh (new executable file)

@@ -0,0 +1,18 @@
#!/bin/bash
set -eux
: ${WORKSPACE:="$(pwd)"}
export WORKSPACE
export PYTHONDONTWRITEBYTECODE=yep
export SYNAPSE_CACHE_FACTOR=1
./jenkins/prepare_synapse.sh
./jenkins/clone.sh sytest https://github.com/matrix-org/sytest.git
./sytest/jenkins/prep_sytest_for_postgres.sh
./sytest/jenkins/install_and_run.sh \
--python $WORKSPACE/.tox/py27/bin/python \
--synapse-directory $WORKSPACE \

jenkins-sqlite.sh (new executable file)

@@ -0,0 +1,16 @@
#!/bin/bash
set -eux
: ${WORKSPACE:="$(pwd)"}
export WORKSPACE
export PYTHONDONTWRITEBYTECODE=yep
export SYNAPSE_CACHE_FACTOR=1
./jenkins/prepare_synapse.sh
./jenkins/clone.sh sytest https://github.com/matrix-org/sytest.git
./sytest/jenkins/install_and_run.sh \
--python $WORKSPACE/.tox/py27/bin/python \
--synapse-directory $WORKSPACE \

jenkins-unittests.sh (new executable file)

@@ -0,0 +1,30 @@
#!/bin/bash
set -eux
: ${WORKSPACE:="$(pwd)"}
export PYTHONDONTWRITEBYTECODE=yep
export SYNAPSE_CACHE_FACTOR=1
# Output test results as junit xml
export TRIAL_FLAGS="--reporter=subunit"
export TOXSUFFIX="| subunit-1to2 | subunit2junitxml --no-passthrough --output-to=results.xml"
# Write coverage reports to a separate file for each process
export COVERAGE_OPTS="-p"
export DUMP_COVERAGE_COMMAND="coverage help"
# Output flake8 violations to violations.flake8.log
# Don't exit with non-0 status code on Jenkins,
# so that the build steps continue and a later step can decide whether to
# UNSTABLE or FAILURE this build.
export PEP8SUFFIX="--output-file=violations.flake8.log || echo flake8 finished with status code \$?"
rm .coverage* || echo "No coverage files to remove"
tox --notest -e py27
TOX_BIN=$WORKSPACE/.tox/py27/bin
python synapse/python_dependencies.py | xargs -n1 $TOX_BIN/pip install
$TOX_BIN/pip install lxml
tox -e py27

jenkins/clone.sh (new executable file)

@@ -0,0 +1,44 @@
#! /bin/bash
# This clones a project from github into a named subdirectory
# If the project has a branch with the same name as this branch
# then it will checkout that branch after cloning.
# Otherwise it will checkout "origin/develop."
# The first argument is the name of the directory to checkout
# the branch into.
# The second argument is the URL of the remote repository to checkout.
# Usually something like https://github.com/matrix-org/sytest.git
set -eux
NAME=$1
PROJECT=$2
BASE=".$NAME-base"
# Update our mirror.
if [ ! -d ".$NAME-base" ]; then
# Create a local mirror of the source repository.
# This saves us from having to download the entire repository
# when this script is next run.
git clone "$PROJECT" "$BASE" --mirror
else
# Fetch any updates from the source repository.
(cd "$BASE"; git fetch -p)
fi
# Remove the existing repository so that we have a clean copy
rm -rf "$NAME"
# Cloning with --shared means that we will share portions of the
# .git directory with our local mirror.
git clone "$BASE" "$NAME" --shared
# Jenkins may have supplied us with the name of the branch in the
# environment. Otherwise we will have to guess based on the current
# commit.
: ${GIT_BRANCH:="origin/$(git rev-parse --abbrev-ref HEAD)"}
cd "$NAME"
# check out the relevant branch
git checkout "${GIT_BRANCH}" || (
echo >&2 "No ref ${GIT_BRANCH} found, falling back to develop"
git checkout "origin/develop"
)


@@ -1,20 +1,21 @@
from __future__ import print_function
from synapse.events import FrozenEvent
from synapse.api.auth import Auth
from mock import Mock
import argparse
import itertools
import json
import sys
from mock import Mock
from synapse.api.auth import Auth
from synapse.events import FrozenEvent
def check_auth(auth, auth_chain, events):
auth_chain.sort(key=lambda e: e.depth)
auth_map = {e.event_id: e for e in auth_chain}
auth_map = {
e.event_id: e
for e in auth_chain
}
create_events = {}
for e in auth_chain:
@@ -24,26 +25,31 @@ def check_auth(auth, auth_chain, events):
for e in itertools.chain(auth_chain, events):
auth_events_list = [auth_map[i] for i, _ in e.auth_events]
auth_events = {(e.type, e.state_key): e for e in auth_events_list}
auth_events = {
(e.type, e.state_key): e
for e in auth_events_list
}
auth_events[("m.room.create", "")] = create_events[e.room_id]
try:
auth.check(e, auth_events=auth_events)
except Exception as ex:
print("Failed:", e.event_id, e.type, e.state_key)
print("Auth_events:", auth_events)
print(ex)
print(json.dumps(e.get_dict(), sort_keys=True, indent=4))
print "Failed:", e.event_id, e.type, e.state_key
print "Auth_events:", auth_events
print ex
print json.dumps(e.get_dict(), sort_keys=True, indent=4)
# raise
print("Success:", e.event_id, e.type, e.state_key)
print "Success:", e.event_id, e.type, e.state_key
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument(
'json', nargs='?', type=argparse.FileType('r'), default=sys.stdin
'json',
nargs='?',
type=argparse.FileType('r'),
default=sys.stdin,
)
args = parser.parse_args()


@@ -1,15 +1,10 @@
import argparse
import hashlib
import json
import logging
import sys
from synapse.crypto.event_signing import *
from unpaddedbase64 import encode_base64
from synapse.crypto.event_signing import (
check_event_content_hash,
compute_event_reference_hash,
)
import argparse
import hashlib
import sys
import json
class dictobj(dict):
@@ -29,26 +24,27 @@ class dictobj(dict):
def main():
parser = argparse.ArgumentParser()
parser.add_argument(
"input_json", nargs="?", type=argparse.FileType('r'), default=sys.stdin
)
parser.add_argument("input_json", nargs="?", type=argparse.FileType('r'),
default=sys.stdin)
args = parser.parse_args()
logging.basicConfig()
event_json = dictobj(json.load(args.input_json))
algorithms = {"sha256": hashlib.sha256}
algorithms = {
"sha256": hashlib.sha256,
}
for alg_name in event_json.hashes:
if check_event_content_hash(event_json, algorithms[alg_name]):
print("PASS content hash %s" % (alg_name,))
print "PASS content hash %s" % (alg_name,)
else:
print("FAIL content hash %s" % (alg_name,))
print "FAIL content hash %s" % (alg_name,)
for algorithm in algorithms.values():
name, h_bytes = compute_event_reference_hash(event_json, algorithm)
print("Reference hash %s: %s" % (name, encode_base64(h_bytes)))
print "Reference hash %s: %s" % (name, encode_base64(h_bytes))
if __name__ == "__main__":
if __name__=="__main__":
main()


@@ -1,15 +1,15 @@
import argparse
import json
import logging
import sys
import urllib2
import dns.resolver
from signedjson.key import decode_verify_key_bytes, write_signing_keys
from signedjson.sign import verify_signed_json
from signedjson.key import decode_verify_key_bytes, write_signing_keys
from unpaddedbase64 import decode_base64
import urllib2
import json
import sys
import dns.resolver
import pprint
import argparse
import logging
def get_targets(server_name):
if ":" in server_name:
@@ -23,7 +23,6 @@ def get_targets(server_name):
except dns.resolver.NXDOMAIN:
yield (server_name, 8448)
def get_server_keys(server_name, target, port):
url = "https://%s:%i/_matrix/key/v1" % (target, port)
keys = json.load(urllib2.urlopen(url))
@@ -34,14 +33,12 @@ def get_server_keys(server_name, target, port):
verify_keys[key_id] = verify_key
return verify_keys
def main():
parser = argparse.ArgumentParser()
parser.add_argument("signature_name")
parser.add_argument(
"input_json", nargs="?", type=argparse.FileType('r'), default=sys.stdin
)
parser.add_argument("input_json", nargs="?", type=argparse.FileType('r'),
default=sys.stdin)
args = parser.parse_args()
logging.basicConfig()
@@ -51,23 +48,24 @@ def main():
for target, port in get_targets(server_name):
try:
keys = get_server_keys(server_name, target, port)
print("Using keys from https://%s:%s/_matrix/key/v1" % (target, port))
print "Using keys from https://%s:%s/_matrix/key/v1" % (target, port)
write_signing_keys(sys.stdout, keys.values())
break
except Exception:
except:
logging.exception("Error talking to %s:%s", target, port)
json_to_check = json.load(args.input_json)
print("Checking JSON:")
print "Checking JSON:"
for key_id in json_to_check["signatures"][args.signature_name]:
try:
key = keys[key_id]
verify_signed_json(json_to_check, args.signature_name, key)
print("PASS %s" % (key_id,))
except Exception:
print "PASS %s" % (key_id,)
except:
logging.exception("Check for key %s failed" % (key_id,))
print("FAIL %s" % (key_id,))
print "FAIL %s" % (key_id,)
if __name__ == '__main__':
main()


@@ -1,21 +1,13 @@
import hashlib
import json
import sys
import time
import six
import psycopg2
import yaml
from canonicaljson import encode_canonical_json
import sys
import json
import time
import hashlib
from unpaddedbase64 import encode_base64
from signedjson.key import read_signing_keys
from signedjson.sign import sign_json
from unpaddedbase64 import encode_base64
if six.PY2:
db_type = six.moves.builtins.buffer
else:
db_type = memoryview
from canonicaljson import encode_canonical_json
def select_v1_keys(connection):
@@ -47,9 +39,7 @@ def select_v2_json(connection):
cursor.close()
results = {}
for server_name, key_id, key_json in rows:
results.setdefault(server_name, {})[key_id] = json.loads(
str(key_json).decode("utf-8")
)
results.setdefault(server_name, {})[key_id] = json.loads(str(key_json).decode("utf-8"))
return results
@@ -57,7 +47,10 @@ def convert_v1_to_v2(server_name, valid_until, keys, certificate):
return {
"old_verify_keys": {},
"server_name": server_name,
"verify_keys": {key_id: {"key": key} for key_id, key in keys.items()},
"verify_keys": {
key_id: {"key": key}
for key_id, key in keys.items()
},
"valid_until_ts": valid_until,
"tls_fingerprints": [fingerprint(certificate)],
}
@@ -72,7 +65,7 @@ def rows_v2(server, json):
valid_until = json["valid_until_ts"]
key_json = encode_canonical_json(json)
for key_id in json["verify_keys"]:
yield (server, key_id, "-", valid_until, valid_until, db_type(key_json))
yield (server, key_id, "-", valid_until, valid_until, buffer(key_json))
def main():
@@ -94,7 +87,7 @@ def main():
result = {}
for server in keys:
if server not in json:
if not server in json:
v2_json = convert_v1_to_v2(
server, valid_until, keys[server], certificates[server]
)
@@ -103,7 +96,10 @@ def main():
yaml.safe_dump(result, sys.stdout, default_flow_style=False)
rows = list(row for server, json in result.items() for row in rows_v2(server, json))
rows = list(
row for server, json in result.items()
for row in rows_v2(server, json)
)
cursor = connection.cursor()
cursor.executemany(
@@ -111,7 +107,7 @@ def main():
" server_name, key_id, from_server,"
" ts_added_ms, ts_valid_until_ms, key_json"
") VALUES (%s, %s, %s, %s, %s, %s)",
rows,
rows
)
connection.commit()

scripts-dev/copyrighter-sql.pl (new executable file)

@@ -0,0 +1,33 @@
#!/usr/bin/perl -pi
# Copyright 2014-2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
$copyright = <<EOT;
/* Copyright 2016 OpenMarket Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
EOT
s/^(# -\*- coding: utf-8 -\*-\n)?/$1$copyright/ if ($. == 1);

scripts-dev/copyrighter.pl (new executable file)

@@ -0,0 +1,33 @@
#!/usr/bin/perl -pi
# Copyright 2014-2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
$copyright = <<EOT;
# Copyright 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
EOT
s/^(# -\*- coding: utf-8 -\*-\n)?/$1$copyright/ if ($. == 1);


@@ -1,16 +1,8 @@
#! /usr/bin/python
from __future__ import print_function
import argparse
import ast
import os
import re
import sys
import yaml
class DefinitionVisitor(ast.NodeVisitor):
def __init__(self):
super(DefinitionVisitor, self).__init__()
@@ -50,18 +42,15 @@ def non_empty(defs):
functions = {name: non_empty(f) for name, f in defs['def'].items()}
classes = {name: non_empty(f) for name, f in defs['class'].items()}
result = {}
if functions:
result['def'] = functions
if classes:
result['class'] = classes
if functions: result['def'] = functions
if classes: result['class'] = classes
names = defs['names']
uses = []
for name in names.get('Load', ()):
if name not in names.get('Param', ()) and name not in names.get('Store', ()):
uses.append(name)
uses.extend(defs['attrs'])
if uses:
result['uses'] = uses
if uses: result['uses'] = uses
result['names'] = names
result['attrs'] = defs['attrs']
return result
@@ -106,6 +95,7 @@ def used_names(prefix, item, defs, names):
if __name__ == '__main__':
import sys, os, argparse, re
parser = argparse.ArgumentParser(description='Find definitions.')
parser.add_argument(
@@ -115,28 +105,24 @@ if __name__ == '__main__':
"--ignore", action="append", metavar="REGEXP", help="Ignore a pattern"
)
parser.add_argument(
"--pattern", action="append", metavar="REGEXP", help="Search for a pattern"
"--pattern", action="append", metavar="REGEXP",
help="Search for a pattern"
)
parser.add_argument(
"directories",
nargs='+',
metavar="DIR",
help="Directories to search for definitions",
"directories", nargs='+', metavar="DIR",
help="Directories to search for definitions"
)
parser.add_argument(
"--referrers",
default=0,
type=int,
help="Include referrers up to the given depth",
"--referrers", default=0, type=int,
help="Include referrers up to the given depth"
)
parser.add_argument(
"--referred",
default=0,
type=int,
help="Include referred down to the given depth",
"--referred", default=0, type=int,
help="Include referred down to the given depth"
)
parser.add_argument(
"--format", default="yaml", help="Output format, one of 'yaml' or 'dot'"
"--format", default="yaml",
help="Output format, one of 'yaml' or 'dot'"
)
args = parser.parse_args()
@@ -176,7 +162,7 @@ if __name__ == '__main__':
for used_by in entry.get("used", ()):
referrers.add(used_by)
for name, definition in names.items():
if name not in referrers:
if not name in referrers:
continue
if ignore and any(pattern.match(name) for pattern in ignore):
continue
@@ -190,7 +176,7 @@ if __name__ == '__main__':
for uses in entry.get("uses", ()):
referred.add(uses)
for name, definition in names.items():
if name not in referred:
if not name in referred:
continue
if ignore and any(pattern.match(name) for pattern in ignore):
continue
@@ -199,12 +185,12 @@ if __name__ == '__main__':
if args.format == 'yaml':
yaml.dump(result, sys.stdout, default_flow_style=False)
elif args.format == 'dot':
print("digraph {")
print "digraph {"
for name, entry in result.items():
print(name)
print name
for used_by in entry.get("used", ()):
if used_by in result:
print(used_by, "->", name)
print("}")
print used_by, "->", name
print "}"
else:
raise ValueError("Unknown format %r" % (args.format))


@@ -1,10 +1,7 @@
#!/usr/bin/env python2
from __future__ import print_function
import sys
import pymacaroons
import sys
if len(sys.argv) == 1:
sys.stderr.write("usage: %s macaroon [key]\n" % (sys.argv[0],))
@@ -14,14 +11,14 @@ macaroon_string = sys.argv[1]
key = sys.argv[2] if len(sys.argv) > 2 else None
macaroon = pymacaroons.Macaroon.deserialize(macaroon_string)
print(macaroon.inspect())
print macaroon.inspect()
print("")
print ""
verifier = pymacaroons.Verifier()
verifier.satisfy_general(lambda c: True)
try:
verifier.verify(macaroon, key)
print("Signature is correct")
print "Signature is correct"
except Exception as e:
print(str(e))
print str(e)
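To produce a test input for this inspector, a minimal sketch that mints and serialises a macaroon with pymacaroons (the location, identifier, and key are made-up values for illustration):

    import pymacaroons

    m = pymacaroons.Macaroon(
        location="example.com",       # hypothetical values
        identifier="key1",
        key="secret-signing-key",
    )
    m.add_first_party_caveat("gen = 1")
    # Pass this string as argv[1], and "secret-signing-key" as argv[2],
    # to have the script verify the signature.
    print(m.serialize())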


@@ -18,21 +18,21 @@
from __future__ import print_function
import argparse
import base64
import json
import sys
from urlparse import urlparse, urlunparse
import nacl.signing
import json
import base64
import requests
import sys
from requests.adapters import HTTPAdapter
import srvlookup
import yaml
from requests.adapters import HTTPAdapter
# uncomment the following to enable debug logging of http requests
# from httplib import HTTPConnection
# HTTPConnection.debuglevel = 1
#from httplib import HTTPConnection
#HTTPConnection.debuglevel = 1
def encode_base64(input_bytes):
"""Encode bytes as a base64 string without any padding."""
@@ -58,15 +58,15 @@ def decode_base64(input_string):
def encode_canonical_json(value):
return json.dumps(
value,
# Encode code-points outside of ASCII as UTF-8 rather than \u escapes
ensure_ascii=False,
# Remove unnecessary white space.
separators=(',', ':'),
# Sort the keys of dictionaries.
sort_keys=True,
# Encode the resulting unicode as UTF-8 bytes.
).encode("UTF-8")
value,
# Encode code-points outside of ASCII as UTF-8 rather than \u escapes
ensure_ascii=False,
# Remove unnecessary white space.
separators=(',',':'),
# Sort the keys of dictionaries.
sort_keys=True,
# Encode the resulting unicode as UTF-8 bytes.
).encode("UTF-8")
def sign_json(json_object, signing_key, signing_name):
@@ -88,7 +88,6 @@ def sign_json(json_object, signing_key, signing_name):
NACL_ED25519 = "ed25519"
def decode_signing_key_base64(algorithm, version, key_base64):
"""Decode a base64 encoded signing key
Args:
@@ -144,12 +143,14 @@ def request_json(method, origin_name, origin_key, destination, path, content):
authorization_headers = []
for key, sig in signed_json["signatures"][origin_name].items():
header = "X-Matrix origin=%s,key=\"%s\",sig=\"%s\"" % (origin_name, key, sig)
header = "X-Matrix origin=%s,key=\"%s\",sig=\"%s\"" % (
origin_name, key, sig,
)
authorization_headers.append(bytes(header))
print("Authorization: %s" % header, file=sys.stderr)
print ("Authorization: %s" % header, file=sys.stderr)
dest = "matrix://%s%s" % (destination, path)
print("Requesting %s" % dest, file=sys.stderr)
print ("Requesting %s" % dest, file=sys.stderr)
s = requests.Session()
s.mount("matrix://", MatrixConnectionAdapter())
@@ -157,7 +158,10 @@ def request_json(method, origin_name, origin_key, destination, path, content):
result = s.request(
method=method,
url=dest,
headers={"Host": destination, "Authorization": authorization_headers[0]},
headers={
"Host": destination,
"Authorization": authorization_headers[0]
},
verify=False,
data=content,
)
@@ -167,50 +171,50 @@ def request_json(method, origin_name, origin_key, destination, path, content):
def main():
parser = argparse.ArgumentParser(
description="Signs and sends a federation request to a matrix homeserver"
description=
"Signs and sends a federation request to a matrix homeserver",
)
parser.add_argument(
"-N",
"--server-name",
"-N", "--server-name",
help="Name to give as the local homeserver. If unspecified, will be "
"read from the config file.",
"read from the config file.",
)
parser.add_argument(
"-k",
"--signing-key-path",
"-k", "--signing-key-path",
help="Path to the file containing the private ed25519 key to sign the "
"request with.",
"request with.",
)
parser.add_argument(
"-c",
"--config",
"-c", "--config",
default="homeserver.yaml",
help="Path to server config file. Ignored if --server-name and "
"--signing-key-path are both given.",
"--signing-key-path are both given.",
)
parser.add_argument(
"-d",
"--destination",
"-d", "--destination",
default="matrix.org",
help="name of the remote homeserver. We will do SRV lookups and "
"connect appropriately.",
"connect appropriately.",
)
parser.add_argument(
"-X",
"--method",
"-X", "--method",
help="HTTP method to use for the request. Defaults to GET if --data is"
"unspecified, POST if it is.",
"unspecified, POST if it is."
)
parser.add_argument("--body", help="Data to send as the body of the HTTP request")
parser.add_argument(
"--body",
help="Data to send as the body of the HTTP request"
)
parser.add_argument(
"path", help="request path. We will add '/_matrix/federation/v1/' to this."
"path",
help="request path. We will add '/_matrix/federation/v1/' to this."
)
args = parser.parse_args()
@@ -223,15 +227,13 @@ def main():
result = request_json(
args.method,
args.server_name,
key,
args.destination,
args.server_name, key, args.destination,
"/_matrix/federation/v1/" + args.path,
content=args.body,
)
json.dump(result, sys.stdout)
print("")
print ("")
def read_args_from_config(args):
@@ -251,7 +253,7 @@ class MatrixConnectionAdapter(HTTPAdapter):
return s, 8448
if ":" in s:
out = s.rsplit(":", 1)
out = s.rsplit(":",1)
try:
port = int(out[1])
except ValueError:
@@ -261,7 +263,7 @@ class MatrixConnectionAdapter(HTTPAdapter):
try:
srv = srvlookup.lookup("matrix", "tcp", s)[0]
return srv.host, srv.port
except Exception:
except:
return s, 8448
def get_connection(self, url, proxies=None):
@@ -270,9 +272,10 @@ class MatrixConnectionAdapter(HTTPAdapter):
(host, port) = self.lookup(parsed.netloc)
netloc = "%s:%d" % (host, port)
print("Connecting to %s" % (netloc,), file=sys.stderr)
url = urlunparse(
("https", netloc, parsed.path, parsed.params, parsed.query, parsed.fragment)
)
url = urlunparse((
"https", netloc, parsed.path, parsed.params, parsed.query,
parsed.fragment,
))
return super(MatrixConnectionAdapter, self).get_connection(url, proxies)
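As a quick illustration of the encode_canonical_json helper earlier in this file: sorted keys, compact separators, and ensure_ascii=False yield one deterministic UTF-8 byte string per value, which is what makes the signatures reproducible. A sketch using only the standard library:

    import json

    doc = {"b": 1, "a": "é"}
    encoded = json.dumps(
        doc,
        ensure_ascii=False,       # keep non-ASCII as raw code points
        separators=(',', ':'),    # no insignificant whitespace
        sort_keys=True,           # deterministic key order
    ).encode("UTF-8")
    print(encoded)  # b'{"a":"\xc3\xa9","b":1}' regardless of insertion order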


@@ -1,31 +1,23 @@
from __future__ import print_function
import sqlite3
import sys
from unpaddedbase64 import decode_base64, encode_base64
from synapse.crypto.event_signing import (
add_event_pdu_content_hash,
compute_pdu_event_reference_hash,
)
from synapse.federation.units import Pdu
from synapse.storage._base import SQLBaseStore
from synapse.storage.pdu import PduStore
from synapse.storage.signatures import SignatureStore
from synapse.storage._base import SQLBaseStore
from synapse.federation.units import Pdu
from synapse.crypto.event_signing import (
add_event_pdu_content_hash, compute_pdu_event_reference_hash
)
from synapse.api.events.utils import prune_pdu
from unpaddedbase64 import encode_base64, decode_base64
from canonicaljson import encode_canonical_json
import sqlite3
import sys
class Store(object):
_get_pdu_tuples = PduStore.__dict__["_get_pdu_tuples"]
_get_pdu_content_hashes_txn = SignatureStore.__dict__["_get_pdu_content_hashes_txn"]
_get_prev_pdu_hashes_txn = SignatureStore.__dict__["_get_prev_pdu_hashes_txn"]
_get_pdu_origin_signatures_txn = SignatureStore.__dict__[
"_get_pdu_origin_signatures_txn"
]
_get_pdu_origin_signatures_txn = SignatureStore.__dict__["_get_pdu_origin_signatures_txn"]
_store_pdu_content_hash_txn = SignatureStore.__dict__["_store_pdu_content_hash_txn"]
_store_pdu_reference_hash_txn = SignatureStore.__dict__[
"_store_pdu_reference_hash_txn"
]
_store_pdu_reference_hash_txn = SignatureStore.__dict__["_store_pdu_reference_hash_txn"]
_store_prev_pdu_hash_txn = SignatureStore.__dict__["_store_prev_pdu_hash_txn"]
_simple_insert_txn = SQLBaseStore.__dict__["_simple_insert_txn"]
@@ -34,7 +26,9 @@ store = Store()
def select_pdus(cursor):
cursor.execute("SELECT pdu_id, origin FROM pdus ORDER BY depth ASC")
cursor.execute(
"SELECT pdu_id, origin FROM pdus ORDER BY depth ASC"
)
ids = cursor.fetchall()
@@ -47,30 +41,23 @@ def select_pdus(cursor):
for pdu in pdus:
try:
if pdu.prev_pdus:
print("PROCESS", pdu.pdu_id, pdu.origin, pdu.prev_pdus)
print "PROCESS", pdu.pdu_id, pdu.origin, pdu.prev_pdus
for pdu_id, origin, hashes in pdu.prev_pdus:
ref_alg, ref_hsh = reference_hashes[(pdu_id, origin)]
hashes[ref_alg] = encode_base64(ref_hsh)
store._store_prev_pdu_hash_txn(
cursor, pdu.pdu_id, pdu.origin, pdu_id, origin, ref_alg, ref_hsh
)
print("SUCCESS", pdu.pdu_id, pdu.origin, pdu.prev_pdus)
store._store_prev_pdu_hash_txn(cursor, pdu.pdu_id, pdu.origin, pdu_id, origin, ref_alg, ref_hsh)
print "SUCCESS", pdu.pdu_id, pdu.origin, pdu.prev_pdus
pdu = add_event_pdu_content_hash(pdu)
ref_alg, ref_hsh = compute_pdu_event_reference_hash(pdu)
reference_hashes[(pdu.pdu_id, pdu.origin)] = (ref_alg, ref_hsh)
store._store_pdu_reference_hash_txn(
cursor, pdu.pdu_id, pdu.origin, ref_alg, ref_hsh
)
store._store_pdu_reference_hash_txn(cursor, pdu.pdu_id, pdu.origin, ref_alg, ref_hsh)
for alg, hsh_base64 in pdu.hashes.items():
print(alg, hsh_base64)
store._store_pdu_content_hash_txn(
cursor, pdu.pdu_id, pdu.origin, alg, decode_base64(hsh_base64)
)
except Exception:
print("FAILED_", pdu.pdu_id, pdu.origin, pdu.prev_pdus)
print alg, hsh_base64
store._store_pdu_content_hash_txn(cursor, pdu.pdu_id, pdu.origin, alg, decode_base64(hsh_base64))
except:
print "FAILED_", pdu.pdu_id, pdu.origin, pdu.prev_pdus
def main():
conn = sqlite3.connect(sys.argv[1])
@@ -78,6 +65,5 @@ def main():
select_pdus(cursor)
conn.commit()
if __name__ == '__main__':
if __name__=='__main__':
main()


@@ -1,17 +1,18 @@
#! /usr/bin/python
import argparse
import ast
import argparse
import os
import sys
import yaml
PATTERNS_V1 = []
PATTERNS_V2 = []
RESULT = {"v1": PATTERNS_V1, "v2": PATTERNS_V2}
RESULT = {
"v1": PATTERNS_V1,
"v2": PATTERNS_V2,
}
class CallVisitor(ast.NodeVisitor):
def visit_Call(self, node):
@@ -20,6 +21,7 @@ class CallVisitor(ast.NodeVisitor):
else:
return
if name == "client_path_patterns":
PATTERNS_V1.append(node.args[0].s)
elif name == "client_v2_patterns":
@@ -40,10 +42,8 @@ def find_patterns_in_file(filepath):
parser = argparse.ArgumentParser(description='Find url patterns.')
parser.add_argument(
"directories",
nargs='+',
metavar="DIR",
help="Directories to search for definitions",
"directories", nargs='+', metavar="DIR",
help="Directories to search for definitions"
)
args = parser.parse_args()


@@ -1,9 +1,8 @@
import requests
import collections
import json
import sys
import time
import requests
import json
Entry = collections.namedtuple("Entry", "name position rows")
@@ -31,11 +30,11 @@ def parse_response(content):
def replicate(server, streams):
return parse_response(
requests.get(
server + "/_synapse/replication", verify=False, params=streams
).content
)
return parse_response(requests.get(
server + "/_synapse/replication",
verify=False,
params=streams
).content)
def main():
@@ -46,16 +45,16 @@ def main():
try:
streams = {
row.name: row.position
for row in replicate(server, {"streams": "-1"})["streams"].rows
for row in replicate(server, {"streams":"-1"})["streams"].rows
}
except requests.exceptions.ConnectionError:
except requests.exceptions.ConnectionError as e:
time.sleep(0.1)
while True:
try:
results = replicate(server, streams)
except Exception:
sys.stdout.write("connection_lost(" + repr(streams) + ")\n")
except:
sys.stdout.write("connection_lost("+ repr(streams) + ")\n")
break
for update in results.values():
for row in update.rows:
@@ -63,5 +62,6 @@ def main():
streams[update.name] = update.position
if __name__ == '__main__':
if __name__=='__main__':
main()


@@ -1,10 +1,12 @@
#!/usr/bin/env python
import argparse
import getpass
import sys
import bcrypt
import getpass
import yaml
bcrypt_rounds=12
@@ -50,3 +52,4 @@ if __name__ == "__main__":
password = prompt_for_pass()
print bcrypt.hashpw(password + password_pepper, bcrypt.gensalt(bcrypt_rounds))
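For comparison, the same hashing step under Python 3, where bcrypt works on bytes rather than str (a sketch assuming the bcrypt package; the password and pepper values are placeholders):

    import bcrypt

    password = "correct horse"        # placeholder; the script prompts for this
    password_pepper = ""              # optional pepper read from the config
    hashed = bcrypt.hashpw(
        (password + password_pepper).encode("utf8"),
        bcrypt.gensalt(12),           # 12 rounds, matching bcrypt_rounds above
    )
    print(hashed.decode("ascii"))     # e.g. $2b$12$...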


@@ -36,10 +36,13 @@ from __future__ import print_function
import argparse
import logging
import os
import shutil
import sys
import os
import shutil
from synapse.rest.media.v1.filepath import MediaFilePaths
logger = logging.getLogger()
@@ -74,23 +77,24 @@ def move_media(origin_server, file_id, src_paths, dest_paths):
if not os.path.exists(original_file):
logger.warn(
"Original for %s/%s (%s) does not exist",
origin_server,
file_id,
original_file,
origin_server, file_id, original_file,
)
else:
mkdir_and_move(
original_file, dest_paths.remote_media_filepath(origin_server, file_id)
original_file,
dest_paths.remote_media_filepath(origin_server, file_id),
)
# now look for thumbnails
original_thumb_dir = src_paths.remote_media_thumbnail_dir(origin_server, file_id)
original_thumb_dir = src_paths.remote_media_thumbnail_dir(
origin_server, file_id,
)
if not os.path.exists(original_thumb_dir):
return
mkdir_and_move(
original_thumb_dir,
dest_paths.remote_media_thumbnail_dir(origin_server, file_id),
dest_paths.remote_media_thumbnail_dir(origin_server, file_id)
)
@@ -105,16 +109,24 @@ def mkdir_and_move(original_file, dest_file):
if __name__ == "__main__":
parser = argparse.ArgumentParser(
description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter
description=__doc__,
formatter_class = argparse.RawDescriptionHelpFormatter,
)
parser.add_argument(
"-v", action='store_true', help='enable debug logging')
parser.add_argument(
"src_repo",
help="Path to source content repo",
)
parser.add_argument(
"dest_repo",
help="Path to source content repo",
)
parser.add_argument("-v", action='store_true', help='enable debug logging')
parser.add_argument("src_repo", help="Path to source content repo")
parser.add_argument("dest_repo", help="Path to source content repo")
args = parser.parse_args()
logging_config = {
"level": logging.DEBUG if args.v else logging.INFO,
"format": "%(asctime)s - %(name)s - %(lineno)d - %(levelname)s - %(message)s",
"format": "%(asctime)s - %(name)s - %(lineno)d - %(levelname)s - %(message)s"
}
logging.basicConfig(**logging_config)


@@ -14,9 +14,197 @@
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
from synapse._scripts.register_new_matrix_user import main
import argparse
import getpass
import hashlib
import hmac
import json
import sys
import urllib2
import yaml
def request_registration(user, password, server_location, shared_secret, admin=False):
req = urllib2.Request(
"%s/_matrix/client/r0/admin/register" % (server_location,),
headers={'Content-Type': 'application/json'}
)
try:
if sys.version_info[:3] >= (2, 7, 9):
# As of version 2.7.9, urllib2 now checks SSL certs
import ssl
f = urllib2.urlopen(req, context=ssl.SSLContext(ssl.PROTOCOL_SSLv23))
else:
f = urllib2.urlopen(req)
body = f.read()
f.close()
nonce = json.loads(body)["nonce"]
except urllib2.HTTPError as e:
print "ERROR! Received %d %s" % (e.code, e.reason,)
if 400 <= e.code < 500:
if e.info().type == "application/json":
resp = json.load(e)
if "error" in resp:
print resp["error"]
sys.exit(1)
mac = hmac.new(
key=shared_secret,
digestmod=hashlib.sha1,
)
mac.update(nonce)
mac.update("\x00")
mac.update(user)
mac.update("\x00")
mac.update(password)
mac.update("\x00")
mac.update("admin" if admin else "notadmin")
mac = mac.hexdigest()
data = {
"nonce": nonce,
"username": user,
"password": password,
"mac": mac,
"admin": admin,
}
server_location = server_location.rstrip("/")
print "Sending registration request..."
req = urllib2.Request(
"%s/_matrix/client/r0/admin/register" % (server_location,),
data=json.dumps(data),
headers={'Content-Type': 'application/json'}
)
try:
if sys.version_info[:3] >= (2, 7, 9):
# As of version 2.7.9, urllib2 now checks SSL certs
import ssl
f = urllib2.urlopen(req, context=ssl.SSLContext(ssl.PROTOCOL_SSLv23))
else:
f = urllib2.urlopen(req)
f.read()
f.close()
print "Success."
except urllib2.HTTPError as e:
print "ERROR! Received %d %s" % (e.code, e.reason,)
if 400 <= e.code < 500:
if e.info().type == "application/json":
resp = json.load(e)
if "error" in resp:
print resp["error"]
sys.exit(1)
def register_new_user(user, password, server_location, shared_secret, admin):
if not user:
try:
default_user = getpass.getuser()
except:
default_user = None
if default_user:
user = raw_input("New user localpart [%s]: " % (default_user,))
if not user:
user = default_user
else:
user = raw_input("New user localpart: ")
if not user:
print "Invalid user name"
sys.exit(1)
if not password:
password = getpass.getpass("Password: ")
if not password:
print "Password cannot be blank."
sys.exit(1)
confirm_password = getpass.getpass("Confirm password: ")
if password != confirm_password:
print "Passwords do not match"
sys.exit(1)
if admin is None:
admin = raw_input("Make admin [no]: ")
if admin in ("y", "yes", "true"):
admin = True
else:
admin = False
request_registration(user, password, server_location, shared_secret, bool(admin))
if __name__ == "__main__":
main()
parser = argparse.ArgumentParser(
description="Used to register new users with a given home server when"
" registration has been disabled. The home server must be"
" configured with the 'registration_shared_secret' option"
" set.",
)
parser.add_argument(
"-u", "--user",
default=None,
help="Local part of the new user. Will prompt if omitted.",
)
parser.add_argument(
"-p", "--password",
default=None,
help="New password for user. Will prompt if omitted.",
)
admin_group = parser.add_mutually_exclusive_group()
admin_group.add_argument(
"-a", "--admin",
action="store_true",
help="Register new user as an admin. Will prompt if --no-admin is not set either.",
)
admin_group.add_argument(
"--no-admin",
action="store_true",
help="Register new user as a regular user. Will prompt if --admin is not set either.",
)
group = parser.add_mutually_exclusive_group(required=True)
group.add_argument(
"-c", "--config",
type=argparse.FileType('r'),
help="Path to server config file. Used to read in shared secret.",
)
group.add_argument(
"-k", "--shared-secret",
help="Shared secret as defined in server config file.",
)
parser.add_argument(
"server_url",
default="https://localhost:8448",
nargs='?',
help="URL to use to talk to the home server. Defaults to "
" 'https://localhost:8448'.",
)
args = parser.parse_args()
if "config" in args and args.config:
config = yaml.safe_load(args.config)
secret = config.get("registration_shared_secret", None)
if not secret:
print "No 'registration_shared_secret' defined in config."
sys.exit(1)
else:
secret = args.shared_secret
admin = None
if args.admin or args.no_admin:
admin = args.admin
register_new_user(args.user, args.password, args.server_url, secret, admin)


@@ -15,23 +15,23 @@
# See the License for the specific language governing permissions and
# limitations under the License.
from twisted.internet import defer, reactor
from twisted.enterprise import adbapi
from synapse.storage._base import LoggingTransaction, SQLBaseStore
from synapse.storage.engines import create_engine
from synapse.storage.prepare_database import prepare_database
import argparse
import curses
import logging
import sys
import time
import traceback
import yaml
from six import string_types
import yaml
from twisted.enterprise import adbapi
from twisted.internet import defer, reactor
from synapse.storage._base import LoggingTransaction, SQLBaseStore
from synapse.storage.engines import create_engine
from synapse.storage.prepare_database import prepare_database
logger = logging.getLogger("synapse_port_db")
@@ -105,7 +105,6 @@ class Store(object):
*All* database interactions should go through this object.
"""
def __init__(self, db_pool, engine):
self.db_pool = db_pool
self.database_engine = engine
@@ -136,8 +135,7 @@ class Store(object):
txn = conn.cursor()
return func(
LoggingTransaction(txn, desc, self.database_engine, [], []),
*args,
**kwargs
*args, **kwargs
)
except self.database_engine.module.DatabaseError as e:
if self.database_engine.is_deadlock(e):
@@ -160,20 +158,22 @@ class Store(object):
def r(txn):
txn.execute(sql, args)
return txn.fetchall()
return self.runInteraction("execute_sql", r)
def insert_many_txn(self, txn, table, headers, rows):
sql = "INSERT INTO %s (%s) VALUES (%s)" % (
table,
", ".join(k for k in headers),
", ".join("%s" for _ in headers),
", ".join("%s" for _ in headers)
)
try:
txn.executemany(sql, rows)
except Exception:
logger.exception("Failed to insert: %s", table)
except:
logger.exception(
"Failed to insert: %s",
table,
)
raise
@@ -206,7 +206,7 @@ class Porter(object):
"table_name": table,
"forward_rowid": 1,
"backward_rowid": 0,
},
}
)
forward_chunk = 1
@@ -221,10 +221,10 @@ class Porter(object):
table, forward_chunk, backward_chunk
)
else:
def delete_all(txn):
txn.execute(
"DELETE FROM port_from_sqlite3 WHERE table_name = %s", (table,)
"DELETE FROM port_from_sqlite3 WHERE table_name = %s",
(table,)
)
txn.execute("TRUNCATE %s CASCADE" % (table,))
@@ -232,7 +232,11 @@ class Porter(object):
yield self.postgres_store._simple_insert(
table="port_from_sqlite3",
values={"table_name": table, "forward_rowid": 1, "backward_rowid": 0},
values={
"table_name": table,
"forward_rowid": 1,
"backward_rowid": 0,
}
)
forward_chunk = 1
@@ -247,16 +251,12 @@ class Porter(object):
)
@defer.inlineCallbacks
def handle_table(
self, table, postgres_size, table_size, forward_chunk, backward_chunk
):
def handle_table(self, table, postgres_size, table_size, forward_chunk,
backward_chunk):
logger.info(
"Table %s: %i/%i (rows %i-%i) already ported",
table,
postgres_size,
table_size,
backward_chunk + 1,
forward_chunk - 1,
table, postgres_size, table_size,
backward_chunk+1, forward_chunk-1,
)
if not table_size:
@@ -271,9 +271,7 @@ class Porter(object):
return
if table in (
"user_directory",
"user_directory_search",
"users_who_share_rooms",
"user_directory", "user_directory_search", "users_who_share_rooms",
"users_in_pubic_room",
):
# We don't port these tables, as they're a faff and we can regenerate
@@ -285,35 +283,37 @@ class Porter(object):
# We need to make sure there is a single row, `(X, null)`, as that is
# what synapse expects to be there.
yield self.postgres_store._simple_insert(
table=table, values={"stream_id": None}
table=table,
values={"stream_id": None},
)
self.progress.update(table, table_size) # Mark table as done
return
forward_select = (
"SELECT rowid, * FROM %s WHERE rowid >= ? ORDER BY rowid LIMIT ?" % (table,)
"SELECT rowid, * FROM %s WHERE rowid >= ? ORDER BY rowid LIMIT ?"
% (table,)
)
backward_select = (
"SELECT rowid, * FROM %s WHERE rowid <= ? ORDER BY rowid LIMIT ?" % (table,)
"SELECT rowid, * FROM %s WHERE rowid <= ? ORDER BY rowid LIMIT ?"
% (table,)
)
do_forward = [True]
do_backward = [True]
while True:
def r(txn):
forward_rows = []
backward_rows = []
if do_forward[0]:
txn.execute(forward_select, (forward_chunk, self.batch_size))
txn.execute(forward_select, (forward_chunk, self.batch_size,))
forward_rows = txn.fetchall()
if not forward_rows:
do_forward[0] = False
if do_backward[0]:
txn.execute(backward_select, (backward_chunk, self.batch_size))
txn.execute(backward_select, (backward_chunk, self.batch_size,))
backward_rows = txn.fetchall()
if not backward_rows:
do_backward[0] = False
@@ -325,7 +325,9 @@ class Porter(object):
return headers, forward_rows, backward_rows
headers, frows, brows = yield self.sqlite_store.runInteraction("select", r)
headers, frows, brows = yield self.sqlite_store.runInteraction(
"select", r
)
if frows or brows:
if frows:
@@ -337,7 +339,9 @@ class Porter(object):
rows = self._convert_rows(table, headers, rows)
def insert(txn):
self.postgres_store.insert_many_txn(txn, table, headers[1:], rows)
self.postgres_store.insert_many_txn(
txn, table, headers[1:], rows
)
self.postgres_store._simple_update_one_txn(
txn,
@@ -358,9 +362,8 @@ class Porter(object):
return
@defer.inlineCallbacks
def handle_search_table(
self, postgres_size, table_size, forward_chunk, backward_chunk
):
def handle_search_table(self, postgres_size, table_size, forward_chunk,
backward_chunk):
select = (
"SELECT es.rowid, es.*, e.origin_server_ts, e.stream_ordering"
" FROM event_search as es"
@@ -370,9 +373,8 @@ class Porter(object):
)
while True:
def r(txn):
txn.execute(select, (forward_chunk, self.batch_size))
txn.execute(select, (forward_chunk, self.batch_size,))
rows = txn.fetchall()
headers = [column[0] for column in txn.description]
@@ -400,21 +402,18 @@ class Porter(object):
else:
rows_dict.append(d)
txn.executemany(
sql,
[
(
row["event_id"],
row["room_id"],
row["key"],
row["sender"],
row["value"],
row["origin_server_ts"],
row["stream_ordering"],
)
for row in rows_dict
],
)
txn.executemany(sql, [
(
row["event_id"],
row["room_id"],
row["key"],
row["sender"],
row["value"],
row["origin_server_ts"],
row["stream_ordering"],
)
for row in rows_dict
])
self.postgres_store._simple_update_one_txn(
txn,
@@ -438,8 +437,7 @@ class Porter(object):
def setup_db(self, db_config, database_engine):
db_conn = database_engine.module.connect(
**{
k: v
for k, v in db_config.get("args", {}).items()
k: v for k, v in db_config.get("args", {}).items()
if not k.startswith("cp_")
}
)
@@ -452,11 +450,13 @@ class Porter(object):
def run(self):
try:
sqlite_db_pool = adbapi.ConnectionPool(
self.sqlite_config["name"], **self.sqlite_config["args"]
self.sqlite_config["name"],
**self.sqlite_config["args"]
)
postgres_db_pool = adbapi.ConnectionPool(
self.postgres_config["name"], **self.postgres_config["args"]
self.postgres_config["name"],
**self.postgres_config["args"]
)
sqlite_engine = create_engine(sqlite_config)
@@ -465,7 +465,9 @@ class Porter(object):
self.sqlite_store = Store(sqlite_db_pool, sqlite_engine)
self.postgres_store = Store(postgres_db_pool, postgres_engine)
yield self.postgres_store.execute(postgres_engine.check_database)
yield self.postgres_store.execute(
postgres_engine.check_database
)
# Step 1. Set up databases.
self.progress.set_state("Preparing SQLite3")
@@ -475,7 +477,6 @@ class Porter(object):
self.setup_db(postgres_config, postgres_engine)
self.progress.set_state("Creating port tables")
def create_port_table(txn):
txn.execute(
"CREATE TABLE IF NOT EXISTS port_from_sqlite3 ("
@@ -500,9 +501,10 @@ class Porter(object):
)
try:
yield self.postgres_store.runInteraction("alter_table", alter_table)
except Exception:
# On Error Resume Next
yield self.postgres_store.runInteraction(
"alter_table", alter_table
)
except Exception as e:
pass
yield self.postgres_store.runInteraction(
@@ -512,7 +514,11 @@ class Porter(object):
# Step 2. Get tables.
self.progress.set_state("Fetching tables")
sqlite_tables = yield self.sqlite_store._simple_select_onecol(
table="sqlite_master", keyvalues={"type": "table"}, retcol="name"
table="sqlite_master",
keyvalues={
"type": "table",
},
retcol="name",
)
postgres_tables = yield self.postgres_store._simple_select_onecol(
@@ -539,14 +545,18 @@ class Porter(object):
# Step 4. Do the copying.
self.progress.set_state("Copying to postgres")
yield defer.gatherResults(
[self.handle_table(*res) for res in setup_res], consumeErrors=True
[
self.handle_table(*res)
for res in setup_res
],
consumeErrors=True,
)
# Step 5. Do final post-processing
yield self._setup_state_group_id_seq()
self.progress.done()
except Exception:
except:
global end_error_exec_info
end_error_exec_info = sys.exc_info()
logger.exception("")
@@ -556,7 +566,9 @@ class Porter(object):
def _convert_rows(self, table, headers, rows):
bool_col_names = BOOLEAN_COLUMNS.get(table, [])
bool_cols = [i for i, h in enumerate(headers) if h in bool_col_names]
bool_cols = [
i for i, h in enumerate(headers) if h in bool_col_names
]
class BadValueException(Exception):
pass
@@ -565,21 +577,18 @@ class Porter(object):
if j in bool_cols:
return bool(col)
elif isinstance(col, string_types) and "\0" in col:
logger.warn(
"DROPPING ROW: NUL value in table %s col %s: %r",
table,
headers[j],
col,
)
raise BadValueException()
logger.warn("DROPPING ROW: NUL value in table %s col %s: %r", table, headers[j], col)
raise BadValueException();
return col
outrows = []
for i, row in enumerate(rows):
try:
outrows.append(
tuple(conv(j, col) for j, col in enumerate(row) if j > 0)
)
outrows.append(tuple(
conv(j, col)
for j, col in enumerate(row)
if j > 0
))
except BadValueException:
pass
@@ -607,7 +616,9 @@ class Porter(object):
return headers, [r for r in rows if r[ts_ind] < yesterday]
headers, rows = yield self.sqlite_store.runInteraction("select", r)
headers, rows = yield self.sqlite_store.runInteraction(
"select", r,
)
rows = self._convert_rows("sent_transactions", headers, rows)
@@ -628,7 +639,7 @@ class Porter(object):
txn.execute(
"SELECT rowid FROM sent_transactions WHERE ts >= ?"
" ORDER BY rowid ASC LIMIT 1",
(yesterday,),
(yesterday,)
)
rows = txn.fetchall()
@@ -646,17 +657,21 @@ class Porter(object):
"table_name": "sent_transactions",
"forward_rowid": next_chunk,
"backward_rowid": 0,
},
}
)
def get_sent_table_size(txn):
txn.execute(
"SELECT count(*) FROM sent_transactions" " WHERE ts >= ?", (yesterday,)
"SELECT count(*) FROM sent_transactions"
" WHERE ts >= ?",
(yesterday,)
)
size, = txn.fetchone()
return int(size)
remaining_count = yield self.sqlite_store.execute(get_sent_table_size)
remaining_count = yield self.sqlite_store.execute(
get_sent_table_size
)
total_count = remaining_count + inserted_rows
@@ -665,11 +680,13 @@ class Porter(object):
@defer.inlineCallbacks
def _get_remaining_count_to_port(self, table, forward_chunk, backward_chunk):
frows = yield self.sqlite_store.execute_sql(
"SELECT count(*) FROM %s WHERE rowid >= ?" % (table,), forward_chunk
"SELECT count(*) FROM %s WHERE rowid >= ?" % (table,),
forward_chunk,
)
brows = yield self.sqlite_store.execute_sql(
"SELECT count(*) FROM %s WHERE rowid <= ?" % (table,), backward_chunk
"SELECT count(*) FROM %s WHERE rowid <= ?" % (table,),
backward_chunk,
)
defer.returnValue(frows[0][0] + brows[0][0])
@@ -677,7 +694,7 @@ class Porter(object):
@defer.inlineCallbacks
def _get_already_ported_count(self, table):
rows = yield self.postgres_store.execute_sql(
"SELECT count(*) FROM %s" % (table,)
"SELECT count(*) FROM %s" % (table,),
)
defer.returnValue(rows[0][0])
@@ -700,21 +717,22 @@ class Porter(object):
def _setup_state_group_id_seq(self):
def r(txn):
txn.execute("SELECT MAX(id) FROM state_groups")
next_id = txn.fetchone()[0] + 1
txn.execute("ALTER SEQUENCE state_group_id_seq RESTART WITH %s", (next_id,))
next_id = txn.fetchone()[0]+1
txn.execute(
"ALTER SEQUENCE state_group_id_seq RESTART WITH %s",
(next_id,),
)
return self.postgres_store.runInteraction("setup_state_group_id_seq", r)
##############################################
# The following is simply UI stuff
###### The following is simply UI stuff ######
##############################################
class Progress(object):
"""Used to report progress of the port
"""
def __init__(self):
self.tables = {}
@@ -740,7 +758,6 @@ class Progress(object):
class CursesProgress(Progress):
"""Reports progress to a curses window
"""
def __init__(self, stdscr):
self.stdscr = stdscr
@@ -784,7 +801,7 @@ class CursesProgress(Progress):
duration = int(now) - int(self.start_time)
minutes, seconds = divmod(duration, 60)
duration_str = '%02dm %02ds' % (minutes, seconds)
duration_str = '%02dm %02ds' % (minutes, seconds,)
if self.finished:
status = "Time spent: %s (Done!)" % (duration_str,)
@@ -797,12 +814,16 @@ class CursesProgress(Progress):
est_remaining_str = '%02dm %02ds remaining' % divmod(est_remaining, 60)
else:
est_remaining_str = "Unknown"
status = "Time spent: %s (est. remaining: %s)" % (
duration_str,
est_remaining_str,
status = (
"Time spent: %s (est. remaining: %s)"
% (duration_str, est_remaining_str,)
)
self.stdscr.addstr(0, 0, status, curses.A_BOLD)
self.stdscr.addstr(
0, 0,
status,
curses.A_BOLD,
)
max_len = max([len(t) for t in self.tables.keys()])
@@ -810,7 +831,9 @@ class CursesProgress(Progress):
middle_space = 1
items = self.tables.items()
items.sort(key=lambda i: (i[1]["perc"], i[0]))
items.sort(
key=lambda i: (i[1]["perc"], i[0]),
)
for i, (table, data) in enumerate(items):
if i + 2 >= rows:
@@ -821,7 +844,9 @@ class CursesProgress(Progress):
color = curses.color_pair(2) if perc == 100 else curses.color_pair(1)
self.stdscr.addstr(
i + 2, left_margin + max_len - len(table), table, curses.A_BOLD | color
i + 2, left_margin + max_len - len(table),
table,
curses.A_BOLD | color,
)
size = 20
@@ -832,13 +857,15 @@ class CursesProgress(Progress):
)
self.stdscr.addstr(
i + 2,
left_margin + max_len + middle_space,
i + 2, left_margin + max_len + middle_space,
"%s %3d%% (%d/%d)" % (progress, perc, data["num_done"], data["total"]),
)
if self.finished:
self.stdscr.addstr(rows - 1, 0, "Press any key to exit...")
self.stdscr.addstr(
rows - 1, 0,
"Press any key to exit...",
)
self.stdscr.refresh()
self.last_update = time.time()
@@ -850,25 +877,29 @@ class CursesProgress(Progress):
def set_state(self, state):
self.stdscr.clear()
self.stdscr.addstr(0, 0, state + "...", curses.A_BOLD)
self.stdscr.addstr(
0, 0,
state + "...",
curses.A_BOLD,
)
self.stdscr.refresh()
class TerminalProgress(Progress):
"""Just prints progress to the terminal
"""
def update(self, table, num_done):
super(TerminalProgress, self).update(table, num_done)
data = self.tables[table]
print(
"%s: %d%% (%d/%d)" % (table, data["perc"], data["num_done"], data["total"])
print "%s: %d%% (%d/%d)" % (
table, data["perc"],
data["num_done"], data["total"],
)
def set_state(self, state):
print(state + "...")
print state + "..."
##############################################
@@ -878,38 +909,34 @@ class TerminalProgress(Progress):
if __name__ == "__main__":
parser = argparse.ArgumentParser(
description="A script to port an existing synapse SQLite database to"
" a new PostgreSQL database."
" a new PostgreSQL database."
)
parser.add_argument("-v", action='store_true')
parser.add_argument(
"--sqlite-database",
required=True,
"--sqlite-database", required=True,
help="The snapshot of the SQLite database file. This must not be"
" currently used by a running synapse server",
" currently used by a running synapse server"
)
parser.add_argument(
"--postgres-config",
type=argparse.FileType('r'),
required=True,
help="The database config file for the PostgreSQL database",
"--postgres-config", type=argparse.FileType('r'), required=True,
help="The database config file for the PostgreSQL database"
)
parser.add_argument(
"--curses", action='store_true', help="display a curses based progress UI"
"--curses", action='store_true',
help="display a curses based progress UI"
)
parser.add_argument(
"--batch-size",
type=int,
default=1000,
"--batch-size", type=int, default=1000,
help="The number of rows to select from the SQLite table each"
" iteration [default=1000]",
" iteration [default=1000]",
)
args = parser.parse_args()
logging_config = {
"level": logging.DEBUG if args.v else logging.INFO,
"format": "%(asctime)s - %(name)s - %(lineno)d - %(levelname)s - %(message)s",
"format": "%(asctime)s - %(name)s - %(lineno)d - %(levelname)s - %(message)s"
}
if args.curses:


@@ -14,16 +14,17 @@ ignore =
pylint.cfg
tox.ini
[flake8]
[pep8]
max-line-length = 90
# W503 requires that binary operators be at the end, not start, of lines. Erik
# doesn't like it. E203 is contrary to PEP8. E731 is silly.
ignore = W503,E203,E731
# see https://pycodestyle.readthedocs.io/en/latest/intro.html#error-codes
# for error codes. The ones we ignore are:
# W503: line break before binary operator
# W504: line break after binary operator
# E203: whitespace before ':' (which is contrary to pep8?)
# E731: do not assign a lambda expression, use a def
ignore=W503,W504,E203,E731
[flake8]
# note that flake8 inherits the "ignore" settings from "pep8" (because it uses
# pep8 to do those checks), but not the "max-line-length" setting
max-line-length = 90
ignore=W503,E203,E731
[isort]
line_length = 89


@@ -1,8 +1,6 @@
#!/usr/bin/env python
# Copyright 2014-2017 OpenMarket Ltd
# Copyright 2017 Vector Creations Ltd
# Copyright 2017-2018 New Vector Ltd
# Copyright 2014-2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -88,7 +86,7 @@ setup(
name="matrix-synapse",
version=version,
packages=find_packages(exclude=["tests", "tests.*"]),
description="Reference homeserver for the Matrix decentralised comms protocol",
description="Reference Synapse Home Server",
install_requires=dependencies['requirements'](include_conditional=True).keys(),
dependency_links=dependencies["DEPENDENCY_LINKS"].values(),
include_package_data=True,


@@ -27,4 +27,4 @@ try:
except ImportError:
pass
__version__ = "0.33.8rc2"
__version__ = "0.33.5.1"


@@ -1,215 +0,0 @@
# -*- coding: utf-8 -*-
# Copyright 2015, 2016 OpenMarket Ltd
# Copyright 2018 New Vector
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import argparse
import getpass
import hashlib
import hmac
import logging
import sys
from six.moves import input
import requests as _requests
import yaml
def request_registration(
user,
password,
server_location,
shared_secret,
admin=False,
requests=_requests,
_print=print,
exit=sys.exit,
):
url = "%s/_matrix/client/r0/admin/register" % (server_location,)
# Get the nonce
r = requests.get(url, verify=False)
if r.status_code is not 200:
_print("ERROR! Received %d %s" % (r.status_code, r.reason))
if 400 <= r.status_code < 500:
try:
_print(r.json()["error"])
except Exception:
pass
return exit(1)
nonce = r.json()["nonce"]
mac = hmac.new(key=shared_secret.encode('utf8'), digestmod=hashlib.sha1)
mac.update(nonce.encode('utf8'))
mac.update(b"\x00")
mac.update(user.encode('utf8'))
mac.update(b"\x00")
mac.update(password.encode('utf8'))
mac.update(b"\x00")
mac.update(b"admin" if admin else b"notadmin")
mac = mac.hexdigest()
data = {
"nonce": nonce,
"username": user,
"password": password,
"mac": mac,
"admin": admin,
}
_print("Sending registration request...")
r = requests.post(url, json=data, verify=False)
if r.status_code is not 200:
_print("ERROR! Received %d %s" % (r.status_code, r.reason))
if 400 <= r.status_code < 500:
try:
_print(r.json()["error"])
except Exception:
pass
return exit(1)
_print("Success!")
def register_new_user(user, password, server_location, shared_secret, admin):
if not user:
try:
default_user = getpass.getuser()
except Exception:
default_user = None
if default_user:
user = input("New user localpart [%s]: " % (default_user,))
if not user:
user = default_user
else:
user = input("New user localpart: ")
if not user:
print("Invalid user name")
sys.exit(1)
if not password:
password = getpass.getpass("Password: ")
if not password:
print("Password cannot be blank.")
sys.exit(1)
confirm_password = getpass.getpass("Confirm password: ")
if password != confirm_password:
print("Passwords do not match")
sys.exit(1)
if admin is None:
admin = input("Make admin [no]: ")
if admin in ("y", "yes", "true"):
admin = True
else:
admin = False
request_registration(user, password, server_location, shared_secret, bool(admin))
def main():
logging.captureWarnings(True)
parser = argparse.ArgumentParser(
description="Used to register new users with a given home server when"
" registration has been disabled. The home server must be"
" configured with the 'registration_shared_secret' option"
" set."
)
parser.add_argument(
"-u",
"--user",
default=None,
help="Local part of the new user. Will prompt if omitted.",
)
parser.add_argument(
"-p",
"--password",
default=None,
help="New password for user. Will prompt if omitted.",
)
admin_group = parser.add_mutually_exclusive_group()
admin_group.add_argument(
"-a",
"--admin",
action="store_true",
help=(
"Register new user as an admin. "
"Will prompt if --no-admin is not set either."
),
)
admin_group.add_argument(
"--no-admin",
action="store_true",
help=(
"Register new user as a regular user. "
"Will prompt if --admin is not set either."
),
)
group = parser.add_mutually_exclusive_group(required=True)
group.add_argument(
"-c",
"--config",
type=argparse.FileType('r'),
help="Path to server config file. Used to read in shared secret.",
)
group.add_argument(
"-k", "--shared-secret", help="Shared secret as defined in server config file."
)
parser.add_argument(
"server_url",
default="https://localhost:8448",
nargs='?',
help="URL to use to talk to the home server. Defaults to "
" 'https://localhost:8448'.",
)
args = parser.parse_args()
if "config" in args and args.config:
config = yaml.safe_load(args.config)
secret = config.get("registration_shared_secret", None)
if not secret:
print("No 'registration_shared_secret' defined in config.")
sys.exit(1)
else:
secret = args.shared_secret
admin = None
if args.admin or args.no_admin:
admin = args.admin
register_new_user(args.user, args.password, args.server_url, secret, admin)
if __name__ == "__main__":
main()
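The homeserver validates this request by recomputing the same HMAC-SHA1 transcript with its registration_shared_secret. A minimal sketch of that check (the server side is an assumption written here for illustration; only the client half appears in this diff):

    import hashlib
    import hmac

    def expected_mac(shared_secret, nonce, user, password, admin=False):
        # Same transcript the client builds above:
        # nonce \x00 user \x00 password \x00 ("admin" | "notadmin")
        mac = hmac.new(key=shared_secret.encode("utf8"), digestmod=hashlib.sha1)
        for part in (nonce, user, password):
            mac.update(part.encode("utf8"))
            mac.update(b"\x00")
        mac.update(b"admin" if admin else b"notadmin")
        return mac.hexdigest()

    def mac_matches(shared_secret, submitted, **fields):
        # Constant-time comparison avoids leaking digest bytes via timing.
        return hmac.compare_digest(expected_mac(shared_secret, **fields), submitted)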


@@ -59,7 +59,6 @@ class Codes(object):
RESOURCE_LIMIT_EXCEEDED = "M_RESOURCE_LIMIT_EXCEEDED"
UNSUPPORTED_ROOM_VERSION = "M_UNSUPPORTED_ROOM_VERSION"
INCOMPATIBLE_ROOM_VERSION = "M_INCOMPATIBLE_ROOM_VERSION"
WRONG_ROOM_KEYS_VERSION = "M_WRONG_ROOM_KEYS_VERSION"
class CodeMessageException(RuntimeError):
@@ -313,20 +312,6 @@ class LimitExceededError(SynapseError):
)
class RoomKeysVersionError(SynapseError):
"""A client has tried to upload to a non-current version of the room_keys store
"""
def __init__(self, current_version):
"""
Args:
current_version (str): the current version of the store they should have used
"""
super(RoomKeysVersionError, self).__init__(
403, "Wrong room_keys version", Codes.WRONG_ROOM_KEYS_VERSION
)
self.current_version = current_version
class IncompatibleRoomVersionError(SynapseError):
"""A server is trying to join a room whose version it does not support."""


@@ -172,10 +172,7 @@ USER_FILTER_SCHEMA = {
# events a lot easier as we can then use a negative lookbehind
# assertion to split '\.' If we allowed \\ then it would
# incorrectly split '\\.' See synapse.events.utils.serialize_event
#
# Note that because this is a regular expression, we have to escape
# each backslash in the pattern.
"pattern": r"^((?!\\\\).)*$"
"pattern": "^((?!\\\).)*$"
}
}
},
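To see what the corrected pattern above does at the regex level: it admits only strings containing no double backslash, so the later split on an unescaped '\.' stays unambiguous (the field names below are made up):

    import re

    PATTERN = re.compile(r"^((?!\\\\).)*$")   # same pattern as the schema above

    print(bool(PATTERN.match(r"content\.body")))   # True:  a single '\.' escape is allowed
    print(bool(PATTERN.match(r"content\\.body")))  # False: '\\.' would split incorrectly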


@@ -64,7 +64,7 @@ class ConsentURIBuilder(object):
"""
mac = hmac.new(
key=self._hmac_secret,
msg=user_id.encode('ascii'),
msg=user_id,
digestmod=sha256,
).hexdigest()
consent_uri = "%s_matrix/consent?%s" % (


@@ -17,7 +17,6 @@ import gc
import logging
import sys
import psutil
from daemonize import Daemonize
from twisted.internet import error, reactor
@@ -25,6 +24,12 @@ from twisted.internet import error, reactor
from synapse.util import PreserveLoggingContext
from synapse.util.rlimit import change_resource_limit
try:
import affinity
except Exception:
affinity = None
logger = logging.getLogger(__name__)
@@ -84,20 +89,15 @@ def start_reactor(
with PreserveLoggingContext():
logger.info("Running")
if cpu_affinity is not None:
# Turn the bitmask into bits, reverse it so we go from 0 up
mask_to_bits = bin(cpu_affinity)[2:][::-1]
cpus = []
cpu_num = 0
for i in mask_to_bits:
if i == "1":
cpus.append(cpu_num)
cpu_num += 1
p = psutil.Process()
p.cpu_affinity(cpus)
if not affinity:
quit_with_error(
"Missing package 'affinity' required for cpu_affinity\n"
"option\n\n"
"Install by running:\n\n"
" pip install affinity\n\n"
)
logger.info("Setting CPU affinity to %s" % cpu_affinity)
affinity.set_process_affinity_mask(0, cpu_affinity)
change_resource_limit(soft_file_limit)
if gc_thresholds:
gc.set_threshold(*gc_thresholds)
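A worked example of the bitmask decoding introduced above: a mask of 0b0101 selects CPUs 0 and 2 (a sketch; psutil's cpu_affinity setter is Linux-only):

    import psutil

    cpu_affinity = 0b0101                 # CPUs 0 and 2
    bits = bin(cpu_affinity)[2:][::-1]    # '101' reversed, so index == CPU number
    cpus = [n for n, bit in enumerate(bits) if bit == "1"]
    print(cpus)                           # [0, 2]
    psutil.Process().cpu_affinity(cpus)   # pin the current process (Linux only)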


@@ -178,9 +178,6 @@ def start(config_options):
setup_logging(config, use_worker_options=True)
# This should only be done on the user directory worker or the master
config.update_user_directory = False
events.USE_FROZEN_DICTS = config.use_frozen_dicts
database_engine = create_engine(config.database_config)


@@ -68,7 +68,7 @@ class PresenceStatusStubServlet(ClientV1RestServlet):
"Authorization": auth_headers,
}
result = yield self.http_client.get_json(
self.main_uri + request.uri.decode('ascii'),
self.main_uri + request.uri,
headers=headers,
)
defer.returnValue((200, result))
@@ -125,7 +125,7 @@ class KeyUploadServlet(RestServlet):
"Authorization": auth_headers,
}
result = yield self.http_client.post_json_get_json(
self.main_uri + request.uri.decode('ascii'),
self.main_uri + request.uri,
body,
headers=headers,
)


@@ -20,7 +20,6 @@ import sys
from six import iteritems
import psutil
from prometheus_client import Gauge
from twisted.application import service
@@ -387,6 +386,7 @@ def setup(config_options):
hs.get_pusherpool().start()
hs.get_datastore().start_profiling()
hs.get_datastore().start_doing_background_updates()
hs.get_federation_client().start_get_pdu_cache()
reactor.callWhenRunning(start)
@@ -503,6 +503,7 @@ def run(hs):
def performance_stats_init():
try:
import psutil
process = psutil.Process()
# Ensure we can fetch both, and make the initial request for cpu_percent
# so the next request will use this as the initial point.
@@ -510,9 +511,12 @@ def run(hs):
process.cpu_percent(interval=None)
logger.info("report_stats can use psutil")
stats_process.append(process)
except (AttributeError):
logger.warning(
"Unable to read memory/cpu stats. Disabling reporting."
except (ImportError, AttributeError):
logger.warn(
"report_stats enabled but psutil is not installed or incorrect version."
" Disabling reporting of memory/cpu stats."
" Ensuring psutil is available will help matrix.org track performance"
" changes across releases."
)
def generate_user_daily_visit_stats():
@@ -527,13 +531,10 @@ def run(hs):
clock.looping_call(generate_user_daily_visit_stats, 5 * 60 * 1000)
# monthly active user limiting functionality
def reap_monthly_active_users():
return run_as_background_process(
"reap_monthly_active_users",
hs.get_datastore().reap_monthly_active_users,
)
clock.looping_call(reap_monthly_active_users, 1000 * 60 * 60)
reap_monthly_active_users()
clock.looping_call(
hs.get_datastore().reap_monthly_active_users, 1000 * 60 * 60
)
hs.get_datastore().reap_monthly_active_users()
@defer.inlineCallbacks
def generate_monthly_active_users():
@@ -547,15 +548,12 @@ def run(hs):
registered_reserved_users_mau_gauge.set(float(reserved_count))
max_mau_gauge.set(float(hs.config.max_mau_value))
def start_generate_monthly_active_users():
return run_as_background_process(
"generate_monthly_active_users",
generate_monthly_active_users,
)
start_generate_monthly_active_users()
hs.get_datastore().initialise_reserved_users(
hs.config.mau_limits_reserved_threepids
)
generate_monthly_active_users()
if hs.config.limit_usage_by_mau:
clock.looping_call(start_generate_monthly_active_users, 5 * 60 * 1000)
clock.looping_call(generate_monthly_active_users, 5 * 60 * 1000)
# End of monthly active user settings
if hs.config.report_stats:
@@ -571,7 +569,7 @@ def run(hs):
clock.call_later(5 * 60, start_phone_stats_home)
if hs.config.daemonize and hs.config.print_pidfile:
print(hs.config.pid_file)
print (hs.config.pid_file)
_base.start_reactor(
"synapse-homeserver",


@@ -28,7 +28,6 @@ from synapse.config.logger import setup_logging
from synapse.http.site import SynapseSite
from synapse.metrics import RegistryProxy
from synapse.metrics.resource import METRICS_PREFIX, MetricsResource
from synapse.replication.slave.storage._base import __func__
from synapse.replication.slave.storage.account_data import SlavedAccountDataStore
from synapse.replication.slave.storage.events import SlavedEventStore
from synapse.replication.slave.storage.pushers import SlavedPusherStore
@@ -50,31 +49,31 @@ class PusherSlaveStore(
SlavedAccountDataStore
):
update_pusher_last_stream_ordering_and_success = (
__func__(DataStore.update_pusher_last_stream_ordering_and_success)
DataStore.update_pusher_last_stream_ordering_and_success.__func__
)
update_pusher_failing_since = (
__func__(DataStore.update_pusher_failing_since)
DataStore.update_pusher_failing_since.__func__
)
update_pusher_last_stream_ordering = (
__func__(DataStore.update_pusher_last_stream_ordering)
DataStore.update_pusher_last_stream_ordering.__func__
)
get_throttle_params_by_room = (
__func__(DataStore.get_throttle_params_by_room)
DataStore.get_throttle_params_by_room.__func__
)
set_throttle_params = (
__func__(DataStore.set_throttle_params)
DataStore.set_throttle_params.__func__
)
get_time_of_last_push_action_before = (
__func__(DataStore.get_time_of_last_push_action_before)
DataStore.get_time_of_last_push_action_before.__func__
)
get_profile_displayname = (
__func__(DataStore.get_profile_displayname)
DataStore.get_profile_displayname.__func__
)
@@ -161,11 +160,11 @@ class PusherReplicationHandler(ReplicationClientHandler):
else:
yield self.start_pusher(row.user_id, row.app_id, row.pushkey)
elif stream_name == "events":
yield self.pusher_pool.on_new_notifications(
self.pusher_pool.on_new_notifications(
token, token,
)
elif stream_name == "receipts":
yield self.pusher_pool.on_new_receipts(
self.pusher_pool.on_new_receipts(
token, token, set(row.room_id for row in rows)
)
except Exception:
@@ -183,7 +182,7 @@ class PusherReplicationHandler(ReplicationClientHandler):
def start_pusher(self, user_id, app_id, pushkey):
key = "%s:%s" % (app_id, pushkey)
logger.info("Starting pusher %r / %r", user_id, key)
return self.pusher_pool.start_pusher_by_id(app_id, pushkey, user_id)
return self.pusher_pool._refresh_pusher(app_id, pushkey, user_id)
def start(config_options):


@@ -33,7 +33,7 @@ from synapse.http.server import JsonResource
from synapse.http.site import SynapseSite
from synapse.metrics import RegistryProxy
from synapse.metrics.resource import METRICS_PREFIX, MetricsResource
from synapse.replication.slave.storage._base import BaseSlavedStore, __func__
from synapse.replication.slave.storage._base import BaseSlavedStore
from synapse.replication.slave.storage.account_data import SlavedAccountDataStore
from synapse.replication.slave.storage.appservice import SlavedApplicationServiceStore
from synapse.replication.slave.storage.client_ips import SlavedClientIpStore
@@ -147,7 +147,7 @@ class SynchrotronPresence(object):
and haven't come back yet. If there are poke the master about them.
"""
now = self.clock.time_msec()
for user_id, last_sync_ms in list(self.users_going_offline.items()):
for user_id, last_sync_ms in self.users_going_offline.items():
if now - last_sync_ms > 10 * 1000:
self.users_going_offline.pop(user_id, None)
self.send_user_sync(user_id, False, last_sync_ms)
@@ -156,9 +156,9 @@ class SynchrotronPresence(object):
# TODO How's this supposed to work?
pass
get_states = __func__(PresenceHandler.get_states)
get_state = __func__(PresenceHandler.get_state)
current_state_for_users = __func__(PresenceHandler.current_state_for_users)
get_states = PresenceHandler.get_states.__func__
get_state = PresenceHandler.get_state.__func__
current_state_for_users = PresenceHandler.current_state_for_users.__func__
def user_syncing(self, user_id, affect_presence):
if affect_presence:
@@ -208,7 +208,7 @@ class SynchrotronPresence(object):
) for row in rows]
for state in states:
self.user_to_current_state[state.user_id] = state
self.user_to_current_state[row.user_id] = state
stream_id = token
yield self.notify_from_replication(states, stream_id)


@@ -28,7 +28,7 @@ if __name__ == "__main__":
sys.stderr.write("\n" + str(e) + "\n")
sys.exit(1)
print(getattr(config, key))
print (getattr(config, key))
sys.exit(0)
else:
sys.stderr.write("Unknown command %r\n" % (action,))

Some files were not shown because too many files have changed in this diff.