Compare commits: v1.85.2 ... erikj/work
1 commit: ac05d88bcd
@@ -31,6 +31,35 @@ sed -i \
   -e '/systemd/d' \
   pyproject.toml
 
+# Use poetry to do the installation. This ensures that the versions are all mutually
+# compatible (as far the package metadata declares, anyway); pip's package resolver
+# is more lax.
+#
+# Rather than `poetry install --no-dev`, we drop all dev dependencies and the dev-docs
+# group from the toml file. This means we don't have to ensure compatibility between
+# old deps and dev tools.
+
+pip install toml wheel
+
+REMOVE_DEV_DEPENDENCIES="
+import toml
+with open('pyproject.toml', 'r') as f:
+    data = toml.loads(f.read())
+
+del data['tool']['poetry']['dev-dependencies']
+del data['tool']['poetry']['group']['dev-docs']
+
+with open('pyproject.toml', 'w') as f:
+    toml.dump(data, f)
+"
+python3 -c "$REMOVE_DEV_DEPENDENCIES"
+
+pip install poetry==1.3.2
+poetry lock
+
+echo "::group::Patched pyproject.toml"
+cat pyproject.toml
+echo "::endgroup::"
+echo "::group::Lockfile after patch"
+cat poetry.lock
+echo "::endgroup::"
.github/workflows/dependabot_changelog.yml (new file, vendored, 49 additions)
@@ -0,0 +1,49 @@
+name: Write changelog for dependabot PR
+on:
+  pull_request:
+    types:
+      - opened
+      - reopened  # For debugging!
+
+permissions:
+  # Needed to be able to push the commit. See
+  #     https://docs.github.com/en/code-security/dependabot/working-with-dependabot/automating-dependabot-with-github-actions#enable-auto-merge-on-a-pull-request
+  # for a similar example
+  contents: write
+
+jobs:
+  add-changelog:
+    runs-on: 'ubuntu-latest'
+    if: ${{ github.actor == 'dependabot[bot]' }}
+    steps:
+      - uses: actions/checkout@v3
+        with:
+          ref: ${{ github.event.pull_request.head.ref }}
+      - name: Write, commit and push changelog
+        env:
+          PR_TITLE: ${{ github.event.pull_request.title }}
+          PR_NUMBER: ${{ github.event.pull_request.number }}
+        run: |
+          echo "${PR_TITLE}." > "changelog.d/${PR_NUMBER}".misc
+          git add changelog.d
+          git config user.email "github-actions[bot]@users.noreply.github.com"
+          git config user.name "GitHub Actions"
+          git commit -m "Changelog"
+          git push
+        shell: bash
+      # The `git push` above does not trigger CI on the dependabot PR.
+      #
+      # By default, workflows can't trigger other workflows when they're just using the
+      # default `GITHUB_TOKEN` access token. (This is intended to stop you from writing
+      # recursive workflow loops by accident, because that'll get very expensive very
+      # quickly.) Instead, you have to manually call out to another workflow, or else
+      # make your changes (i.e. the `git push` above) using a personal access token.
+      # See
+      #     https://docs.github.com/en/actions/using-workflows/triggering-a-workflow#triggering-a-workflow-from-a-workflow
+      #
+      # I have tried and failed to find a way to trigger CI on the "merge ref" of the PR.
+      # See git commit history for previous attempts. If anyone desperately wants to try
+      # again in the future, make a matrix-bot account and use its access token to git push.
+
+  # THIS WORKFLOW HAS WRITE PERMISSIONS---do not add other jobs here unless they
+  # are sufficiently locked down to dependabot only as above.
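The closing comments explain why the pushed changelog commit cannot itself trigger CI: pushes made with the default `GITHUB_TOKEN` do not start other workflows. As a hedged illustration of the personal-access-token route the last comment mentions (not part of this diff; the secret name is hypothetical), the checkout step would change along these lines:

```yaml
# Hypothetical sketch only: checking out with a bot account's personal access
# token (stored here under an assumed secret name) would make the subsequent
# `git push` run as that user, so CI would trigger on the dependabot PR.
- uses: actions/checkout@v3
  with:
    ref: ${{ github.event.pull_request.head.ref }}
    token: ${{ secrets.MATRIX_BOT_PAT }}  # assumed secret; not defined in this diff
```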
.github/workflows/docs-pr-netlify.yaml (vendored, 2 changes)
@@ -22,7 +22,7 @@ jobs:
           path: book
 
       - name: 📤 Deploy to Netlify
-        uses: matrix-org/netlify-pr-preview@v2
+        uses: matrix-org/netlify-pr-preview@v1
         with:
           path: book
           owner: ${{ github.event.workflow_run.head_repository.owner.login }}
.github/workflows/tests.yml (vendored, 55 changes)
@@ -107,15 +107,14 @@ jobs:
         uses: dtolnay/rust-toolchain@1.58.1
       - uses: Swatinem/rust-cache@v2
 
-      # Cribbed from
-      # https://github.com/AustinScola/mypy-cache-github-action/blob/85ea4f2972abed39b33bd02c36e341b28ca59213/src/restore.ts#L10-L17
+      # NB: I have two concerns with this action:
+      #   1. We occasionally see odd mypy problems that aren't reproducible
+      #      locally with clean caches. I suspect some dodgy caching behaviour.
+      #   2. The action uses GHA machinery that's deprecated
+      #      (https://github.com/AustinScola/mypy-cache-github-action/issues/277)
+      # It may be simpler to use actions/cache ourselves to restore .mypy_cache.
       - name: Restore/persist mypy's cache
-        uses: actions/cache@v3
-        with:
-          path: |
-            .mypy_cache
-          key: mypy-cache-${{ github.context.sha }}
-          restore-keys: mypy-cache-
+        uses: AustinScola/mypy-cache-github-action@df56268388422ee282636ee2c7a9cc55ec644a41
 
       - name: Run mypy
         run: poetry run mypy
@@ -314,33 +313,41 @@
       # There aren't wheels for some of the older deps, so we need to install
       # their build dependencies
       - run: |
          sudo apt-get -qq update
          sudo apt-get -qq install build-essential libffi-dev python-dev \
-           libxml2-dev libxslt-dev xmlsec1 zlib1g-dev libjpeg-dev libwebp-dev
+          libxml2-dev libxslt-dev xmlsec1 zlib1g-dev libjpeg-dev libwebp-dev
 
       - uses: actions/setup-python@v4
         with:
           python-version: '3.7'
 
+      # Calculating the old-deps actually takes a bunch of time, so we cache the
+      # pyproject.toml / poetry.lock. We need to cache pyproject.toml as
+      # otherwise the `poetry install` step will error due to the poetry.lock
+      # file being outdated.
+      #
+      # This caches the output of `Prepare old deps`, which should generate the
+      # same `pyproject.toml` and `poetry.lock` for a given `pyproject.toml` input.
+      - uses: actions/cache@v3
+        id: cache-poetry-old-deps
+        name: Cache poetry.lock
+        with:
+          path: |
+            poetry.lock
+            pyproject.toml
+          key: poetry-old-deps2-${{ hashFiles('pyproject.toml') }}
       - name: Prepare old deps
+        if: steps.cache-poetry-old-deps.outputs.cache-hit != 'true'
         run: .ci/scripts/prepare_old_deps.sh
 
-      # Note: we install using `pip` here, not poetry. `poetry install` ignores the
-      # build-system section (https://github.com/python-poetry/poetry/issues/6154), but
-      # we explicitly want to test that you can `pip install` using the oldest version
-      # of poetry-core and setuptools-rust.
-      - run: pip install .[all,test]
+      # We only now install poetry so that `setup-python-poetry` caches the
+      # right poetry.lock's dependencies.
+      - uses: matrix-org/setup-python-poetry@v1
+        with:
+          python-version: '3.7'
+          poetry-version: "1.3.2"
+          extras: "all test"
 
-      # We nuke the local copy, as we've installed synapse into the virtualenv
-      # (rather than use an editable install, which we no longer support). If we
-      # don't do this then python can't find the native lib.
-      - run: rm -rf synapse/
-
-      # Sanity check we can import/run Synapse
-      - run: python -m synapse.app.homeserver --help
-
-      - run: python -m twisted.trial -j6 tests
+      - run: poetry run trial -j6 tests
       - name: Dump logs
         # Logs are most useful when the command fails, always include them.
         if: ${{ always() }}
CHANGES.md (249 changes)
@@ -1,252 +1,3 @@
-Synapse 1.85.2 (2023-06-08)
-===========================
-
-Bugfixes
---------
-
-- Fix regression where using TLS for HTTP replication between workers did not work. Introduced in v1.85.0. ([\#15746](https://github.com/matrix-org/synapse/issues/15746))
-
-
-Synapse 1.85.1 (2023-06-07)
-===========================
-
-Note: this release only fixes a bug that stopped some deployments from upgrading to v1.85.0. There is no need to upgrade to v1.85.1 if successfully running v1.85.0.
-
-Bugfixes
---------
-
-- Fix bug in schema delta that broke upgrades for some deployments. Introduced in v1.85.0. ([\#15738](https://github.com/matrix-org/synapse/issues/15738), [\#15739](https://github.com/matrix-org/synapse/issues/15739))
-
-
-Synapse 1.85.0 (2023-06-06)
-===========================
-
-No significant changes since 1.85.0rc2.
-
-
-## Security advisory
-
-The following issues are fixed in 1.85.0 (and RCs).
-
-- [GHSA-26c5-ppr8-f33p](https://github.com/matrix-org/synapse/security/advisories/GHSA-26c5-ppr8-f33p) / [CVE-2023-32682](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2023-32682) — Low Severity
-
-  It may be possible for a deactivated user to login when using uncommon configurations.
-
-- [GHSA-98px-6486-j7qc](https://github.com/matrix-org/synapse/security/advisories/GHSA-98px-6486-j7qc) / [CVE-2023-32683](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2023-32683) — Low Severity
-
-  A discovered oEmbed or image URL can bypass the `url_preview_url_blacklist` setting potentially allowing server side request forgery or bypassing network policies. Impact is limited to IP addresses allowed by the `url_preview_ip_range_blacklist` setting (by default this only allows public IPs).
-
-See the advisories for more details. If you have any questions, email security@matrix.org.
-
-
-Synapse 1.85.0rc2 (2023-06-01)
-==============================
-
-Bugfixes
---------
-
-- Fix a performance issue introduced in Synapse v1.83.0 which meant that purging rooms was very slow and database-intensive. ([\#15693](https://github.com/matrix-org/synapse/issues/15693))
-
-
-Deprecations and Removals
--------------------------
-
-- Deprecate calling the `/register` endpoint with an unspecced `user` property for application services. ([\#15703](https://github.com/matrix-org/synapse/issues/15703))
-
-
-Internal Changes
-----------------
-
-- Speed up background jobs `populate_full_user_id_user_filters` and `populate_full_user_id_profiles`. ([\#15700](https://github.com/matrix-org/synapse/issues/15700))
-
-
-Synapse 1.85.0rc1 (2023-05-30)
-==============================
-
-Features
---------
-
-- Improve performance of backfill requests by performing backfill of previously failed requests in the background. ([\#15585](https://github.com/matrix-org/synapse/issues/15585))
-- Add a new [admin API](https://matrix-org.github.io/synapse/v1.85/usage/administration/admin_api/index.html) to [create a new device for a user](https://matrix-org.github.io/synapse/v1.85/admin_api/user_admin_api.html#create-a-device). ([\#15611](https://github.com/matrix-org/synapse/issues/15611))
-- Add Unix socket support for Redis connections. Contributed by Jason Little. ([\#15644](https://github.com/matrix-org/synapse/issues/15644))
-
-
-Bugfixes
---------
-
-- Fix a long-standing bug where setting the read marker could fail when using message retention. Contributed by Nick @ Beeper (@fizzadar). ([\#15464](https://github.com/matrix-org/synapse/issues/15464))
-- Fix a long-standing bug where the `url_preview_url_blacklist` configuration setting was not applied to oEmbed or image URLs found while previewing a URL. ([\#15601](https://github.com/matrix-org/synapse/issues/15601))
-- Fix a long-standing bug where filters with multiple backslashes were rejected. ([\#15607](https://github.com/matrix-org/synapse/issues/15607))
-- Fix a bug introduced in Synapse 1.82.0 where the error message displayed when validation of the `app_service_config_files` config option fails would be incorrectly formatted. ([\#15614](https://github.com/matrix-org/synapse/issues/15614))
-- Fix a long-standing bug where deactivated users were still able to login using the custom `org.matrix.login.jwt` login type (if enabled). ([\#15624](https://github.com/matrix-org/synapse/issues/15624))
-- Fix a long-standing bug where deactivated users were able to login in uncommon situations. ([\#15634](https://github.com/matrix-org/synapse/issues/15634))
-
-
-Improved Documentation
-----------------------
-
-- Warn users that at least 3.75GB of space is needed for the nix Synapse development environment. ([\#15613](https://github.com/matrix-org/synapse/issues/15613))
-- Remove outdated comment from the generated and sample homeserver log configs. ([\#15648](https://github.com/matrix-org/synapse/issues/15648))
-- Improve contributor docs to make it more clear that Rust is a necessary prerequisite. Contributed by @grantm. ([\#15668](https://github.com/matrix-org/synapse/issues/15668))
-
-
-Deprecations and Removals
--------------------------
-
-- Remove the old version of the R30 (30-day retained users) phone-home metric. ([\#10428](https://github.com/matrix-org/synapse/issues/10428))
-
-
-Internal Changes
-----------------
-
-- Create dependabot changelogs at release time. ([\#15481](https://github.com/matrix-org/synapse/issues/15481))
-- Add not null constraint to column `full_user_id` of tables `profiles` and `user_filters`. ([\#15537](https://github.com/matrix-org/synapse/issues/15537))
-- Allow connecting to HTTP Replication Endpoints by using `worker_name` when constructing the request. ([\#15578](https://github.com/matrix-org/synapse/issues/15578))
-- Make the `thread_id` column on `event_push_actions`, `event_push_actions_staging`, and `event_push_summary` non-null. ([\#15597](https://github.com/matrix-org/synapse/issues/15597))
-- Run mypy type checking with the minimum supported Python version to catch new usage that isn't backwards-compatible. ([\#15602](https://github.com/matrix-org/synapse/issues/15602))
-- Fix subscriptable type usage in Python <3.9. ([\#15604](https://github.com/matrix-org/synapse/issues/15604))
-- Update internal terminology. ([\#15606](https://github.com/matrix-org/synapse/issues/15606), [\#15620](https://github.com/matrix-org/synapse/issues/15620))
-- Instrument `state` and `state_group` storage-related operations to better picture what's happening when tracing. ([\#15610](https://github.com/matrix-org/synapse/issues/15610), [\#15647](https://github.com/matrix-org/synapse/issues/15647))
-- Trace how many new events from the backfill response we need to process. ([\#15633](https://github.com/matrix-org/synapse/issues/15633))
-- Re-type config paths in `ConfigError`s to be `StrSequence`s instead of `Iterable[str]`s. ([\#15615](https://github.com/matrix-org/synapse/issues/15615))
-- Update Mutual Rooms ([MSC2666](https://github.com/matrix-org/matrix-spec-proposals/pull/2666)) implementation to match new proposal text. ([\#15621](https://github.com/matrix-org/synapse/issues/15621))
-- Remove the unstable identifiers from faster joins ([MSC3706](https://github.com/matrix-org/matrix-spec-proposals/pull/3706)). ([\#15625](https://github.com/matrix-org/synapse/issues/15625))
-- Fix the olddeps CI. ([\#15626](https://github.com/matrix-org/synapse/issues/15626))
-- Remove duplicate timestamp from test logs (`_trial_temp/test.log`). ([\#15636](https://github.com/matrix-org/synapse/issues/15636))
-- Fix two memory leaks in `trial` test runs. ([\#15630](https://github.com/matrix-org/synapse/issues/15630))
-- Limit the size of the `HomeServerConfig` cache in trial test runs. ([\#15646](https://github.com/matrix-org/synapse/issues/15646))
-- Improve type hints. ([\#15658](https://github.com/matrix-org/synapse/issues/15658), [\#15659](https://github.com/matrix-org/synapse/issues/15659))
-- Add requesting user id parameter to key claim methods in `TransportLayerClient`. ([\#15663](https://github.com/matrix-org/synapse/issues/15663))
-- Speed up rebuilding of the user directory for local users. ([\#15665](https://github.com/matrix-org/synapse/issues/15665))
-- Implement "option 2" for [MSC3820](https://github.com/matrix-org/matrix-spec-proposals/pull/3820): Room version 11. ([\#15666](https://github.com/matrix-org/synapse/issues/15666), [\#15678](https://github.com/matrix-org/synapse/issues/15678))
-
-### Updates to locked dependencies
-
-* Bump furo from 2023.3.27 to 2023.5.20. ([\#15642](https://github.com/matrix-org/synapse/issues/15642))
-* Bump log from 0.4.17 to 0.4.18. ([\#15681](https://github.com/matrix-org/synapse/issues/15681))
-* Bump prometheus-client from 0.16.0 to 0.17.0. ([\#15682](https://github.com/matrix-org/synapse/issues/15682))
-* Bump pydantic from 1.10.7 to 1.10.8. ([\#15685](https://github.com/matrix-org/synapse/issues/15685))
-* Bump pygithub from 1.58.1 to 1.58.2. ([\#15643](https://github.com/matrix-org/synapse/issues/15643))
-* Bump requests from 2.28.2 to 2.31.0. ([\#15651](https://github.com/matrix-org/synapse/issues/15651))
-* Bump sphinx from 6.1.3 to 6.2.1. ([\#15641](https://github.com/matrix-org/synapse/issues/15641))
-* Bump types-bleach from 6.0.0.1 to 6.0.0.3. ([\#15686](https://github.com/matrix-org/synapse/issues/15686))
-* Bump types-pillow from 9.5.0.2 to 9.5.0.4. ([\#15640](https://github.com/matrix-org/synapse/issues/15640))
-* Bump types-pyyaml from 6.0.12.9 to 6.0.12.10. ([\#15683](https://github.com/matrix-org/synapse/issues/15683))
-* Bump types-requests from 2.30.0.0 to 2.31.0.0. ([\#15684](https://github.com/matrix-org/synapse/issues/15684))
-* Bump types-setuptools from 67.7.0.2 to 67.8.0.0. ([\#15639](https://github.com/matrix-org/synapse/issues/15639))
-
-Synapse 1.84.1 (2023-05-26)
-===========================
-
-This patch release fixes a major issue with homeservers that do not have an `instance_map` defined but which do use workers.
-If you have already upgraded to Synapse 1.84.0 and your homeserver is working normally, then there is no need to update to this patch release.
-
-
-Bugfixes
---------
-
-- Fix a bug introduced in Synapse v1.84.0 where workers do not start up when no `instance_map` was provided. ([\#15672](https://github.com/matrix-org/synapse/issues/15672))
-
-
-Internal Changes
-----------------
-
-- Add `dch` and `notify-send` to the development Nix flake so that the release script can be used. ([\#15673](https://github.com/matrix-org/synapse/issues/15673))
-
-
-Synapse 1.84.0 (2023-05-23)
-===========================
-
-The `worker_replication_*` configuration settings have been deprecated in favour of configuring the main process consistently with other instances in the `instance_map`. The deprecated settings will be removed in Synapse v1.88.0, but changing your configuration in advance is recommended. See the [upgrade notes](https://github.com/matrix-org/synapse/blob/release-v1.84/docs/upgrade.md#upgrading-to-v1840) for more information.
-
-Bugfixes
---------
-
-- Fix a bug introduced in Synapse 1.84.0rc1 where errors during startup were not reported correctly on Python < 3.10. ([\#15599](https://github.com/matrix-org/synapse/issues/15599))
-
-
-Synapse 1.84.0rc1 (2023-05-16)
-==============================
-
-Features
---------
-
-- Add an option to prevent media downloads from configured domains. ([\#15197](https://github.com/matrix-org/synapse/issues/15197))
-- Add `forget_rooms_on_leave` config option to automatically forget rooms when users leave them or are removed from them. ([\#15224](https://github.com/matrix-org/synapse/issues/15224))
-- Add redis TLS configuration options. ([\#15312](https://github.com/matrix-org/synapse/issues/15312))
-- Add a config option to delay push notifications by a random amount, to discourage time-based profiling. ([\#15516](https://github.com/matrix-org/synapse/issues/15516))
-- Stabilize support for [MSC2659](https://github.com/matrix-org/matrix-spec-proposals/pull/2659): application service ping endpoint. Contributed by Tulir @ Beeper. ([\#15528](https://github.com/matrix-org/synapse/issues/15528))
-- Implement [MSC4009](https://github.com/matrix-org/matrix-spec-proposals/pull/4009) to expand the supported characters in Matrix IDs. ([\#15536](https://github.com/matrix-org/synapse/issues/15536))
-- Advertise support for Matrix 1.6 on `/_matrix/client/versions`. ([\#15559](https://github.com/matrix-org/synapse/issues/15559))
-- Print full error and stack-trace of any exception that occurs during startup/initialization. ([\#15569](https://github.com/matrix-org/synapse/issues/15569))
-
-
-Bugfixes
---------
-
-- Don't fail on federation over TOR where SRV queries are not supported. Contributed by Zdzichu. ([\#15523](https://github.com/matrix-org/synapse/issues/15523))
-- Experimental support for [MSC4010](https://github.com/matrix-org/matrix-spec-proposals/pull/4010) which rejects setting the `"m.push_rules"` via account data. ([\#15554](https://github.com/matrix-org/synapse/issues/15554), [\#15555](https://github.com/matrix-org/synapse/issues/15555))
-- Fix a long-standing bug where an invalid membership event could cause an internal server error. ([\#15564](https://github.com/matrix-org/synapse/issues/15564))
-- Require at least poetry-core v1.1.0. ([\#15566](https://github.com/matrix-org/synapse/issues/15566), [\#15571](https://github.com/matrix-org/synapse/issues/15571))
-
-
-Deprecations and Removals
--------------------------
-
-- Remove need for `worker_replication_*` based settings in worker configuration yaml by placing this data directly on the `instance_map` instead. ([\#15491](https://github.com/matrix-org/synapse/issues/15491))
-
-
-Updates to the Docker image
----------------------------
-
-- Add pkg-config package to Stage 0 to be able to build Dockerfile on ppc64le architecture. ([\#15567](https://github.com/matrix-org/synapse/issues/15567))
-
-
-Improved Documentation
-----------------------
-
-- Clarify documentation of the "Create or modify account" Admin API. ([\#15544](https://github.com/matrix-org/synapse/issues/15544))
-- Fix path to the `statistics/database/rooms` admin API in documentation. ([\#15560](https://github.com/matrix-org/synapse/issues/15560))
-- Update and improve Mastodon Single Sign-On documentation. ([\#15587](https://github.com/matrix-org/synapse/issues/15587))
-
-
-Internal Changes
-----------------
-
-- Use oEmbed to generate URL previews for YouTube Shorts. ([\#15025](https://github.com/matrix-org/synapse/issues/15025))
-- Create new `Client` for use with HTTP Replication between workers. Contributed by Jason Little. ([\#15470](https://github.com/matrix-org/synapse/issues/15470))
-- Bump pyicu from 2.10.2 to 2.11. ([\#15509](https://github.com/matrix-org/synapse/issues/15509))
-- Remove references to supporting per-user flag for [MSC2654](https://github.com/matrix-org/matrix-spec-proposals/pull/2654). ([\#15522](https://github.com/matrix-org/synapse/issues/15522))
-- Don't use a trusted key server when running the demo scripts. ([\#15527](https://github.com/matrix-org/synapse/issues/15527))
-- Speed up rebuilding of the user directory for local users. ([\#15529](https://github.com/matrix-org/synapse/issues/15529))
-- Speed up deleting of old rows in `event_push_actions`. ([\#15531](https://github.com/matrix-org/synapse/issues/15531))
-- Install the `xmlsec` and `mdbook` packages and switch back to the upstream [cachix/devenv](https://github.com/cachix/devenv) repo in the nix development environment. ([\#15532](https://github.com/matrix-org/synapse/issues/15532), [\#15533](https://github.com/matrix-org/synapse/issues/15533), [\#15545](https://github.com/matrix-org/synapse/issues/15545))
-- Implement [MSC3987](https://github.com/matrix-org/matrix-spec-proposals/pull/3987) by removing `"dont_notify"` from the list of actions in default push rules. ([\#15534](https://github.com/matrix-org/synapse/issues/15534))
-- Move various module API callback registration methods to a dedicated class. ([\#15535](https://github.com/matrix-org/synapse/issues/15535))
-- Proxy `/user/devices` federation queries to application services for [MSC3984](https://github.com/matrix-org/matrix-spec-proposals/pull/3984). ([\#15539](https://github.com/matrix-org/synapse/issues/15539))
-- Factor out an `is_mine_server_name` method. ([\#15542](https://github.com/matrix-org/synapse/issues/15542))
-- Allow running Complement tests using [podman](https://podman.io/) by adding a `PODMAN` environment variable to `scripts-dev/complement.sh`. ([\#15543](https://github.com/matrix-org/synapse/issues/15543))
-- Bump serde from 1.0.160 to 1.0.162. ([\#15548](https://github.com/matrix-org/synapse/issues/15548))
-- Bump types-setuptools from 67.6.0.5 to 67.7.0.1. ([\#15549](https://github.com/matrix-org/synapse/issues/15549))
-- Bump sentry-sdk from 1.19.1 to 1.22.1. ([\#15550](https://github.com/matrix-org/synapse/issues/15550))
-- Bump ruff from 0.0.259 to 0.0.265. ([\#15551](https://github.com/matrix-org/synapse/issues/15551))
-- Bump hiredis from 2.2.2 to 2.2.3. ([\#15552](https://github.com/matrix-org/synapse/issues/15552))
-- Bump types-requests from 2.29.0.0 to 2.30.0.0. ([\#15553](https://github.com/matrix-org/synapse/issues/15553))
-- Add `org.matrix.msc3981` info to `/_matrix/client/versions`. ([\#15558](https://github.com/matrix-org/synapse/issues/15558))
-- Declare unstable support for [MSC3391](https://github.com/matrix-org/matrix-spec-proposals/pull/3391) under `/_matrix/client/versions` if the experimental implementation is enabled. ([\#15562](https://github.com/matrix-org/synapse/issues/15562))
-- Implement [MSC3821](https://github.com/matrix-org/matrix-spec-proposals/pull/3821) to update the redaction rules. ([\#15563](https://github.com/matrix-org/synapse/issues/15563))
-- Implement updated redaction rules from [MSC3389](https://github.com/matrix-org/matrix-spec-proposals/pull/3389). ([\#15565](https://github.com/matrix-org/synapse/issues/15565))
-- Allow `pip install` to use setuptools_rust 1.6.0 when building Synapse. ([\#15570](https://github.com/matrix-org/synapse/issues/15570))
-- Deal with upcoming Github Actions deprecations. ([\#15576](https://github.com/matrix-org/synapse/issues/15576))
-- Export `run_as_background_process` from the module API. ([\#15577](https://github.com/matrix-org/synapse/issues/15577))
-- Update build system requirements to allow building with poetry-core==1.6.0. ([\#15588](https://github.com/matrix-org/synapse/issues/15588))
-- Bump serde from 1.0.162 to 1.0.163. ([\#15589](https://github.com/matrix-org/synapse/issues/15589))
-- Bump phonenumbers from 8.13.7 to 8.13.11. ([\#15590](https://github.com/matrix-org/synapse/issues/15590))
-- Bump types-psycopg2 from 2.9.21.9 to 2.9.21.10. ([\#15591](https://github.com/matrix-org/synapse/issues/15591))
-- Bump types-commonmark from 0.9.2.2 to 0.9.2.3. ([\#15592](https://github.com/matrix-org/synapse/issues/15592))
-- Bump types-setuptools from 67.7.0.1 to 67.7.0.2. ([\#15594](https://github.com/matrix-org/synapse/issues/15594))
-
-
 Synapse 1.83.0 (2023-05-09)
 ===========================
 
Cargo.lock (generated, 15 changes)
@@ -132,9 +132,12 @@ dependencies = [
 
 [[package]]
 name = "log"
-version = "0.4.18"
+version = "0.4.17"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "518ef76f2f87365916b142844c16d8fefd85039bc5699050210a7778ee1cd1de"
+checksum = "abb12e687cfb44aa40f41fc3978ef76448f9b6038cad6aef4259d3c095a2382e"
+dependencies = [
+ "cfg-if",
+]
 
 [[package]]
 name = "memchr"
@@ -320,18 +323,18 @@ checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd"
 
 [[package]]
 name = "serde"
-version = "1.0.163"
+version = "1.0.162"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2113ab51b87a539ae008b5c6c02dc020ffa39afd2d83cffcb3f4eb2722cebec2"
+checksum = "71b2f6e1ab5c2b98c05f0f35b236b22e8df7ead6ffbf51d7808da7f8817e7ab6"
 dependencies = [
  "serde_derive",
 ]
 
 [[package]]
 name = "serde_derive"
-version = "1.0.163"
+version = "1.0.162"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8c805777e3930c8883389c602315a24224bcc738b63905ef87cd1420353ea93e"
+checksum = "a2a0814352fd64b58489904a44ea8d90cb1a91dcb6b4f5ebabc32c8318e93cb6"
 dependencies = [
  "proc-macro2",
  "quote",
changelog.d/15025.misc (new file, 1 addition)
@@ -0,0 +1 @@
+Use oEmbed to generate URL previews for YouTube Shorts.

changelog.d/15197.feature (new file, 1 addition)
@@ -0,0 +1 @@
+Add an option to prevent media downloads from configured domains.

changelog.d/15224.feature (new file, 1 addition)
@@ -0,0 +1 @@
+Add `forget_rooms_on_leave` config option to automatically forget rooms when users leave them or are removed from them.

changelog.d/15437.misc (new file, 1 addition)
@@ -0,0 +1 @@
+Make the `thread_id` column on `event_push_actions`, `event_push_actions_staging`, and `event_push_summary` non-null.

changelog.d/15470.misc (new file, 1 addition)
@@ -0,0 +1 @@
+Create new `Client` for use with HTTP Replication between workers. Contributed by Jason Little.

changelog.d/15509.misc (new file, 1 addition)
@@ -0,0 +1 @@
+Bump pyicu from 2.10.2 to 2.11.

changelog.d/15516.feature (new file, 1 addition)
@@ -0,0 +1 @@
+Add a config option to delay push notifications by a random amount, to discourage time-based profiling.

changelog.d/15522.misc (new file, 1 addition)
@@ -0,0 +1 @@
+Remove references to supporting per-user flag for [MSC2654](https://github.com/matrix-org/matrix-spec-proposals/pull/2654) (#15522).

changelog.d/15523.bugfix (new file, 1 addition)
@@ -0,0 +1 @@
+Don't fail on federation over TOR where SRV queries are not supported. Contributed by Zdzichu.

changelog.d/15527.misc (new file, 1 addition)
@@ -0,0 +1 @@
+Don't use a trusted key server when running the demo scripts.

changelog.d/15528.feature (new file, 1 addition)
@@ -0,0 +1 @@
+Stabilize support for [MSC2659](https://github.com/matrix-org/matrix-spec-proposals/pull/2659): application service ping endpoint. Contributed by Tulir @ Beeper.

changelog.d/15529.misc (new file, 1 addition)
@@ -0,0 +1 @@
+Speed up rebuilding of the user directory for local users.

changelog.d/15531.misc (new file, 1 addition)
@@ -0,0 +1 @@
+Speed up deleting of old rows in `event_push_actions`.

changelog.d/15532.misc (new file, 1 addition)
@@ -0,0 +1 @@
+Install the `xmlsec` and `mdbook` packages and switch back to the upstream [cachix/devenv](https://github.com/cachix/devenv) repo in the nix development environment.

changelog.d/15533.misc (new file, 1 addition)
@@ -0,0 +1 @@
+Install the `xmlsec` and `mdbook` packages and switch back to the upstream [cachix/devenv](https://github.com/cachix/devenv) repo in the nix development environment.

changelog.d/15534.misc (new file, 1 addition)
@@ -0,0 +1 @@
+Implement [MSC3987](https://github.com/matrix-org/matrix-spec-proposals/pull/3987) by removing `"dont_notify"` from the list of actions in default push rules.

changelog.d/15535.misc (new file, 1 addition)
@@ -0,0 +1 @@
+Move various module API callback registration methods to a dedicated class.

changelog.d/15536.feature (new file, 1 addition)
@@ -0,0 +1 @@
+Implement [MSC4009](https://github.com/matrix-org/matrix-spec-proposals/pull/4009) to expand the supported characters in Matrix IDs.

changelog.d/15539.misc (new file, 1 addition)
@@ -0,0 +1 @@
+Proxy `/user/devices` federation queries to application services for [MSC3984](https://github.com/matrix-org/matrix-spec-proposals/pull/3984).

changelog.d/15542.misc (new file, 1 addition)
@@ -0,0 +1 @@
+Factor out an `is_mine_server_name` method.

changelog.d/15543.misc (new file, 1 addition)
@@ -0,0 +1 @@
+Allow running Complement tests using [podman](https://podman.io/) by adding a `PODMAN` environment variable to `scripts-dev/complement.sh`.

changelog.d/15544.doc (new file, 1 addition)
@@ -0,0 +1 @@
+Clarify documentation of the "Create or modify account" Admin API.

changelog.d/15545.misc (new file, 1 addition)
@@ -0,0 +1 @@
+Install the `xmlsec` and `mdbook` packages and switch back to the upstream [cachix/devenv](https://github.com/cachix/devenv) repo in the nix development environment.

changelog.d/15548.misc (new file, 1 addition)
@@ -0,0 +1 @@
+Bump serde from 1.0.160 to 1.0.162.

changelog.d/15549.misc (new file, 1 addition)
@@ -0,0 +1 @@
+Bump types-setuptools from 67.6.0.5 to 67.7.0.1.

changelog.d/15550.misc (new file, 1 addition)
@@ -0,0 +1 @@
+Bump sentry-sdk from 1.19.1 to 1.22.1.

changelog.d/15551.misc (new file, 1 addition)
@@ -0,0 +1 @@
+Bump ruff from 0.0.259 to 0.0.265.

changelog.d/15552.misc (new file, 1 addition)
@@ -0,0 +1 @@
+Bump hiredis from 2.2.2 to 2.2.3.

changelog.d/15553.misc (new file, 1 addition)
@@ -0,0 +1 @@
+Bump types-requests from 2.29.0.0 to 2.30.0.0.

changelog.d/15554.bugfix (new file, 1 addition)
@@ -0,0 +1 @@
+Experimental support for [MSC4010](https://github.com/matrix-org/matrix-spec-proposals/pull/4010) which rejects setting the `"m.push_rules"` via account data.

changelog.d/15555.bugfix (new file, 1 addition)
@@ -0,0 +1 @@
+Experimental support for [MSC4010](https://github.com/matrix-org/matrix-spec-proposals/pull/4010) which rejects setting the `"m.push_rules"` via account data.

changelog.d/15560.doc (new file, 1 addition)
@@ -0,0 +1 @@
+Fix path to the `statistics/database/rooms` admin API in documentation.
@@ -70,10 +70,6 @@ redis:
     port: 6379
     # dbid: <redis_logical_db_id>
     # password: <secret_password>
-    # use_tls: True
-    # certificate_file: <path_to_certificate>
-    # private_key_file: <path_to_private_key>
-    # ca_file: <path_to_ca_certificate>
 ```
 
 This assumes that your Redis service is called `redis` in your Docker Compose file.
debian/changelog (vendored, 48 changes)
@@ -1,51 +1,3 @@
-matrix-synapse-py3 (1.85.2) stable; urgency=medium
-
-  * New Synapse release 1.85.2.
-
- -- Synapse Packaging team <packages@matrix.org>  Thu, 08 Jun 2023 13:04:18 +0100
-
-matrix-synapse-py3 (1.85.1) stable; urgency=medium
-
-  * New Synapse release 1.85.1.
-
- -- Synapse Packaging team <packages@matrix.org>  Wed, 07 Jun 2023 10:51:12 +0100
-
-matrix-synapse-py3 (1.85.0) stable; urgency=medium
-
-  * New Synapse release 1.85.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 06 Jun 2023 09:39:29 +0100
-
-matrix-synapse-py3 (1.85.0~rc2) stable; urgency=medium
-
-  * New Synapse release 1.85.0rc2.
-
- -- Synapse Packaging team <packages@matrix.org>  Thu, 01 Jun 2023 09:16:18 -0700
-
-matrix-synapse-py3 (1.85.0~rc1) stable; urgency=medium
-
-  * New Synapse release 1.85.0rc1.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 30 May 2023 13:56:54 +0100
-
-matrix-synapse-py3 (1.84.1) stable; urgency=medium
-
-  * New Synapse release 1.84.1.
-
- -- Synapse Packaging team <packages@matrix.org>  Fri, 26 May 2023 16:15:30 +0100
-
-matrix-synapse-py3 (1.84.0) stable; urgency=medium
-
-  * New Synapse release 1.84.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 23 May 2023 10:57:22 +0100
-
-matrix-synapse-py3 (1.84.0~rc1) stable; urgency=medium
-
-  * New Synapse release 1.84.0rc1.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 16 May 2023 11:12:02 +0100
-
 matrix-synapse-py3 (1.83.0) stable; urgency=medium
 
   * New Synapse release 1.83.0.
@@ -37,7 +37,7 @@ RUN \
   --mount=type=cache,target=/var/cache/apt,sharing=locked \
   --mount=type=cache,target=/var/lib/apt,sharing=locked \
   apt-get update -qq && apt-get install -yqq \
-  build-essential curl git libffi-dev libssl-dev pkg-config \
+  build-essential curl git libffi-dev libssl-dev \
   && rm -rf /var/lib/apt/lists/*
 
 # Install rust and ensure its in the PATH.
@@ -6,6 +6,10 @@
 worker_app: "{{ app }}"
 worker_name: "{{ name }}"
 
+# The replication listener on the main synapse process.
+worker_replication_host: 127.0.0.1
+worker_replication_http_port: 9093
+
 worker_listeners:
   - type: http
     port: {{ port }}
@@ -69,9 +69,6 @@ import yaml
 from jinja2 import Environment, FileSystemLoader
 
 MAIN_PROCESS_HTTP_LISTENER_PORT = 8080
-MAIN_PROCESS_INSTANCE_NAME = "main"
-MAIN_PROCESS_LOCALHOST_ADDRESS = "127.0.0.1"
-MAIN_PROCESS_REPLICATION_PORT = 9093
 
 # A simple name used as a placeholder in the WORKERS_CONFIG below. This will be replaced
 # during processing with the name of the worker.
@@ -722,8 +719,8 @@ def generate_worker_files(
     # shared config file.
     listeners = [
         {
-            "port": MAIN_PROCESS_REPLICATION_PORT,
-            "bind_address": MAIN_PROCESS_LOCALHOST_ADDRESS,
+            "port": 9093,
+            "bind_address": "127.0.0.1",
             "type": "http",
             "resources": [{"names": ["replication"]}],
         }
@@ -873,14 +870,6 @@ def generate_worker_files(
 
     workers_in_use = len(requested_worker_types) > 0
 
-    # If there are workers, add the main process to the instance_map too.
-    if workers_in_use:
-        instance_map = shared_config.setdefault("instance_map", {})
-        instance_map[MAIN_PROCESS_INSTANCE_NAME] = {
-            "host": MAIN_PROCESS_LOCALHOST_ADDRESS,
-            "port": MAIN_PROCESS_REPLICATION_PORT,
-        }
-
     # Shared homeserver config
     convert(
         "/conf/shared.yaml.j2",
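For orientation, the `if workers_in_use:` block removed above (present on the v1.85.2 side of the comparison) writes the main process into the generated shared configuration. A sketch of the YAML it emits, using the host and port from the constants in the first hunk, assuming at least one worker is requested:

```yaml
# Generated shared-config fragment (sketch): the main process entry that the
# removed block adds to the instance_map.
instance_map:
  main:
    host: 127.0.0.1
    port: 9093
```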
@@ -813,33 +813,6 @@ The following fields are returned in the JSON response body:
 
 - `total` - Total number of user's devices.
 
-### Create a device
-
-Creates a new device for a specific `user_id` and `device_id`. Does nothing if the `device_id`
-exists already.
-
-The API is:
-
-```
-POST /_synapse/admin/v2/users/<user_id>/devices
-
-{
-  "device_id": "QBUAZIFURK"
-}
-```
-
-An empty JSON dict is returned.
-
-**Parameters**
-
-The following parameters should be set in the URL:
-
-- `user_id` - fully qualified: for example, `@user:server.com`.
-
-The following fields are required in the JSON request body:
-
-- `device_id` - The device ID to create.
-
 ### Delete multiple devices
 Deletes the given devices for a specific `user_id`, and invalidates
 any access token associated with them.
@@ -22,9 +22,6 @@ on Windows is not officially supported.
 
 The code of Synapse is written in Python 3. To do pretty much anything, you'll need [a recent version of Python 3](https://www.python.org/downloads/). Your Python also needs support for [virtual environments](https://docs.python.org/3/library/venv.html). This is usually built-in, but some Linux distributions like Debian and Ubuntu split it out into its own package. Running `sudo apt install python3-venv` should be enough.
 
-A recent version of the Rust compiler is needed to build the native modules. The
-easiest way of installing the latest version is to use [rustup](https://rustup.rs/).
-
 Synapse can connect to PostgreSQL via the [psycopg2](https://pypi.org/project/psycopg2/) Python library. Building this library from source requires access to PostgreSQL's C header files. On Debian or Ubuntu Linux, these can be installed with `sudo apt install libpq-dev`.
 
 Synapse has an optional, improved user search with better Unicode support. For that you need the development package of `libicu`. On Debian or Ubuntu Linux, this can be installed with `sudo apt install libicu-dev`.
@@ -33,6 +30,9 @@ The source code of Synapse is hosted on GitHub. You will also need [a recent ver
 
 For some tests, you will need [a recent version of Docker](https://docs.docker.com/get-docker/).
 
+A recent version of the Rust compiler is needed to build the native modules. The
+easiest way of installing the latest version is to use [rustup](https://rustup.rs/).
+
 
 # 3. Get the source.
 
@@ -53,11 +53,6 @@ can find many good git tutorials on the web.
 
 # 4. Install the dependencies
 
-
-Before installing the Python dependencies, make sure you have installed a recent version
-of Rust (see the "What do I need?" section above). The easiest way of installing the
-latest version is to use [rustup](https://rustup.rs/).
-
 Synapse uses the [poetry](https://python-poetry.org/) project to manage its dependencies
 and development environment. Once you have installed Python 3 and added the
 source, you should install `poetry`.
@@ -81,8 +76,7 @@ cd path/where/you/have/cloned/the/repository
 poetry install --extras all
 ```
 
-This will install the runtime and developer dependencies for the project. Be sure to check
-that the `poetry install` step completed cleanly.
+This will install the runtime and developer dependencies for the project.
 
 ## Running Synapse via poetry
 
@@ -90,31 +84,14 @@ To start a local instance of Synapse in the locked poetry environment, create a
 
 ```sh
 cp docs/sample_config.yaml homeserver.yaml
 cp docs/sample_log_config.yaml log_config.yaml
 ```
 
-Now edit `homeserver.yaml`, things you might want to change include:
-
-- Set a `server_name`
-- Adjusting paths to be correct for your system like the `log_config` to point to the log config you just copied
-- Using a [PostgreSQL database instead of SQLite](https://matrix-org.github.io/synapse/latest/usage/configuration/config_documentation.html#database)
-- Adding a [`registration_shared_secret`](https://matrix-org.github.io/synapse/latest/usage/configuration/config_documentation.html#registration_shared_secret) so you can use [`register_new_matrix_user` command](https://matrix-org.github.io/synapse/latest/setup/installation.html#registering-a-user).
-
-And then run Synapse with the following command:
+Now edit homeserver.yaml, and run Synapse with:
 
 ```sh
 poetry run python -m synapse.app.homeserver -c homeserver.yaml
 ```
 
-If you get an error like the following:
-
-```
-importlib.metadata.PackageNotFoundError: matrix-synapse
-```
-
-this probably indicates that the `poetry install` step did not complete cleanly - go back and
-resolve any issues and re-run until successful.
-
 # 5. Get in touch.
 
 Join our developer community on Matrix: [#synapse-dev:matrix.org](https://matrix.to/#/#synapse-dev:matrix.org)!
@@ -260,17 +260,15 @@ doesn't require poetry. (It's what we use in CI too). However, you could try
 
 ## ...handle a Dependabot pull request?
 
-Synapse uses Dependabot to keep the `poetry.lock` and `Cargo.lock` file
-up-to-date with the latest releases of our dependencies. The changelog check is
-omitted for Dependabot PRs; the release script will include them in the
-changelog.
-
-When reviewing a dependabot PR, ensure that:
+Synapse uses Dependabot to keep the `poetry.lock` file up-to-date. When it
+creates a pull request a GitHub Action will run to automatically create a changelog
+file. Ensure that:
 
 * the lockfile changes look reasonable;
 * the upstream changelog file (linked in the description) doesn't include any
   breaking changes;
-* continuous integration passes.
+* continuous integration passes (due to permissions, the GitHub Actions run on
+  the changelog commit will fail, look at the initial commit of the pull request);
 
 In particular, any updates to the type hints (usually packages which start with `types-`)
 should be safe to merge if linting passes.
@@ -46,9 +46,6 @@ instead.
 
 If the authentication is unsuccessful, the module must return `None`.
 
-Note that the user is not automatically registered, the `register_user(..)` method of
-the [module API](writing_a_module.html) can be used to lazily create users.
-
 If multiple modules register an auth checker for the same login type but with different
 fields, Synapse will refuse to start.
@@ -569,7 +569,7 @@ You should receive a response similar to the following. Make sure to save it.
 {"client_id":"someclientid_123","client_secret":"someclientsecret_123","id":"12345","name":"my_synapse_app","redirect_uri":"https://[synapse_public_baseurl]/_synapse/client/oidc/callback","website":null,"vapid_key":"somerandomvapidkey_123"}
 ```
 
-As the Synapse login mechanism needs an attribute to uniquely identify users, and Mastodon's endpoint does not return a `sub` property, an alternative `subject_template` has to be set. Your Synapse configuration should include the following:
+As the Synapse login mechanism needs an attribute to uniquely identify users, and Mastodon's endpoint does not return a `sub` property, an alternative `subject_claim` has to be set. Your Synapse configuration should include the following:
 
 ```yaml
 oidc_providers:
@@ -585,9 +585,7 @@ oidc_providers:
       scopes: ["read"]
       user_mapping_provider:
         config:
-          subject_template: "{{ user.id }}"
-          localpart_template: "{{ user.username }}"
-          display_name_template: "{{ user.display_name }}"
+          subject_claim: "id"
 ```
 
 Note that the fields `client_id` and `client_secret` are taken from the CURL response above.
@@ -30,6 +30,12 @@ minimal.
 
 See [the TCP replication documentation](tcp_replication.md).
 
+### The Slaved DataStore
+
+There are read-only version of the synapse storage layer in
+`synapse/replication/slave/storage` that use the response of the
+replication API to invalidate their caches.
+
 ### The TCP Replication Module
 Information about how the tcp replication module is structured, including how
 the classes interact, can be found in
@@ -68,7 +68,9 @@ root:
     # Write logs to the `buffer` handler, which will buffer them together in memory,
    # then write them to a file.
    #
-    # Replace "buffer" with "console" to log to stderr instead.
+    # Replace "buffer" with "console" to log to stderr instead. (Note that you'll
+    # also need to update the configuration for the `twisted` logger above, in
+    # this case.)
    #
    handlers: [buffer]
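As an illustration of the longer comment's caveat (a sketch, not part of the diff): switching from buffered file logging to stderr means pointing both the root handler and the `twisted` logger at `console`, assuming the handler names used by the sample log config:

```yaml
# Sketch of the console variant the comment describes; the `console` handler
# and `twisted` logger names are assumed from the sample log config.
loggers:
    twisted:
        handlers: [console]

root:
    handlers: [console]
```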
@@ -1,6 +1,10 @@
 worker_app: synapse.app.generic_worker
 worker_name: generic_worker1
 
+# The replication listener on the main synapse process.
+worker_replication_host: 127.0.0.1
+worker_replication_http_port: 9093
+
 worker_listeners:
   - type: http
     port: 8083
docs/upgrade.md (100 changes)
@@ -88,106 +88,6 @@ process, for example:
 dpkg -i matrix-synapse-py3_1.3.0+stretch1_amd64.deb
 ```
 
-# Upgrading to v1.85.0
-
-## Application service registration with "user" property deprecation
-
-Application services should ensure they call the `/register` endpoint with a
-`username` property. The legacy `user` property is considered deprecated and
-should no longer be included.
-
-A future version of Synapse (v1.88.0 or later) will remove support for legacy
-application service login.
-
-# Upgrading to v1.84.0
-
-## Deprecation of `worker_replication_*` configuration settings
-
-When using workers,
-
-* `worker_replication_host`
-* `worker_replication_http_port`
-* `worker_replication_http_tls`
-
-should now be removed from individual worker YAML configurations and the main process should instead be added to the `instance_map`
-in the shared YAML configuration, using the name `main`.
-
-The old `worker_replication_*` settings are now considered deprecated and are expected to be removed in Synapse v1.88.0.
-
-
-### Example change
-
-#### Before:
-
-Shared YAML
-```yaml
-instance_map:
-  generic_worker1:
-    host: localhost
-    port: 5678
-    tls: false
-```
-
-Worker YAML
-```yaml
-worker_app: synapse.app.generic_worker
-worker_name: generic_worker1
-
-worker_replication_host: localhost
-worker_replication_http_port: 3456
-worker_replication_http_tls: false
-
-worker_listeners:
-  - type: http
-    port: 1234
-    resources:
-      - names: [client, federation]
-  - type: http
-    port: 5678
-    resources:
-      - names: [replication]
-
-worker_log_config: /etc/matrix-synapse/generic-worker-log.yaml
-```
-
-
-#### After:
-
-Shared YAML
-```yaml
-instance_map:
-  main:
-    host: localhost
-    port: 3456
-    tls: false
-  generic_worker1:
-    host: localhost
-    port: 5678
-    tls: false
-```
-
-Worker YAML
-```yaml
-worker_app: synapse.app.generic_worker
-worker_name: generic_worker1
-
-worker_listeners:
-  - type: http
-    port: 1234
-    resources:
-      - names: [client, federation]
-  - type: http
-    port: 5678
-    resources:
-      - names: [replication]
-
-worker_log_config: /etc/matrix-synapse/generic-worker-log.yaml
-
-```
-Notes:
-* `tls` is optional but mirrors the functionality of `worker_replication_http_tls`
-
-
 # Upgrading to v1.81.0
 
 ## Application service path & authentication deprecations
@@ -42,6 +42,11 @@ The following statistics are sent to the configured reporting endpoint:
 | `daily_e2ee_messages` | int | The number of (state) events with the type `m.room.encrypted` seen in the last 24 hours. |
 | `daily_sent_messages` | int | The number of (state) events sent by a local user with the type `m.room.message` seen in the last 24 hours. |
 | `daily_sent_e2ee_messages` | int | The number of (state) events sent by a local user with the type `m.room.encrypted` seen in the last 24 hours. |
+| `r30_users_all` | int | The number of 30 day retained users, defined as users who have created their accounts more than 30 days ago, where they were last seen at most 30 days ago and where those two timestamps are over 30 days apart. Includes clients that do not fit into the below r30 client types. |
+| `r30_users_android` | int | The number of 30 day retained users, as defined above. Filtered only to clients with "Android" in the user agent string. |
+| `r30_users_ios` | int | The number of 30 day retained users, as defined above. Filtered only to clients with "iOS" in the user agent string. |
+| `r30_users_electron` | int | The number of 30 day retained users, as defined above. Filtered only to clients with "Electron" in the user agent string. |
+| `r30_users_web` | int | The number of 30 day retained users, as defined above. Filtered only to clients with "Mozilla" or "Gecko" in the user agent string. |
 | `r30v2_users_all` | int | The number of 30 day retained users, with a revised algorithm. Defined as users that appear more than once in the past 60 days, and have more than 30 days between the most and least recent appearances in the past 60 days. Includes clients that do not fit into the below r30 client types. |
 | `r30v2_users_android` | int | The number of 30 day retained users, as defined above. Filtered only to clients with ("riot" or "element") and "android" (case-insensitive) in the user agent string. |
 | `r30v2_users_ios` | int | The number of 30 day retained users, as defined above. Filtered only to clients with ("riot" or "element") and "ios" (case-insensitive) in the user agent string. |
@@ -3884,20 +3884,15 @@ federation_sender_instances:
 ### `instance_map`
 
 When using workers this should be a map from [`worker_name`](#worker_name) to the
-HTTP replication listener of the worker, if configured, and to the main process.
+HTTP replication listener of the worker, if configured.
 Each worker declared under [`stream_writers`](../../workers.md#stream-writers) needs
 a HTTP replication listener, and that listener should be included in the `instance_map`.
-The main process also needs an entry on the `instance_map`, and it should be listed under
-`main` **if even one other worker exists**. Ensure the port matches with what is declared
-inside the `listener` block for a `replication` listener.
-
+(The main process also needs an HTTP replication listener, but it should not be
+listed in the `instance_map`.)
 
 Example configuration:
 ```yaml
 instance_map:
-  main:
-    host: localhost
-    port: 8030
   worker1:
     host: localhost
     port: 8034
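As a reading aid for the v1.85.2 text above ("Ensure the port matches..."): each `instance_map` entry has to line up with a `replication` listener declared in that process's own configuration. A minimal sketch for `worker1`, with values matching the example:

```yaml
# worker1's own configuration (sketch): the replication listener that the
# `worker1: port: 8034` entry in the shared instance_map must match.
worker_name: worker1
worker_listeners:
  - type: http
    port: 8034
    resources:
      - names: [replication]
```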
@@ -3979,22 +3974,11 @@ This setting has the following sub-options:
* `enabled`: whether to use Redis support. Defaults to false.
* `host` and `port`: Optional host and port to use to connect to redis. Defaults to
  localhost and 6379
-* `path`: The full path to a local Unix socket file. **If this is used, `host` and
-  `port` are ignored.** Defaults to `/tmp/redis.sock`
* `password`: Optional password if configured on the Redis instance.
* `dbid`: Optional Redis dbid, if it needs to connect to a specific Redis logical db.
-* `use_tls`: Whether to use a TLS connection. Defaults to false.
-* `certificate_file`: Optional path to the certificate file
-* `private_key_file`: Optional path to the private key file
-* `ca_file`: Optional path to the CA certificate file. Use either this or:
-* `ca_path`: Optional path to the folder containing the CA certificate file
-
-_Added in Synapse 1.78.0._
-
-_Changed in Synapse 1.84.0: Added use\_tls, certificate\_file, private\_key\_file, ca\_file and ca\_path attributes_
-
-_Changed in Synapse 1.85.0: Added path option to use a local Unix socket_
-
Example configuration:
```yaml
redis:
@@ -4003,10 +3987,6 @@ redis:
  port: 6379
  password: <secret_password>
  dbid: <dbid>
- #use_tls: True
- #certificate_file: <path_to_the_certificate_file>
- #private_key_file: <path_to_the_private_key_file>
- #ca_file: <path_to_the_ca_certificate_file>
```
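For reference, the Unix-socket variant described by the removed `path` option replaces `host`/`port` entirely; the following is a hypothetical sketch, with a made-up socket location rather than a recommended value:

```yaml
# Hypothetical sketch only: when `path` is set, `host` and `port` are ignored.
redis:
  enabled: true
  path: /run/redis/redis.sock
  password: <secret_password>
```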
---
## Individual worker configuration
@@ -4044,7 +4024,6 @@ worker_name: generic_worker1
```
---
### `worker_replication_host`
-*Deprecated as of version 1.84.0. Place `host` under `main` entry on the [`instance_map`](#instance_map) in your shared yaml configuration instead.*

The HTTP replication endpoint that it should talk to on the main Synapse process.
The main Synapse process defines this with a `replication` resource in
@@ -4056,7 +4035,6 @@ worker_replication_host: 127.0.0.1
```
---
### `worker_replication_http_port`
-*Deprecated as of version 1.84.0. Place `port` under `main` entry on the [`instance_map`](#instance_map) in your shared yaml configuration instead.*

The HTTP replication port that it should talk to on the main Synapse process.
The main Synapse process defines this with a `replication` resource in
@@ -4068,7 +4046,6 @@ worker_replication_http_port: 9093
```
---
### `worker_replication_http_tls`
-*Deprecated as of version 1.84.0. Place `tls` under `main` entry on the [`instance_map`](#instance_map) in your shared yaml configuration instead.*

Whether TLS should be used for talking to the HTTP replication port on the main
Synapse process.
@@ -4094,9 +4071,9 @@ A worker can handle HTTP requests. To do so, a `worker_listeners` option
must be declared, in the same way as the [`listeners` option](#listeners)
in the shared config.

-Workers declared in [`stream_writers`](#stream_writers) and [`instance_map`](#instance_map)
-will need to include a `replication` listener here, in order to accept internal HTTP
-requests from other workers.
+Workers declared in [`stream_writers`](#stream_writers) will need to include a
+`replication` listener here, in order to accept internal HTTP requests from
+other workers.

Example configuration (the body of the example, a sketch of which follows, falls outside this hunk's context):
```yaml
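# (Editor's sketch: the example body lies outside this diff hunk's context.
# A `replication` listener for a worker named in `instance_map` might look
# like this; the port is illustrative and must match that worker's
# `instance_map` entry, e.g. the `worker1`/8034 pairing shown earlier.)
worker_listeners:
  - type: http
    port: 8034
    resources:
      - names: [replication]
```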
docs/workers.md
@@ -87,18 +87,12 @@ shared configuration file.

### Shared configuration

-Normally, only a few changes are needed to make an existing configuration
-file suitable for use with workers:
-* First, you need to enable an
+Normally, only a couple of changes are needed to make an existing configuration
+file suitable for use with workers. First, you need to enable an
["HTTP replication listener"](usage/configuration/config_documentation.md#listeners)
-for the main process
-* Secondly, you need to enable
-[redis-based replication](usage/configuration/config_documentation.md#redis)
-* You will need to add an [`instance_map`](usage/configuration/config_documentation.md#instance_map)
-with the `main` process defined, as well as the relevant connection information from
-its HTTP `replication` listener (defined in step 1 above). Note that the `host` defined
-is the address the worker needs to look for the `main` process at, not necessarily the same address that is bound to.
-* Optionally, a [shared secret](usage/configuration/config_documentation.md#worker_replication_secret)
+for the main process; and secondly, you need to enable
+[redis-based replication](usage/configuration/config_documentation.md#redis).
+Optionally, a [shared secret](usage/configuration/config_documentation.md#worker_replication_secret)
can be used to authenticate HTTP traffic between workers. For example:

```yaml
@@ -117,11 +111,6 @@ worker_replication_secret: ""

redis:
    enabled: true
-
-instance_map:
-    main:
-        host: 'localhost'
-        port: 9093
```

See the [configuration manual](usage/configuration/config_documentation.md)
@@ -141,13 +130,13 @@ In the config file for each worker, you must specify:
* The type of worker ([`worker_app`](usage/configuration/config_documentation.md#worker_app)).
  The currently available worker applications are listed [below](#available-worker-applications).
* A unique name for the worker ([`worker_name`](usage/configuration/config_documentation.md#worker_name)).
+* The HTTP replication endpoint that it should talk to on the main synapse process
+  ([`worker_replication_host`](usage/configuration/config_documentation.md#worker_replication_host) and
+  [`worker_replication_http_port`](usage/configuration/config_documentation.md#worker_replication_http_port)).
* If handling HTTP requests, a [`worker_listeners`](usage/configuration/config_documentation.md#worker_listeners) option
  with an `http` listener.
* **Synapse 1.72 and older:** if handling the `^/_matrix/client/v3/keys/upload` endpoint, the HTTP URI for
  the main process (`worker_main_http_uri`). This config option is no longer required and is ignored when running Synapse 1.73 and newer.
-* **Synapse 1.83 and older:** The HTTP replication endpoint that the worker should talk to on the main synapse process
-  ([`worker_replication_host`](usage/configuration/config_documentation.md#worker_replication_host) and
-  [`worker_replication_http_port`](usage/configuration/config_documentation.md#worker_replication_http_port)). If using Synapse 1.84 and newer, these are not needed if `main` is defined on the [shared configuration](#shared-configuration) `instance_map`

For example:
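The example itself falls outside this hunk's context; a representative worker configuration combining the options listed above would be the following sketch (the ports are illustrative; `generic_worker1` reuses the name seen earlier in this diff):

```yaml
# Illustrative sketch of a single worker's config file.
worker_app: synapse.app.generic_worker
worker_name: generic_worker1
worker_replication_host: 127.0.0.1
worker_replication_http_port: 9093

worker_listeners:
  - type: http
    port: 8083
    resources:
      - names: [client, federation]
```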
@@ -428,14 +417,11 @@ effects of bursts of events from that bridge on events sent by normal users.

Additionally, the writing of specific streams (such as events) can be moved off
of the main process to a particular worker.

-To enable this, the worker must have:
-* An [HTTP `replication` listener](usage/configuration/config_documentation.md#listeners) configured,
-* Have a [`worker_name`](usage/configuration/config_documentation.md#worker_name)
+To enable this, the worker must have an
+[HTTP `replication` listener](usage/configuration/config_documentation.md#listeners) configured,
+have a [`worker_name`](usage/configuration/config_documentation.md#worker_name)
and be listed in the [`instance_map`](usage/configuration/config_documentation.md#instance_map)
-config.
-* Have the main process declared on the [`instance_map`](usage/configuration/config_documentation.md#instance_map) as well.
-
-Note: The same worker can handle multiple streams, but unless otherwise documented,
+config. The same worker can handle multiple streams, but unless otherwise documented,
each stream can only have a single writer.

For example, to move event persistence off to a dedicated worker, the shared
@@ -443,9 +429,6 @@ configuration would include:

```yaml
instance_map:
-  main:
-    host: localhost
-    port: 8030
  event_persister1:
    host: localhost
    port: 8034
57 flake.nix
@@ -1,30 +1,35 @@

-# A Nix flake that sets up a complete Synapse development environment. Dependencies
+# A nix flake that sets up a complete Synapse development environment. Dependencies
# for the SyTest (https://github.com/matrix-org/sytest) and Complement
# (https://github.com/matrix-org/complement) Matrix homeserver test suites are also
# installed automatically.
#
-# You must have already installed Nix (https://nixos.org) on your system to use this.
-# Nix can be installed on Linux or MacOS; NixOS is not required. Windows is not
-# directly supported, but Nix can be installed inside of WSL2 or even Docker
+# You must have already installed nix (https://nixos.org) on your system to use this.
+# nix can be installed on Linux or MacOS; NixOS is not required. Windows is not
+# directly supported, but nix can be installed inside of WSL2 or even Docker
# containers. Please refer to https://nixos.org/download for details.
#
# You must also enable support for flakes in Nix. See the following for how to
# do so permanently: https://nixos.wiki/wiki/Flakes#Enable_flakes
#
-# Be warned: you'll need over 3.75 GB of free space to download all the dependencies.
-#
# Usage:
#
-# With Nix installed, navigate to the directory containing this flake and run
+# With nix installed, navigate to the directory containing this flake and run
# `nix develop --impure`. The `--impure` is necessary in order to store state
# locally from "services", such as PostgreSQL and Redis.
#
# You should now be dropped into a new shell with all programs and dependencies
# available to you!
#
-# You can start up pre-configured local Synapse, PostgreSQL and Redis instances by
+# You can start up pre-configured, local PostgreSQL and Redis instances by
# running: `devenv up`. To stop them, use Ctrl-C.
#
+# A PostgreSQL database called 'synapse' will be set up for you, along with
+# a PostgreSQL user named 'synapse_user'.
+# The 'host' can be found by running `echo $PGHOST` with the development
+# shell activated. Use these values to configure your Synapse to connect
+# to the local PostgreSQL database. You do not need to specify a password.
+# https://matrix-org.github.io/synapse/latest/postgres
+#
# All state (the venv, postgres and redis data and config) are stored in
# .devenv/state. Deleting a file from here and then re-entering the shell
# will recreate these files from scratch.
@@ -61,7 +66,7 @@
      let
        pkgs = nixpkgs.legacyPackages.${system};
      in {
-       # Everything is configured via devenv - a Nix module for creating declarative
+       # Everything is configured via devenv - a nix module for creating declarative
        # developer environments. See https://devenv.sh/reference/options/ for a list
        # of all possible options.
        default = devenv.lib.mkShell {
@@ -95,10 +100,6 @@

            # For building the Synapse documentation website.
            mdbook
-
-           # For releasing Synapse
-           debian-devscripts # (`dch` for manipulating the Debian changelog)
-           libnotify # (the release script uses `notify-send` to tell you when CI jobs are done)
          ];

          # Install Python and manage a virtualenv with Poetry.
@@ -152,39 +153,11 @@
          # Redis is needed in order to run Synapse in worker mode.
          services.redis.enable = true;

-         # Configure and start Synapse. Before starting Synapse, this shell code:
-         # * generates a default homeserver.yaml config file if one does not exist, and
-         # * ensures a directory containing two additional homeserver config files exists;
-         #   one to configure using the development environment's PostgreSQL as the
-         #   database backend and another for enabling Redis support.
-         process.before = ''
-           python -m synapse.app.homeserver -c homeserver.yaml --generate-config --server-name=synapse.dev --report-stats=no
-           mkdir -p homeserver-config-overrides.d
-           cat > homeserver-config-overrides.d/database.yaml << EOF
-           ## Do not edit this file. This file is generated by flake.nix
-           database:
-             name: psycopg2
-             args:
-               user: synapse_user
-               database: synapse
-               host: $PGHOST
-               cp_min: 5
-               cp_max: 10
-           EOF
-           cat > homeserver-config-overrides.d/redis.yaml << EOF
-           ## Do not edit this file. This file is generated by flake.nix
-           redis:
-             enabled: true
-           EOF
-         '';
-         # Start synapse when `devenv up` is run.
-         processes.synapse.exec = "poetry run python -m synapse.app.homeserver -c homeserver.yaml --config-directory homeserver-config-overrides.d";

          # Define the perl modules we require to run SyTest.
          #
          # This list was compiled by cross-referencing https://metacpan.org/
          # with the modules defined in './cpanfile' and then finding the
-         # corresponding Nix packages on https://search.nixos.org/packages.
+         # corresponding nix packages on https://search.nixos.org/packages.
          #
          # This was done until `./install-deps.pl --dryrun` produced no output.
          env.PERL5LIB = "${with pkgs.perl536Packages; makePerlPath [
6 mypy.ini
@@ -13,9 +13,6 @@ no_implicit_optional = True
disallow_untyped_defs = True
strict_equality = True
warn_redundant_casts = True
-# Run mypy type checking with the minimum supported Python version to catch new usage
-# that isn't backwards-compatible (types, overloads, etc).
-python_version = 3.8

files =
  docker/,
@@ -32,6 +29,9 @@ warn_unused_ignores = False
[mypy-synapse.util.caches.treecache]
disallow_untyped_defs = False

+[mypy-tests.util.caches.test_descriptors]
+disallow_untyped_defs = False
+
;; Dependencies without annotations
;; Before ignoring a module, check to see if type stubs are available.
;; The `typeshed` project maintains stubs here:
162 poetry.lock generated
@@ -580,20 +580,20 @@ dev = ["Sphinx", "coverage", "flake8", "lxml", "lxml-stubs", "memory-profiler",

[[package]]
name = "furo"
-version = "2023.5.20"
+version = "2023.3.27"
description = "A clean customisable Sphinx documentation theme."
category = "dev"
optional = false
python-versions = ">=3.7"
files = [
-    {file = "furo-2023.5.20-py3-none-any.whl", hash = "sha256:594a8436ddfe0c071f3a9e9a209c314a219d8341f3f1af33fdf7c69544fab9e6"},
-    {file = "furo-2023.5.20.tar.gz", hash = "sha256:40e09fa17c6f4b22419d122e933089226dcdb59747b5b6c79363089827dea16f"},
+    {file = "furo-2023.3.27-py3-none-any.whl", hash = "sha256:4ab2be254a2d5e52792d0ca793a12c35582dd09897228a6dd47885dabd5c9521"},
+    {file = "furo-2023.3.27.tar.gz", hash = "sha256:b99e7867a5cc833b2b34d7230631dd6558c7a29f93071fdbb5709634bb33c5a5"},
]

[package.dependencies]
beautifulsoup4 = "*"
pygments = ">=2.7"
-sphinx = ">=6.0,<8.0"
+sphinx = ">=5.0,<7.0"
sphinx-basic-ng = "*"

[[package]]
@@ -1632,14 +1632,14 @@ files = [

[[package]]
name = "phonenumbers"
-version = "8.13.11"
+version = "8.13.7"
description = "Python version of Google's common library for parsing, formatting, storing and validating international phone numbers."
category = "main"
optional = false
python-versions = "*"
files = [
-    {file = "phonenumbers-8.13.11-py2.py3-none-any.whl", hash = "sha256:107469114fd297258a485bdf8238d0522cb392db1257faf2bf23384ecbdb0e8a"},
-    {file = "phonenumbers-8.13.11.tar.gz", hash = "sha256:3e3274d88cab3609b55ff5b93417075dbca2d13064f103fbf562e0ea1dda0f9a"},
+    {file = "phonenumbers-8.13.7-py2.py3-none-any.whl", hash = "sha256:d3e3555b38c89b121f5b2e917847003bdd07027569d758d5f40156c01aeac089"},
+    {file = "phonenumbers-8.13.7.tar.gz", hash = "sha256:253bb0e01250d21a11f2b42b3e6e161b7f6cb2ac440e2e2a95c1da71d221ee1a"},
]

[[package]]
@@ -1781,14 +1781,14 @@ test = ["appdirs (==1.4.4)", "covdefaults (>=2.2.2)", "pytest (>=7.2.1)", "pytes

[[package]]
name = "prometheus-client"
-version = "0.17.0"
+version = "0.16.0"
description = "Python client for the Prometheus monitoring system."
category = "main"
optional = false
python-versions = ">=3.6"
files = [
-    {file = "prometheus_client-0.17.0-py3-none-any.whl", hash = "sha256:a77b708cf083f4d1a3fb3ce5c95b4afa32b9c521ae363354a4a910204ea095ce"},
-    {file = "prometheus_client-0.17.0.tar.gz", hash = "sha256:9c3b26f1535945e85b8934fb374678d263137b78ef85f305b1156c7c881cd11b"},
+    {file = "prometheus_client-0.16.0-py3-none-any.whl", hash = "sha256:0836af6eb2c8f4fed712b2f279f6c0a8bbab29f9f4aa15276b91c7cb0d1616ab"},
+    {file = "prometheus_client-0.16.0.tar.gz", hash = "sha256:a03e35b359f14dd1630898543e2120addfdeacd1a6069c1367ae90fd93ad3f48"},
]

[package.extras]
@@ -1887,48 +1887,48 @@ files = [

[[package]]
name = "pydantic"
-version = "1.10.8"
+version = "1.10.7"
description = "Data validation and settings management using python type hints"
category = "main"
optional = false
python-versions = ">=3.7"
files = [
-    {file = "pydantic-1.10.8-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1243d28e9b05003a89d72e7915fdb26ffd1d39bdd39b00b7dbe4afae4b557f9d"},
-    {file = "pydantic-1.10.8-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c0ab53b609c11dfc0c060d94335993cc2b95b2150e25583bec37a49b2d6c6c3f"},
-    {file = "pydantic-1.10.8-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f9613fadad06b4f3bc5db2653ce2f22e0de84a7c6c293909b48f6ed37b83c61f"},
-    {file = "pydantic-1.10.8-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:df7800cb1984d8f6e249351139667a8c50a379009271ee6236138a22a0c0f319"},
-    {file = "pydantic-1.10.8-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:0c6fafa0965b539d7aab0a673a046466d23b86e4b0e8019d25fd53f4df62c277"},
-    {file = "pydantic-1.10.8-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e82d4566fcd527eae8b244fa952d99f2ca3172b7e97add0b43e2d97ee77f81ab"},
-    {file = "pydantic-1.10.8-cp310-cp310-win_amd64.whl", hash = "sha256:ab523c31e22943713d80d8d342d23b6f6ac4b792a1e54064a8d0cf78fd64e800"},
-    {file = "pydantic-1.10.8-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:666bdf6066bf6dbc107b30d034615d2627e2121506c555f73f90b54a463d1f33"},
-    {file = "pydantic-1.10.8-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:35db5301b82e8661fa9c505c800d0990bc14e9f36f98932bb1d248c0ac5cada5"},
-    {file = "pydantic-1.10.8-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f90c1e29f447557e9e26afb1c4dbf8768a10cc676e3781b6a577841ade126b85"},
-    {file = "pydantic-1.10.8-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:93e766b4a8226e0708ef243e843105bf124e21331694367f95f4e3b4a92bbb3f"},
-    {file = "pydantic-1.10.8-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:88f195f582851e8db960b4a94c3e3ad25692c1c1539e2552f3df7a9e972ef60e"},
-    {file = "pydantic-1.10.8-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:34d327c81e68a1ecb52fe9c8d50c8a9b3e90d3c8ad991bfc8f953fb477d42fb4"},
-    {file = "pydantic-1.10.8-cp311-cp311-win_amd64.whl", hash = "sha256:d532bf00f381bd6bc62cabc7d1372096b75a33bc197a312b03f5838b4fb84edd"},
-    {file = "pydantic-1.10.8-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:7d5b8641c24886d764a74ec541d2fc2c7fb19f6da2a4001e6d580ba4a38f7878"},
-    {file = "pydantic-1.10.8-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b1f6cb446470b7ddf86c2e57cd119a24959af2b01e552f60705910663af09a4"},
-    {file = "pydantic-1.10.8-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c33b60054b2136aef8cf190cd4c52a3daa20b2263917c49adad20eaf381e823b"},
-    {file = "pydantic-1.10.8-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:1952526ba40b220b912cdc43c1c32bcf4a58e3f192fa313ee665916b26befb68"},
-    {file = "pydantic-1.10.8-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:bb14388ec45a7a0dc429e87def6396f9e73c8c77818c927b6a60706603d5f2ea"},
-    {file = "pydantic-1.10.8-cp37-cp37m-win_amd64.whl", hash = "sha256:16f8c3e33af1e9bb16c7a91fc7d5fa9fe27298e9f299cff6cb744d89d573d62c"},
-    {file = "pydantic-1.10.8-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1ced8375969673929809d7f36ad322934c35de4af3b5e5b09ec967c21f9f7887"},
-    {file = "pydantic-1.10.8-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:93e6bcfccbd831894a6a434b0aeb1947f9e70b7468f274154d03d71fabb1d7c6"},
-    {file = "pydantic-1.10.8-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:191ba419b605f897ede9892f6c56fb182f40a15d309ef0142212200a10af4c18"},
-    {file = "pydantic-1.10.8-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:052d8654cb65174d6f9490cc9b9a200083a82cf5c3c5d3985db765757eb3b375"},
-    {file = "pydantic-1.10.8-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:ceb6a23bf1ba4b837d0cfe378329ad3f351b5897c8d4914ce95b85fba96da5a1"},
-    {file = "pydantic-1.10.8-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6f2e754d5566f050954727c77f094e01793bcb5725b663bf628fa6743a5a9108"},
-    {file = "pydantic-1.10.8-cp38-cp38-win_amd64.whl", hash = "sha256:6a82d6cda82258efca32b40040228ecf43a548671cb174a1e81477195ed3ed56"},
-    {file = "pydantic-1.10.8-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3e59417ba8a17265e632af99cc5f35ec309de5980c440c255ab1ca3ae96a3e0e"},
-    {file = "pydantic-1.10.8-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:84d80219c3f8d4cad44575e18404099c76851bc924ce5ab1c4c8bb5e2a2227d0"},
-    {file = "pydantic-1.10.8-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2e4148e635994d57d834be1182a44bdb07dd867fa3c2d1b37002000646cc5459"},
-    {file = "pydantic-1.10.8-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:12f7b0bf8553e310e530e9f3a2f5734c68699f42218bf3568ef49cd9b0e44df4"},
-    {file = "pydantic-1.10.8-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:42aa0c4b5c3025483240a25b09f3c09a189481ddda2ea3a831a9d25f444e03c1"},
-    {file = "pydantic-1.10.8-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:17aef11cc1b997f9d574b91909fed40761e13fac438d72b81f902226a69dac01"},
-    {file = "pydantic-1.10.8-cp39-cp39-win_amd64.whl", hash = "sha256:66a703d1983c675a6e0fed8953b0971c44dba48a929a2000a493c3772eb61a5a"},
-    {file = "pydantic-1.10.8-py3-none-any.whl", hash = "sha256:7456eb22ed9aaa24ff3e7b4757da20d9e5ce2a81018c1b3ebd81a0b88a18f3b2"},
-    {file = "pydantic-1.10.8.tar.gz", hash = "sha256:1410275520dfa70effadf4c21811d755e7ef9bb1f1d077a21958153a92c8d9ca"},
+    {file = "pydantic-1.10.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e79e999e539872e903767c417c897e729e015872040e56b96e67968c3b918b2d"},
+    {file = "pydantic-1.10.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:01aea3a42c13f2602b7ecbbea484a98169fb568ebd9e247593ea05f01b884b2e"},
+    {file = "pydantic-1.10.7-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:516f1ed9bc2406a0467dd777afc636c7091d71f214d5e413d64fef45174cfc7a"},
+    {file = "pydantic-1.10.7-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae150a63564929c675d7f2303008d88426a0add46efd76c3fc797cd71cb1b46f"},
+    {file = "pydantic-1.10.7-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:ecbbc51391248116c0a055899e6c3e7ffbb11fb5e2a4cd6f2d0b93272118a209"},
+    {file = "pydantic-1.10.7-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f4a2b50e2b03d5776e7f21af73e2070e1b5c0d0df255a827e7c632962f8315af"},
+    {file = "pydantic-1.10.7-cp310-cp310-win_amd64.whl", hash = "sha256:a7cd2251439988b413cb0a985c4ed82b6c6aac382dbaff53ae03c4b23a70e80a"},
+    {file = "pydantic-1.10.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:68792151e174a4aa9e9fc1b4e653e65a354a2fa0fed169f7b3d09902ad2cb6f1"},
+    {file = "pydantic-1.10.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:dfe2507b8ef209da71b6fb5f4e597b50c5a34b78d7e857c4f8f3115effaef5fe"},
+    {file = "pydantic-1.10.7-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10a86d8c8db68086f1e30a530f7d5f83eb0685e632e411dbbcf2d5c0150e8dcd"},
+    {file = "pydantic-1.10.7-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d75ae19d2a3dbb146b6f324031c24f8a3f52ff5d6a9f22f0683694b3afcb16fb"},
+    {file = "pydantic-1.10.7-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:464855a7ff7f2cc2cf537ecc421291b9132aa9c79aef44e917ad711b4a93163b"},
+    {file = "pydantic-1.10.7-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:193924c563fae6ddcb71d3f06fa153866423ac1b793a47936656e806b64e24ca"},
+    {file = "pydantic-1.10.7-cp311-cp311-win_amd64.whl", hash = "sha256:b4a849d10f211389502059c33332e91327bc154acc1845f375a99eca3afa802d"},
+    {file = "pydantic-1.10.7-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:cc1dde4e50a5fc1336ee0581c1612215bc64ed6d28d2c7c6f25d2fe3e7c3e918"},
+    {file = "pydantic-1.10.7-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e0cfe895a504c060e5d36b287ee696e2fdad02d89e0d895f83037245218a87fe"},
+    {file = "pydantic-1.10.7-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:670bb4683ad1e48b0ecb06f0cfe2178dcf74ff27921cdf1606e527d2617a81ee"},
+    {file = "pydantic-1.10.7-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:950ce33857841f9a337ce07ddf46bc84e1c4946d2a3bba18f8280297157a3fd1"},
+    {file = "pydantic-1.10.7-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:c15582f9055fbc1bfe50266a19771bbbef33dd28c45e78afbe1996fd70966c2a"},
+    {file = "pydantic-1.10.7-cp37-cp37m-win_amd64.whl", hash = "sha256:82dffb306dd20bd5268fd6379bc4bfe75242a9c2b79fec58e1041fbbdb1f7914"},
+    {file = "pydantic-1.10.7-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8c7f51861d73e8b9ddcb9916ae7ac39fb52761d9ea0df41128e81e2ba42886cd"},
+    {file = "pydantic-1.10.7-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6434b49c0b03a51021ade5c4daa7d70c98f7a79e95b551201fff682fc1661245"},
+    {file = "pydantic-1.10.7-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64d34ab766fa056df49013bb6e79921a0265204c071984e75a09cbceacbbdd5d"},
+    {file = "pydantic-1.10.7-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:701daea9ffe9d26f97b52f1d157e0d4121644f0fcf80b443248434958fd03dc3"},
+    {file = "pydantic-1.10.7-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:cf135c46099ff3f919d2150a948ce94b9ce545598ef2c6c7bf55dca98a304b52"},
+    {file = "pydantic-1.10.7-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b0f85904f73161817b80781cc150f8b906d521fa11e3cdabae19a581c3606209"},
+    {file = "pydantic-1.10.7-cp38-cp38-win_amd64.whl", hash = "sha256:9f6f0fd68d73257ad6685419478c5aece46432f4bdd8d32c7345f1986496171e"},
+    {file = "pydantic-1.10.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c230c0d8a322276d6e7b88c3f7ce885f9ed16e0910354510e0bae84d54991143"},
+    {file = "pydantic-1.10.7-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:976cae77ba6a49d80f461fd8bba183ff7ba79f44aa5cfa82f1346b5626542f8e"},
+    {file = "pydantic-1.10.7-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7d45fc99d64af9aaf7e308054a0067fdcd87ffe974f2442312372dfa66e1001d"},
+    {file = "pydantic-1.10.7-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d2a5ebb48958754d386195fe9e9c5106f11275867051bf017a8059410e9abf1f"},
+    {file = "pydantic-1.10.7-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:abfb7d4a7cd5cc4e1d1887c43503a7c5dd608eadf8bc615413fc498d3e4645cd"},
+    {file = "pydantic-1.10.7-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:80b1fab4deb08a8292d15e43a6edccdffa5377a36a4597bb545b93e79c5ff0a5"},
+    {file = "pydantic-1.10.7-cp39-cp39-win_amd64.whl", hash = "sha256:d71e69699498b020ea198468e2480a2f1e7433e32a3a99760058c6520e2bea7e"},
+    {file = "pydantic-1.10.7-py3-none-any.whl", hash = "sha256:0cd181f1d0b1d00e2b705f1bf1ac7799a2d938cce3376b8007df62b29be3c2c6"},
+    {file = "pydantic-1.10.7.tar.gz", hash = "sha256:cfc83c0678b6ba51b0532bea66860617c4cd4251ecf76e9846fa5a9f3454e97e"},
]

[package.dependencies]
@@ -1940,14 +1940,14 @@ email = ["email-validator (>=1.0.3)"]

[[package]]
name = "pygithub"
-version = "1.58.2"
+version = "1.58.1"
description = "Use the full Github API v3"
category = "dev"
optional = false
python-versions = ">=3.7"
files = [
-    {file = "PyGithub-1.58.2-py3-none-any.whl", hash = "sha256:f435884af617c6debaa76cbc355372d1027445a56fbc39972a3b9ed4968badc8"},
-    {file = "PyGithub-1.58.2.tar.gz", hash = "sha256:1e6b1b7afe31f75151fb81f7ab6b984a7188a852bdb123dbb9ae90023c3ce60f"},
+    {file = "PyGithub-1.58.1-py3-none-any.whl", hash = "sha256:4e7fe9c3ec30d5fde5b4fbb97f18821c9dbf372bf6df337fe66f6689a65e0a83"},
+    {file = "PyGithub-1.58.1.tar.gz", hash = "sha256:7d528b4ad92bc13122129fafd444ce3d04c47d2d801f6446b6e6ee2d410235b3"},
]

[package.dependencies]
@@ -2251,21 +2251,21 @@ md = ["cmarkgfm (>=0.8.0)"]

[[package]]
name = "requests"
-version = "2.31.0"
+version = "2.28.2"
description = "Python HTTP for Humans."
category = "main"
optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.7, <4"
files = [
-    {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"},
-    {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"},
+    {file = "requests-2.28.2-py3-none-any.whl", hash = "sha256:64299f4909223da747622c030b781c0d7811e359c37124b4bd368fb8c6518baa"},
+    {file = "requests-2.28.2.tar.gz", hash = "sha256:98b1b2782e3c6c4904938b84c0eb932721069dfdb9134313beff7c83c2df24bf"},
]

[package.dependencies]
certifi = ">=2017.4.17"
charset-normalizer = ">=2,<4"
idna = ">=2.5,<4"
-urllib3 = ">=1.21.1,<3"
+urllib3 = ">=1.21.1,<1.27"

[package.extras]
socks = ["PySocks (>=1.5.6,!=1.5.7)"]
@@ -2565,21 +2565,21 @@ files = [

[[package]]
name = "sphinx"
-version = "6.2.1"
+version = "6.1.3"
description = "Python documentation generator"
category = "dev"
optional = false
python-versions = ">=3.8"
files = [
-    {file = "Sphinx-6.2.1.tar.gz", hash = "sha256:6d56a34697bb749ffa0152feafc4b19836c755d90a7c59b72bc7dfd371b9cc6b"},
-    {file = "sphinx-6.2.1-py3-none-any.whl", hash = "sha256:97787ff1fa3256a3eef9eda523a63dbf299f7b47e053cfcf684a1c2a8380c912"},
+    {file = "Sphinx-6.1.3.tar.gz", hash = "sha256:0dac3b698538ffef41716cf97ba26c1c7788dba73ce6f150c1ff5b4720786dd2"},
+    {file = "sphinx-6.1.3-py3-none-any.whl", hash = "sha256:807d1cb3d6be87eb78a381c3e70ebd8d346b9a25f3753e9947e866b2786865fc"},
]

[package.dependencies]
alabaster = ">=0.7,<0.8"
babel = ">=2.9"
colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""}
-docutils = ">=0.18.1,<0.20"
+docutils = ">=0.18,<0.20"
imagesize = ">=1.3"
importlib-metadata = {version = ">=4.8", markers = "python_version < \"3.10\""}
Jinja2 = ">=3.0"
@@ -2597,7 +2597,7 @@ sphinxcontrib-serializinghtml = ">=1.1.5"

[package.extras]
docs = ["sphinxcontrib-websupport"]
lint = ["docutils-stubs", "flake8 (>=3.5.0)", "flake8-simplify", "isort", "mypy (>=0.990)", "ruff", "sphinx-lint", "types-requests"]
-test = ["cython", "filelock", "html5lib", "pytest (>=4.6)"]
+test = ["cython", "html5lib", "pytest (>=4.6)"]

[[package]]
name = "sphinx-autodoc2"
@@ -2998,26 +2998,26 @@ files = [

[[package]]
name = "types-bleach"
-version = "6.0.0.3"
+version = "6.0.0.1"
description = "Typing stubs for bleach"
category = "dev"
optional = false
python-versions = "*"
files = [
-    {file = "types-bleach-6.0.0.3.tar.gz", hash = "sha256:8ce7896d4f658c562768674ffcf07492c7730e128018f03edd163ff912bfadee"},
-    {file = "types_bleach-6.0.0.3-py3-none-any.whl", hash = "sha256:d43eaf30a643ca824e16e2dcdb0c87ef9226237e2fa3ac4732a50cb3f32e145f"},
+    {file = "types-bleach-6.0.0.1.tar.gz", hash = "sha256:43d9129deb9e82918747437edf78f09ff440f2973f4702625b61994f3e698518"},
+    {file = "types_bleach-6.0.0.1-py3-none-any.whl", hash = "sha256:440df967254007be80bb0f4d851f026c29c709cc48359bf4935d2b2f3a6f9f90"},
]

[[package]]
name = "types-commonmark"
-version = "0.9.2.3"
+version = "0.9.2.2"
description = "Typing stubs for commonmark"
category = "dev"
optional = false
python-versions = "*"
files = [
-    {file = "types-commonmark-0.9.2.3.tar.gz", hash = "sha256:42769a2c194fd5b49fd9eedfd4a83cd1d2514c6d0a36f00f5c5ffe0b6a2d2fcf"},
-    {file = "types_commonmark-0.9.2.3-py3-none-any.whl", hash = "sha256:b575156e1b8a292d43acb36f861110b85c4bc7aa53bbfb5ac64addec15d18cfa"},
+    {file = "types-commonmark-0.9.2.2.tar.gz", hash = "sha256:f3259350634c2ce68ae503398430482f7cf44e5cae3d344995e916fbf453b4be"},
+    {file = "types_commonmark-0.9.2.2-py3-none-any.whl", hash = "sha256:d3d878692615e7fbe47bf19ba67497837b135812d665012a3d42219c1f2c3a61"},
]

[[package]]
@@ -3058,26 +3058,26 @@ files = [

[[package]]
name = "types-pillow"
-version = "9.5.0.4"
+version = "9.5.0.2"
description = "Typing stubs for Pillow"
category = "dev"
optional = false
python-versions = "*"
files = [
-    {file = "types-Pillow-9.5.0.4.tar.gz", hash = "sha256:f1b6af47abd151847ee25911ffeba784899bc7dc7f9eba8ca6a5aac522b012ef"},
-    {file = "types_Pillow-9.5.0.4-py3-none-any.whl", hash = "sha256:69427d9fa4320ff6e30f00fb9c0dd71185dc0a16de4757774220104759483466"},
+    {file = "types-Pillow-9.5.0.2.tar.gz", hash = "sha256:b3f9f621f259566c19c1deca21901017c8b1e3e200ed2e49e0a2d83c0a5175db"},
+    {file = "types_Pillow-9.5.0.2-py3-none-any.whl", hash = "sha256:58fdebd0ffa2353ecccdd622adde23bce89da5c0c8b96c34f2d1eca7b7e42d0e"},
]

[[package]]
name = "types-psycopg2"
-version = "2.9.21.10"
+version = "2.9.21.9"
description = "Typing stubs for psycopg2"
category = "dev"
optional = false
python-versions = "*"
files = [
-    {file = "types-psycopg2-2.9.21.10.tar.gz", hash = "sha256:c2600892312ae1c34e12f145749795d93dc4eac3ef7dbf8a9c1bfd45385e80d7"},
-    {file = "types_psycopg2-2.9.21.10-py3-none-any.whl", hash = "sha256:918224a0731a3650832e46633e720703b5beef7693a064e777d9748654fcf5e5"},
+    {file = "types-psycopg2-2.9.21.9.tar.gz", hash = "sha256:388dc36a04551632289c4aaf1fc5b91e147654b165db896d094844e216f22bf5"},
+    {file = "types_psycopg2-2.9.21.9-py3-none-any.whl", hash = "sha256:0332525fb9d3031d3da46f091e7d40b2c4d4958e9c00d2b4c1eaaa9f8ef9de4e"},
]

[[package]]
@@ -3097,26 +3097,26 @@ cryptography = ">=35.0.0"

[[package]]
name = "types-pyyaml"
-version = "6.0.12.10"
+version = "6.0.12.9"
description = "Typing stubs for PyYAML"
category = "dev"
optional = false
python-versions = "*"
files = [
-    {file = "types-PyYAML-6.0.12.10.tar.gz", hash = "sha256:ebab3d0700b946553724ae6ca636ea932c1b0868701d4af121630e78d695fc97"},
-    {file = "types_PyYAML-6.0.12.10-py3-none-any.whl", hash = "sha256:662fa444963eff9b68120d70cda1af5a5f2aa57900003c2006d7626450eaae5f"},
+    {file = "types-PyYAML-6.0.12.9.tar.gz", hash = "sha256:c51b1bd6d99ddf0aa2884a7a328810ebf70a4262c292195d3f4f9a0005f9eeb6"},
+    {file = "types_PyYAML-6.0.12.9-py3-none-any.whl", hash = "sha256:5aed5aa66bd2d2e158f75dda22b059570ede988559f030cf294871d3b647e3e8"},
]

[[package]]
name = "types-requests"
-version = "2.31.0.0"
+version = "2.30.0.0"
description = "Typing stubs for requests"
category = "dev"
optional = false
python-versions = "*"
files = [
-    {file = "types-requests-2.31.0.0.tar.gz", hash = "sha256:c1c29d20ab8d84dff468d7febfe8e0cb0b4664543221b386605e14672b44ea25"},
-    {file = "types_requests-2.31.0.0-py3-none-any.whl", hash = "sha256:7c5cea7940f8e92ec560bbc468f65bf684aa3dcf0554a6f8c4710f5f708dc598"},
+    {file = "types-requests-2.30.0.0.tar.gz", hash = "sha256:dec781054324a70ba64430ae9e62e7e9c8e4618c185a5cb3f87a6738251b5a31"},
+    {file = "types_requests-2.30.0.0-py3-none-any.whl", hash = "sha256:c6cf08e120ca9f0dc4fa4e32c3f953c3fba222bcc1db6b97695bce8da1ba9864"},
]

[package.dependencies]
@@ -3124,14 +3124,14 @@ types-urllib3 = "*"

[[package]]
name = "types-setuptools"
-version = "67.8.0.0"
+version = "67.7.0.1"
description = "Typing stubs for setuptools"
category = "dev"
optional = false
python-versions = "*"
files = [
-    {file = "types-setuptools-67.8.0.0.tar.gz", hash = "sha256:95c9ed61871d6c0e258433373a4e1753c0a7c3627a46f4d4058c7b5a08ab844f"},
-    {file = "types_setuptools-67.8.0.0-py3-none-any.whl", hash = "sha256:6df73340d96b238a4188b7b7668814b37e8018168aef1eef94a3b1872e3f60ff"},
+    {file = "types-setuptools-67.7.0.1.tar.gz", hash = "sha256:980a2651b2b019809817e1585071596b87fbafcb54433ff3b12445461db23790"},
+    {file = "types_setuptools-67.7.0.1-py3-none-any.whl", hash = "sha256:471a4ecf6984ffada63ffcfa884bfcb62718bd2d1a1acf8ee5513ec99789ed5e"},
]

[[package]]
pyproject.toml
@@ -89,7 +89,7 @@ manifest-path = "rust/Cargo.toml"

[tool.poetry]
name = "matrix-synapse"
-version = "1.85.2"
+version = "1.83.0"
description = "Homeserver for the Matrix decentralised comms protocol"
authors = ["Matrix.org Team and Contributors <packages@matrix.org>"]
license = "Apache-2.0"
@@ -368,7 +368,7 @@ furo = ">=2022.12.7,<2024.0.0"
# system changes.
# We are happy to raise these upper bounds upon request,
# provided we check that it's safe to do so (i.e. that CI passes).
-requires = ["poetry-core>=1.1.0,<=1.6.0", "setuptools_rust>=1.3,<=1.6.0"]
+requires = ["poetry-core>=1.0.0,<=1.5.0", "setuptools_rust>=1.3,<=1.5.2"]
build-backend = "poetry.core.masonry.api"
scripts-dev/mypy_synapse_plugin.py
@@ -18,11 +18,10 @@ can crop up, e.g the cache descriptors.

from typing import Callable, Optional, Type

-from mypy.erasetype import remove_instance_last_known_values
from mypy.nodes import ARG_NAMED_OPT
from mypy.plugin import MethodSigContext, Plugin
from mypy.typeops import bind_self
-from mypy.types import CallableType, Instance, NoneType, UnionType
+from mypy.types import CallableType, NoneType, UnionType


class SynapsePlugin(Plugin):
@@ -93,41 +92,10 @@ def cached_function_method_signature(ctx: MethodSigContext) -> CallableType:
    arg_names.append("on_invalidate")
    arg_kinds.append(ARG_NAMED_OPT)  # Arg is an optional kwarg.

-    # Finally we ensure the return type is a Deferred.
-    if (
-        isinstance(signature.ret_type, Instance)
-        and signature.ret_type.type.fullname == "twisted.internet.defer.Deferred"
-    ):
-        # If it is already a Deferred, nothing to do.
-        ret_type = signature.ret_type
-    else:
-        ret_arg = None
-        if isinstance(signature.ret_type, Instance):
-            # If a coroutine, wrap the coroutine's return type in a Deferred.
-            if signature.ret_type.type.fullname == "typing.Coroutine":
-                ret_arg = signature.ret_type.args[2]
-
-            # If an awaitable, wrap the awaitable's final value in a Deferred.
-            elif signature.ret_type.type.fullname == "typing.Awaitable":
-                ret_arg = signature.ret_type.args[0]
-
-        # Otherwise, wrap the return value in a Deferred.
-        if ret_arg is None:
-            ret_arg = signature.ret_type
-
-        # This should be able to use ctx.api.named_generic_type, but that doesn't seem
-        # to find the correct symbol for anything more than 1 module deep.
-        #
-        # modules is not part of CheckerPluginInterface. The following is a combination
-        # of TypeChecker.named_generic_type and TypeChecker.lookup_typeinfo.
-        sym = ctx.api.modules["twisted.internet.defer"].names.get("Deferred")  # type: ignore[attr-defined]
-        ret_type = Instance(sym.node, [remove_instance_last_known_values(ret_arg)])
-
    signature = signature.copy_modified(
        arg_types=arg_types,
        arg_names=arg_names,
        arg_kinds=arg_kinds,
-        ret_type=ret_type,
    )

    return signature
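The removed block rewrites the return type that call sites of `@cached`-style descriptors see. For intuition, here is a minimal runnable sketch; note the `cached` decorator below is a stand-in written for this example, not Synapse's real cache descriptor:

```python
# Minimal sketch: a stand-in for Synapse's cache descriptors, which wrap an
# async function so that callers receive a Twisted Deferred, not a coroutine.
# This is why the (removed) plugin code rewrote signatures to Deferred[...].
from twisted.internet import defer


def cached(f):
    def wrapper(*args, **kwargs):
        # The real descriptor also caches results; here we only wrap the
        # coroutine in a Deferred, mirroring the type the plugin reports.
        return defer.ensureDeferred(f(*args, **kwargs))

    return wrapper


@cached
async def get_display_name(user_id: str) -> str:
    return user_id.upper()


# At runtime (and, with the plugin, in mypy's eyes) this is a Deferred[str]:
d = get_display_name("@alice:example.org")
```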
scripts-dev/release.py
@@ -27,7 +27,7 @@ import time
import urllib.request
from os import path
from tempfile import TemporaryDirectory
-from typing import Any, List, Match, Optional, Union
+from typing import Any, List, Optional

import attr
import click
@@ -233,7 +233,7 @@ def _prepare() -> None:
    subprocess.check_output(["poetry", "version", new_version])

    # Generate changelogs.
-    generate_and_write_changelog(synapse_repo, current_version, new_version)
+    generate_and_write_changelog(current_version, new_version)

    # Generate debian changelogs
    if parsed_new_version.pre is not None:
@@ -814,7 +814,7 @@ def get_changes_for_version(wanted_version: version.Version) -> str:


def generate_and_write_changelog(
-    repo: Repo, current_version: version.Version, new_version: str
+    current_version: version.Version, new_version: str
) -> None:
    # We do this by getting a draft so that we can edit it before writing to the
    # changelog.
@@ -827,10 +827,6 @@ def generate_and_write_changelog(
    new_changes = new_changes.replace(
        "No significant changes.", f"No significant changes since {current_version}."
    )
-    new_changes += build_dependabot_changelog(
-        repo,
-        current_version,
-    )

    # Prepend changes to changelog
    with open("CHANGES.md", "r+") as f:
@@ -845,49 +841,5 @@ def generate_and_write_changelog(
        os.remove(filename)


-def build_dependabot_changelog(repo: Repo, current_version: version.Version) -> str:
-    """Summarise dependabot commits between `current_version` and `release_branch`.
-
-    Returns an empty string if there have been no such commits; otherwise outputs a
-    third-level markdown header followed by an unordered list."""
-    last_release_commit = repo.tag("v" + str(current_version)).commit
-    rev_spec = f"{last_release_commit.hexsha}.."
-    commits = list(git.objects.Commit.iter_items(repo, rev_spec))
-    messages = []
-    for commit in reversed(commits):
-        if commit.author.name == "dependabot[bot]":
-            message: Union[str, bytes] = commit.message
-            if isinstance(message, bytes):
-                message = message.decode("utf-8")
-            messages.append(message.split("\n", maxsplit=1)[0])
-
-    if not messages:
-        print(f"No dependabot commits in range {rev_spec}", file=sys.stderr)
-        return ""
-
-    messages.sort()
-
-    def replacer(match: Match[str]) -> str:
-        desc = match.group(1)
-        number = match.group(2)
-        return f"* {desc}. ([\\#{number}](https://github.com/matrix-org/synapse/issues/{number}))"
-
-    for i, message in enumerate(messages):
-        messages[i] = re.sub(r"(.*) \(#(\d+)\)$", replacer, message)
-    messages.insert(0, "### Updates to locked dependencies\n")
-    # Add an extra blank line to the bottom of the section
-    messages.append("")
-    return "\n".join(messages)
-
-
-@cli.command()
-@click.argument("since")
-def test_dependabot_changelog(since: str) -> None:
-    """Test building the dependabot changelog.
-
-    Summarises all dependabot commits between the SINCE tag and the current git HEAD."""
-    print(build_dependabot_changelog(git.Repo("."), version.Version(since)))
-
-
if __name__ == "__main__":
    cli()
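For reference, the removed `replacer` turns a squashed-merge commit title into a changelog bullet. A quick self-contained illustration; the commit message and PR number below are made up for the example:

```python
# Illustration of the rewrite done inside build_dependabot_changelog().
import re

line = "Bump foo from 1.0.0 to 1.1.0 (#12345)"  # made-up commit title
rewritten = re.sub(
    r"(.*) \(#(\d+)\)$",
    lambda m: f"* {m.group(1)}. ([\\#{m.group(2)}](https://github.com/matrix-org/synapse/issues/{m.group(2)}))",
    line,
)
print(rewritten)
# * Bump foo from 1.0.0 to 1.1.0. ([\#12345](https://github.com/matrix-org/synapse/issues/12345))
```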
stubs/txredisapi.pyi
@@ -61,9 +61,6 @@ def lazyConnection(
# most methods to it via ConnectionHandler.__getattr__.
class ConnectionHandler(RedisProtocol):
    def disconnect(self) -> "Deferred[None]": ...
    def __repr__(self) -> str: ...

-class UnixConnectionHandler(ConnectionHandler): ...
-
class RedisFactory(protocol.ReconnectingClientFactory):
    continueTrying: bool
synapse/api/filtering.py
@@ -128,7 +128,20 @@ USER_FILTER_SCHEMA = {
        "account_data": {"$ref": "#/definitions/filter"},
        "room": {"$ref": "#/definitions/room_filter"},
        "event_format": {"type": "string", "enum": ["client", "federation"]},
-        "event_fields": {"type": "array", "items": {"type": "string"}},
+        "event_fields": {
+            "type": "array",
+            "items": {
+                "type": "string",
+                # Don't allow '\\' in event field filters. This makes matching
+                # events a lot easier as we can then use a negative lookbehind
+                # assertion to split '\.' If we allowed \\ then it would
+                # incorrectly split '\\.' See synapse.events.utils.serialize_event
+                #
+                # Note that because this is a regular expression, we have to escape
+                # each backslash in the pattern.
+                "pattern": r"^((?!\\\\).)*$",
+            },
+        },
    },
    "additionalProperties": True,  # Allow new fields for forward compatibility
}
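The comment above refers to splitting dotted field paths with a negative lookbehind; a small runnable illustration (the field path is made up, though `m.relates_to` is a real Matrix key used here for flavour):

```python
# Why '\\' is disallowed: field paths like "content.m\.relates_to" are split
# on '.' except where the dot is escaped as '\.', via a negative lookbehind.
# A literal backslash before the dot would defeat that trick.
import re

field = r"content.m\.relates_to"
parts = [p.replace(r"\.", ".") for p in re.split(r"(?<!\\)\.", field)]
print(parts)  # ['content', 'm.relates_to']
```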
synapse/api/room_versions.py
@@ -96,15 +96,11 @@ class RoomVersion:
    msc2716_historical: bool
    # MSC2716: Adds support for redacting "insertion", "chunk", and "marker" events
    msc2716_redactions: bool
-    # MSC3389: Protect relation information from redaction.
-    msc3389_relation_redactions: bool
    # MSC3787: Adds support for a `knock_restricted` join rule, mixing concepts of
    # knocks and restricted join rules into the same join condition.
    msc3787_knock_restricted_join_rule: bool
    # MSC3667: Enforce integer power levels
    msc3667_int_only_power_levels: bool
-    # MSC3821: Do not redact the third_party_invite content field for membership events.
-    msc3821_redaction_rules: bool
    # MSC3931: Adds a push rule condition for "room version feature flags", making
    # some push rules room version dependent. Note that adding a flag to this list
    # is not enough to mark it "supported": the push rule evaluator also needs to
@@ -132,10 +128,8 @@ class RoomVersions:
        msc2403_knocking=False,
        msc2716_historical=False,
        msc2716_redactions=False,
-        msc3389_relation_redactions=False,
        msc3787_knock_restricted_join_rule=False,
        msc3667_int_only_power_levels=False,
-        msc3821_redaction_rules=False,
        msc3931_push_features=(),
        msc3989_redaction_rules=False,
    )
@@ -155,10 +149,8 @@ class RoomVersions:
        msc2403_knocking=False,
        msc2716_historical=False,
        msc2716_redactions=False,
-        msc3389_relation_redactions=False,
        msc3787_knock_restricted_join_rule=False,
        msc3667_int_only_power_levels=False,
-        msc3821_redaction_rules=False,
        msc3931_push_features=(),
        msc3989_redaction_rules=False,
    )
@@ -178,10 +170,8 @@ class RoomVersions:
        msc2403_knocking=False,
        msc2716_historical=False,
        msc2716_redactions=False,
-        msc3389_relation_redactions=False,
        msc3787_knock_restricted_join_rule=False,
        msc3667_int_only_power_levels=False,
-        msc3821_redaction_rules=False,
        msc3931_push_features=(),
        msc3989_redaction_rules=False,
    )
@@ -201,10 +191,8 @@ class RoomVersions:
        msc2403_knocking=False,
        msc2716_historical=False,
        msc2716_redactions=False,
-        msc3389_relation_redactions=False,
        msc3787_knock_restricted_join_rule=False,
        msc3667_int_only_power_levels=False,
-        msc3821_redaction_rules=False,
        msc3931_push_features=(),
        msc3989_redaction_rules=False,
    )
@@ -224,10 +212,8 @@ class RoomVersions:
        msc2403_knocking=False,
        msc2716_historical=False,
        msc2716_redactions=False,
-        msc3389_relation_redactions=False,
        msc3787_knock_restricted_join_rule=False,
        msc3667_int_only_power_levels=False,
-        msc3821_redaction_rules=False,
        msc3931_push_features=(),
        msc3989_redaction_rules=False,
    )
@@ -247,10 +233,8 @@ class RoomVersions:
        msc2403_knocking=False,
        msc2716_historical=False,
        msc2716_redactions=False,
-        msc3389_relation_redactions=False,
        msc3787_knock_restricted_join_rule=False,
        msc3667_int_only_power_levels=False,
-        msc3821_redaction_rules=False,
        msc3931_push_features=(),
        msc3989_redaction_rules=False,
    )
@@ -270,10 +254,8 @@ class RoomVersions:
        msc2403_knocking=False,
        msc2716_historical=False,
        msc2716_redactions=False,
-        msc3389_relation_redactions=False,
        msc3787_knock_restricted_join_rule=False,
        msc3667_int_only_power_levels=False,
-        msc3821_redaction_rules=False,
        msc3931_push_features=(),
        msc3989_redaction_rules=False,
    )
@@ -293,10 +275,8 @@ class RoomVersions:
        msc2403_knocking=True,
        msc2716_historical=False,
        msc2716_redactions=False,
-        msc3389_relation_redactions=False,
        msc3787_knock_restricted_join_rule=False,
        msc3667_int_only_power_levels=False,
-        msc3821_redaction_rules=False,
        msc3931_push_features=(),
        msc3989_redaction_rules=False,
    )
@@ -316,10 +296,8 @@ class RoomVersions:
        msc2403_knocking=True,
        msc2716_historical=False,
        msc2716_redactions=False,
-        msc3389_relation_redactions=False,
        msc3787_knock_restricted_join_rule=False,
        msc3667_int_only_power_levels=False,
-        msc3821_redaction_rules=False,
        msc3931_push_features=(),
        msc3989_redaction_rules=False,
    )
@@ -339,10 +317,8 @@ class RoomVersions:
        msc2403_knocking=True,
        msc2716_historical=False,
        msc2716_redactions=False,
-        msc3389_relation_redactions=False,
        msc3787_knock_restricted_join_rule=False,
        msc3667_int_only_power_levels=False,
-        msc3821_redaction_rules=False,
        msc3931_push_features=(),
        msc3989_redaction_rules=False,
    )
@@ -362,33 +338,8 @@ class RoomVersions:
        msc2403_knocking=True,
        msc2716_historical=False,
        msc2716_redactions=False,
-        msc3389_relation_redactions=False,
        msc3787_knock_restricted_join_rule=True,
        msc3667_int_only_power_levels=False,
-        msc3821_redaction_rules=False,
        msc3931_push_features=(),
        msc3989_redaction_rules=False,
    )
-    MSC3821 = RoomVersion(
-        "org.matrix.msc3821.opt1",
-        RoomDisposition.UNSTABLE,
-        EventFormatVersions.ROOM_V4_PLUS,
-        StateResolutionVersions.V2,
-        enforce_key_validity=True,
-        special_case_aliases_auth=False,
-        strict_canonicaljson=True,
-        limit_notifications_power_levels=True,
-        msc2175_implicit_room_creator=False,
-        msc2176_redaction_rules=False,
-        msc3083_join_rules=True,
-        msc3375_redaction_rules=True,
-        msc2403_knocking=True,
-        msc2716_historical=False,
-        msc2716_redactions=False,
-        msc3389_relation_redactions=False,
-        msc3787_knock_restricted_join_rule=False,
-        msc3667_int_only_power_levels=False,
-        msc3821_redaction_rules=True,
-        msc3931_push_features=(),
-        msc3989_redaction_rules=False,
-    )
@@ -408,10 +359,8 @@ class RoomVersions:
        msc2403_knocking=True,
        msc2716_historical=False,
        msc2716_redactions=False,
-        msc3389_relation_redactions=False,
        msc3787_knock_restricted_join_rule=True,
        msc3667_int_only_power_levels=True,
-        msc3821_redaction_rules=False,
        msc3931_push_features=(),
        msc3989_redaction_rules=False,
    )
@@ -431,10 +380,8 @@ class RoomVersions:
        msc2403_knocking=True,
        msc2716_historical=True,
        msc2716_redactions=True,
-        msc3389_relation_redactions=False,
        msc3787_knock_restricted_join_rule=False,
        msc3667_int_only_power_levels=False,
-        msc3821_redaction_rules=False,
        msc3931_push_features=(),
        msc3989_redaction_rules=False,
    )
@@ -455,10 +402,8 @@ class RoomVersions:
        msc2403_knocking=True,
        msc2716_historical=False,
        msc2716_redactions=False,
-        msc3389_relation_redactions=False,
        msc3787_knock_restricted_join_rule=True,
        msc3667_int_only_power_levels=True,
-        msc3821_redaction_rules=False,
        msc3931_push_features=(PushRuleRoomFlag.EXTENSIBLE_EVENTS,),
        msc3989_redaction_rules=False,
    )
@@ -478,37 +423,11 @@ class RoomVersions:
        msc2403_knocking=True,
        msc2716_historical=False,
        msc2716_redactions=False,
-        msc3389_relation_redactions=False,
        msc3787_knock_restricted_join_rule=True,
        msc3667_int_only_power_levels=True,
-        msc3821_redaction_rules=False,
        msc3931_push_features=(),
        msc3989_redaction_rules=True,
    )
-    MSC3820opt2 = RoomVersion(
-        # Based upon v10
-        "org.matrix.msc3820.opt2",
-        RoomDisposition.UNSTABLE,
-        EventFormatVersions.ROOM_V4_PLUS,
-        StateResolutionVersions.V2,
-        enforce_key_validity=True,
-        special_case_aliases_auth=False,
-        strict_canonicaljson=True,
-        limit_notifications_power_levels=True,
-        msc2175_implicit_room_creator=True, # Used by MSC3820
-        msc2176_redaction_rules=True, # Used by MSC3820
-        msc3083_join_rules=True,
-        msc3375_redaction_rules=True,
-        msc2403_knocking=True,
-        msc2716_historical=False,
-        msc2716_redactions=False,
-        msc3389_relation_redactions=False,
-        msc3787_knock_restricted_join_rule=True,
-        msc3667_int_only_power_levels=True,
-        msc3821_redaction_rules=True, # Used by MSC3820
-        msc3931_push_features=(),
-        msc3989_redaction_rules=True, # Used by MSC3820
-    )


KNOWN_ROOM_VERSIONS: Dict[str, RoomVersion] = {
@@ -528,7 +447,6 @@ KNOWN_ROOM_VERSIONS: Dict[str, RoomVersion] = {
        RoomVersions.V10,
        RoomVersions.MSC2716v4,
        RoomVersions.MSC3989,
-        RoomVersions.MSC3820opt2,
    )
}
synapse/app/_base.py
@@ -21,7 +21,6 @@ import socket
import sys
import traceback
import warnings
-from textwrap import indent
from typing import (
    TYPE_CHECKING,
    Any,
@@ -213,12 +212,8 @@ def handle_startup_exception(e: Exception) -> NoReturn:
    # Exceptions that occur between setting up the logging and forking or starting
    # the reactor are written to the logs, followed by a summary to stderr.
    logger.exception("Exception during startup")

-    error_string = "".join(traceback.format_exception(type(e), e, e.__traceback__))
-    indented_error_string = indent(error_string, "    ")
-
    quit_with_error(
-        f"Error during initialisation:\n{indented_error_string}\nThere may be more information in the logs."
+        f"Error during initialisation:\n {e}\nThere may be more information in the logs."
    )
|
||||
|
||||
@@ -386,6 +381,7 @@ def listen_unix(
|
||||
|
||||
|
||||
def listen_http(
|
||||
hs: "HomeServer",
|
||||
listener_config: ListenerConfig,
|
||||
root_resource: Resource,
|
||||
version_string: str,
|
||||
@@ -406,6 +402,7 @@ def listen_http(
|
||||
version_string,
|
||||
max_request_body_size=max_request_body_size,
|
||||
reactor=reactor,
|
||||
federation_agent=hs.get_federation_http_client().agent,
|
||||
)
|
||||
|
||||
if isinstance(listener_config, TCPListenerConfig):
|
||||
|
||||
@@ -64,7 +64,7 @@ from synapse.util.logcontext import LoggingContext
logger = logging.getLogger("synapse.app.admin_cmd")

class AdminCmdStore(
class AdminCmdSlavedStore(
FilteringWorkerStore,
ClientIpWorkerStore,
DeviceWorkerStore,
@@ -103,7 +103,7 @@ class AdminCmdStore(

class AdminCmdServer(HomeServer):
DATASTORE_CLASS = AdminCmdStore # type: ignore
DATASTORE_CLASS = AdminCmdSlavedStore # type: ignore

async def export_data_command(hs: HomeServer, args: argparse.Namespace) -> None:

@@ -102,7 +102,7 @@ from synapse.util.httpresourcetree import create_resource_tree
logger = logging.getLogger("synapse.app.generic_worker")

class GenericWorkerStore(
class GenericWorkerSlavedStore(
# FIXME(#3714): We need to add UserDirectoryStore as we write directly
# rather than going via the correct worker.
UserDirectoryStore,
@@ -154,7 +154,7 @@ class GenericWorkerStore(

class GenericWorkerServer(HomeServer):
DATASTORE_CLASS = GenericWorkerStore # type: ignore
DATASTORE_CLASS = GenericWorkerSlavedStore # type: ignore

def _listen_http(self, listener_config: ListenerConfig) -> None:
assert listener_config.http_options is not None
@@ -223,6 +223,7 @@ class GenericWorkerServer(HomeServer):
root_resource = create_resource_tree(resources, OptionsResource())

_base.listen_http(
self,
listener_config,
root_resource,
self.version_string,

@@ -139,6 +139,7 @@ class SynapseHomeServer(HomeServer):
root_resource = OptionsResource()

ports = listen_http(
self,
listener_config,
create_resource_tree(resources, root_resource),
self.version_string,

@@ -127,6 +127,10 @@ async def phone_stats_home(
daily_sent_messages = await store.count_daily_sent_messages()
stats["daily_sent_messages"] = daily_sent_messages

r30_results = await store.count_r30_users()
for name, count in r30_results.items():
stats["r30_users_" + name] = count

r30v2_results = await store.count_r30v2_users()
for name, count in r30v2_results.items():
stats["r30v2_users_" + name] = count

@@ -86,7 +86,6 @@ class ApplicationService:
url.rstrip("/") if isinstance(url, str) else None
) # url must not end with a slash
self.hs_token = hs_token
# The full Matrix ID for this application service's sender.
self.sender = sender
self.namespaces = self._check_namespaces(namespaces)
self.id = id
@@ -213,7 +212,7 @@ class ApplicationService:
True if the application service is interested in the user, False if not.
"""
return (
# User is the appservice's configured sender_localpart user
# User is the appservice's sender_localpart user
user_id == self.sender
# User is in the appservice's user namespace
or self.is_user_in_namespace(user_id)
@@ -44,7 +44,6 @@ import jinja2
import pkg_resources
import yaml

from synapse.types import StrSequence
from synapse.util.templates import _create_mxc_to_http_filter, _format_ts_filter

logger = logging.getLogger(__name__)
@@ -59,7 +58,7 @@ class ConfigError(Exception):
the problem lies.
"""

def __init__(self, msg: str, path: Optional[StrSequence] = None):
def __init__(self, msg: str, path: Optional[Iterable[str]] = None):
self.msg = msg
self.path = path

@@ -61,10 +61,9 @@ from synapse.config import ( # noqa: F401
voip,
workers,
)
from synapse.types import StrSequence

class ConfigError(Exception):
def __init__(self, msg: str, path: Optional[StrSequence] = None):
def __init__(self, msg: str, path: Optional[Iterable[str]] = None):
self.msg = msg
self.path = path

@@ -11,17 +11,17 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import Any, Dict, Type, TypeVar
from typing import Any, Dict, Iterable, Type, TypeVar

import jsonschema
from pydantic import BaseModel, ValidationError, parse_obj_as

from synapse.config._base import ConfigError
from synapse.types import JsonDict, StrSequence
from synapse.types import JsonDict

def validate_config(
json_schema: JsonDict, config: Any, config_path: StrSequence
json_schema: JsonDict, config: Any, config_path: Iterable[str]
) -> None:
"""Validates a config setting against a JsonSchema definition

@@ -45,7 +45,7 @@ def validate_config(

def json_error_to_config_error(
e: jsonschema.ValidationError, config_path: StrSequence
e: jsonschema.ValidationError, config_path: Iterable[str]
) -> ConfigError:
"""Converts a json validation error to a user-readable ConfigError

@@ -36,10 +36,11 @@ class AppServiceConfig(Config):
if not isinstance(self.app_service_config_files, list) or not all(
type(x) is str for x in self.app_service_config_files
):
# type-ignore: this function gets arbitrary json value; we do use this path.
raise ConfigError(
"Expected '%s' to be a list of AS config files:"
% (self.app_service_config_files),
("app_service_config_files",),
"app_service_config_files",
)

self.track_appservice_user_ips = config.get("track_appservice_user_ips", False)
@@ -84,6 +84,18 @@ class ExperimentalConfig(Config):
"msc3984_appservice_key_query", False
)

# MSC3706 (server-side support for partial state in /send_join responses)
# Synapse will always serve partial state responses to requests using the stable
# query parameter `omit_members`. If this flag is set, Synapse will also serve
# partial state responses to requests using the unstable query parameter
# `org.matrix.msc3706.partial_state`.
self.msc3706_enabled: bool = experimental.get("msc3706_enabled", False)

# experimental support for faster joins over federation
# (MSC2775, MSC3706, MSC3895)
# requires a target server that can provide a partial join response (MSC3706)
self.faster_joins_enabled: bool = experimental.get("faster_joins", True)

# MSC3720 (Account status endpoint)
self.msc3720_enabled: bool = experimental.get("msc3720_enabled", False)
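The interplay of these two flags when a /send_join request is parsed can be sketched as follows (illustrative only; `query` is a plain mapping of query-parameter names to string values, and `resolve_partial_state` is not a Synapse function):

def resolve_partial_state(query: dict, msc3706_enabled: bool) -> bool:
    # The stable parameter always wins when present.
    stable = query.get("omit_members")
    if stable is not None:
        return stable == "true"
    # The unstable parameter is honoured only when msc3706_enabled is set.
    if msc3706_enabled:
        return query.get("org.matrix.msc3706.partial_state") == "true"
    return False

# The stable name is served regardless of the flag:
assert resolve_partial_state({"omit_members": "true"}, False) is True
assert resolve_partial_state({"org.matrix.msc3706.partial_state": "true"}, False) is False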
@@ -117,7 +117,9 @@ root:
# Write logs to the `buffer` handler, which will buffer them together in memory,
# then write them to a file.
#
# Replace "buffer" with "console" to log to stderr instead.
# Replace "buffer" with "console" to log to stderr instead. (Note that you'll
# also need to update the configuration for the `twisted` logger above, in
# this case.)
#
handlers: [buffer]

@@ -19,7 +19,7 @@ from urllib import parse as urlparse
import attr
import pkg_resources

from synapse.types import JsonDict, StrSequence
from synapse.types import JsonDict

from ._base import Config, ConfigError
from ._util import validate_config
@@ -80,7 +80,7 @@ class OembedConfig(Config):
)

def _parse_and_validate_provider(
self, providers: List[JsonDict], config_path: StrSequence
self, providers: List[JsonDict], config_path: Iterable[str]
) -> Iterable[OEmbedEndpointConfig]:
# Ensure it is the proper form.
validate_config(
@@ -112,7 +112,7 @@ class OembedConfig(Config):
api_endpoint, patterns, endpoint.get("formats")
)

def _glob_to_pattern(self, glob: str, config_path: StrSequence) -> Pattern:
def _glob_to_pattern(self, glob: str, config_path: Iterable[str]) -> Pattern:
"""
Convert the glob into a sane regular expression to match against. The
rules followed will be slightly different for the domain portion vs.
@@ -33,12 +33,5 @@ class RedisConfig(Config):

self.redis_host = redis_config.get("host", "localhost")
self.redis_port = redis_config.get("port", 6379)
self.redis_path = redis_config.get("path", None)
self.redis_dbid = redis_config.get("dbid", None)
self.redis_password = redis_config.get("password")

self.redis_use_tls = redis_config.get("use_tls", False)
self.redis_certificate = redis_config.get("certificate_file", None)
self.redis_private_key = redis_config.get("private_key_file", None)
self.redis_ca_file = redis_config.get("ca_file", None)
self.redis_ca_path = redis_config.get("ca_path", None)

@@ -224,20 +224,20 @@ class ContentRepositoryConfig(Config):
if "http" in proxy_env or "https" in proxy_env:
logger.warning("".join(HTTP_PROXY_SET_WARNING))

# we always block '0.0.0.0' and '::', which are supposed to be
# we always blacklist '0.0.0.0' and '::', which are supposed to be
# unroutable addresses.
self.url_preview_ip_range_blocklist = generate_ip_set(
self.url_preview_ip_range_blacklist = generate_ip_set(
config["url_preview_ip_range_blacklist"],
["0.0.0.0", "::"],
config_path=("url_preview_ip_range_blacklist",),
)

self.url_preview_ip_range_allowlist = generate_ip_set(
self.url_preview_ip_range_whitelist = generate_ip_set(
config.get("url_preview_ip_range_whitelist", ()),
config_path=("url_preview_ip_range_whitelist",),
)

self.url_preview_url_blocklist = config.get("url_preview_url_blacklist", ())
self.url_preview_url_blacklist = config.get("url_preview_url_blacklist", ())

self.url_preview_accept_language = config.get(
"url_preview_accept_language"

@@ -27,7 +27,7 @@ from netaddr import AddrFormatError, IPNetwork, IPSet
from twisted.conch.ssh.keys import Key

from synapse.api.room_versions import KNOWN_ROOM_VERSIONS
from synapse.types import JsonDict, StrSequence
from synapse.types import JsonDict
from synapse.util.module_loader import load_module
from synapse.util.stringutils import parse_and_validate_server_name

@@ -73,7 +73,7 @@ def _6to4(network: IPNetwork) -> IPNetwork:
def generate_ip_set(
ip_addresses: Optional[Iterable[str]],
extra_addresses: Optional[Iterable[str]] = None,
config_path: Optional[StrSequence] = None,
config_path: Optional[Iterable[str]] = None,
) -> IPSet:
"""
Generate an IPSet from a list of IP addresses or CIDRs.
@@ -115,7 +115,7 @@ def generate_ip_set(

# IP ranges that are considered private / unroutable / don't make sense.
DEFAULT_IP_RANGE_BLOCKLIST = [
DEFAULT_IP_RANGE_BLACKLIST = [
# Localhost
"127.0.0.0/8",
# Private networks.
@@ -501,36 +501,36 @@ class ServerConfig(Config):
# due to resource constraints
self.admin_contact = config.get("admin_contact", None)

ip_range_blocklist = config.get(
"ip_range_blacklist", DEFAULT_IP_RANGE_BLOCKLIST
ip_range_blacklist = config.get(
"ip_range_blacklist", DEFAULT_IP_RANGE_BLACKLIST
)

# Attempt to create an IPSet from the given ranges

# Always block 0.0.0.0, ::
self.ip_range_blocklist = generate_ip_set(
ip_range_blocklist, ["0.0.0.0", "::"], config_path=("ip_range_blacklist",)
# Always blacklist 0.0.0.0, ::
self.ip_range_blacklist = generate_ip_set(
ip_range_blacklist, ["0.0.0.0", "::"], config_path=("ip_range_blacklist",)
)

self.ip_range_allowlist = generate_ip_set(
self.ip_range_whitelist = generate_ip_set(
config.get("ip_range_whitelist", ()), config_path=("ip_range_whitelist",)
)
# The federation_ip_range_blacklist is used for backwards-compatibility
# and only applies to federation and identity servers.
if "federation_ip_range_blacklist" in config:
# Always block 0.0.0.0, ::
self.federation_ip_range_blocklist = generate_ip_set(
# Always blacklist 0.0.0.0, ::
self.federation_ip_range_blacklist = generate_ip_set(
config["federation_ip_range_blacklist"],
["0.0.0.0", "::"],
config_path=("federation_ip_range_blacklist",),
)
# 'federation_ip_range_whitelist' was never a supported configuration option.
self.federation_ip_range_allowlist = None
self.federation_ip_range_whitelist = None
else:
# No backwards-compatibility required, as federation_ip_range_blacklist
# is not given. Default to ip_range_blacklist and ip_range_whitelist.
self.federation_ip_range_blocklist = self.ip_range_blocklist
self.federation_ip_range_allowlist = self.ip_range_allowlist
self.federation_ip_range_blacklist = self.ip_range_blacklist
self.federation_ip_range_whitelist = self.ip_range_whitelist

# (undocumented) option for torturing the worker-mode replication a bit,
# for testing. The value defines the number of milliseconds to pause before

@@ -39,19 +39,6 @@ The '%s' configuration option is deprecated and will be removed in a future
Synapse version. Please use ``%s: name_of_worker`` instead.
"""

_MISSING_MAIN_PROCESS_INSTANCE_MAP_DATA = """
Missing data for a worker to connect to main process. Please include '%s' in the
`instance_map` declared in your shared yaml configuration, or optionally (as a deprecated
solution) in every worker's yaml as various `worker_replication_*` settings as defined
in workers documentation here:
`https://matrix-org.github.io/synapse/latest/workers.html#worker-configuration`
"""
# This allows for a handy knob when it's time to change from 'master' to
# something with less 'history'
MAIN_PROCESS_INSTANCE_NAME = "master"
# Use this to adjust what the main process is known as in the yaml instance_map
MAIN_PROCESS_INSTANCE_MAP_NAME = "main"

logger = logging.getLogger(__name__)

@@ -174,15 +161,27 @@ class WorkerConfig(Config):
raise ConfigError("worker_log_config must be a string")
self.worker_log_config = worker_log_config

# The host used to connect to the main synapse
self.worker_replication_host = config.get("worker_replication_host", None)

# The port on the main synapse for TCP replication
if "worker_replication_port" in config:
raise ConfigError(DIRECT_TCP_ERROR, ("worker_replication_port",))

# The port on the main synapse for HTTP replication endpoint
self.worker_replication_http_port = config.get("worker_replication_http_port")

# The tls mode on the main synapse for HTTP replication endpoint.
# For backward compatibility this defaults to False.
self.worker_replication_http_tls = config.get(
"worker_replication_http_tls", False
)

# The shared secret used for authentication when connecting to the main synapse.
self.worker_replication_secret = config.get("worker_replication_secret", None)

self.worker_name = config.get("worker_name", self.worker_app)
self.instance_name = self.worker_name or MAIN_PROCESS_INSTANCE_NAME
self.instance_name = self.worker_name or "master"

# FIXME: Remove this check after a suitable amount of time.
self.worker_main_http_uri = config.get("worker_main_http_uri", None)
@@ -216,55 +215,12 @@ class WorkerConfig(Config):
)

# A map from instance name to host/port of their HTTP replication endpoint.
# Check if the main process is declared. Inject it into the map if it's not,
# based first on if a 'main' block is declared then on 'worker_replication_*'
# data. If both are available, default to instance_map. The main process
# itself doesn't need this data as it would never have to talk to itself.
instance_map: Dict[str, Any] = config.get("instance_map", {})

if self.instance_name is not MAIN_PROCESS_INSTANCE_NAME:
# The host used to connect to the main synapse
main_host = config.get("worker_replication_host", None)

# The port on the main synapse for HTTP replication endpoint
main_port = config.get("worker_replication_http_port")

# The tls mode on the main synapse for HTTP replication endpoint.
# For backward compatibility this defaults to False.
main_tls = config.get("worker_replication_http_tls", False)

# For now, accept 'main' in the instance_map, but the replication system
# expects 'master', force that into being until it's changed later.
if MAIN_PROCESS_INSTANCE_MAP_NAME in instance_map:
instance_map[MAIN_PROCESS_INSTANCE_NAME] = instance_map[
MAIN_PROCESS_INSTANCE_MAP_NAME
]
del instance_map[MAIN_PROCESS_INSTANCE_MAP_NAME]

# This is the backwards compatibility bit that handles the
# worker_replication_* bits using setdefault() to not overwrite anything.
elif main_host is not None and main_port is not None:
instance_map.setdefault(
MAIN_PROCESS_INSTANCE_NAME,
{
"host": main_host,
"port": main_port,
"tls": main_tls,
},
)

else:
# If we've gotten here, it means that the main process is not on the
# instance_map and that not enough worker_replication_* variables
# were declared in the worker's yaml.
raise ConfigError(
_MISSING_MAIN_PROCESS_INSTANCE_MAP_DATA
% MAIN_PROCESS_INSTANCE_MAP_NAME
)

self.instance_map: Dict[
str, InstanceLocationConfig
] = parse_and_validate_mapping(instance_map, InstanceLocationConfig)
] = parse_and_validate_mapping(
config.get("instance_map", {}),
InstanceLocationConfig,
)
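The precedence implemented above (an explicit 'main' entry in the instance_map first, then the deprecated worker_replication_* settings, then a hard error) can be summarised with a standalone sketch; the helper below is illustrative and not part of Synapse:

def merge_main_into_instance_map(instance_map, main_host, main_port, main_tls):
    # Prefer an explicit 'main' block; the replication system still expects
    # the key 'master', so the entry is renamed.
    if "main" in instance_map:
        instance_map["master"] = instance_map.pop("main")
    # Fall back to the deprecated per-worker settings, without overwriting
    # anything already declared.
    elif main_host is not None and main_port is not None:
        instance_map.setdefault(
            "master", {"host": main_host, "port": main_port, "tls": main_tls}
        )
    else:
        raise ValueError("no way to reach the main process")
    return instance_map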
# Map from type of streams to source, c.f. WriterLocations.
writers = config.get("stream_writers") or {}

@@ -1054,15 +1054,10 @@ def _verify_third_party_invite(
"""
if "third_party_invite" not in event.content:
return False
third_party_invite = event.content["third_party_invite"]
if not isinstance(third_party_invite, collections.abc.Mapping):
if "signed" not in event.content["third_party_invite"]:
return False
if "signed" not in third_party_invite:
return False
signed = third_party_invite["signed"]
if not isinstance(signed, collections.abc.Mapping):
return False
for key in {"mxid", "token", "signatures"}:
signed = event.content["third_party_invite"]["signed"]
for key in {"mxid", "token"}:
if key not in signed:
return False

@@ -1080,6 +1075,8 @@ def _verify_third_party_invite(

if signed["mxid"] != event.state_key:
return False
if signed["token"] != token:
return False

for public_key_object in get_public_keys(invite_event):
public_key = public_key_object["public_key"]
@@ -1091,9 +1088,7 @@ def _verify_third_party_invite(
verify_key = decode_verify_key_bytes(
key_name, decode_base64(public_key)
)
# verify_signed_json incorrectly states it wants a dict, it
# just needs a mapping.
verify_signed_json(signed, server, verify_key) # type: ignore[arg-type]
verify_signed_json(signed, server, verify_key)

# We got the public key from the invite, so we know that the
# correct server signed the signed bundle.
@@ -19,7 +19,6 @@ from immutabledict import immutabledict

from synapse.appservice import ApplicationService
from synapse.events import EventBase
from synapse.logging.opentracing import tag_args, trace
from synapse.types import JsonDict, StateMap

if TYPE_CHECKING:
@@ -243,8 +242,6 @@ class EventContext(UnpersistedEventContextBase):

return self._state_group

@trace
@tag_args
async def get_current_state_ids(
self, state_filter: Optional["StateFilter"] = None
) -> Optional[StateMap[str]]:
@@ -278,8 +275,6 @@ class EventContext(UnpersistedEventContextBase):

return prev_state_ids

@trace
@tag_args
async def get_prev_state_ids(
self, state_filter: Optional["StateFilter"] = None
) -> StateMap[str]:

@@ -22,7 +22,6 @@ from typing import (
Iterable,
List,
Mapping,
Match,
MutableMapping,
Optional,
Union,
@@ -47,10 +46,12 @@ if TYPE_CHECKING:
from synapse.handlers.relations import BundledAggregations

# Split strings on "." but not "\." (or "\\\.").
SPLIT_FIELD_REGEX = re.compile(r"\\*\.")
# Find escaped characters, e.g. those with a \ in front of them.
ESCAPE_SEQUENCE_PATTERN = re.compile(r"\\(.)")
# Split strings on "." but not "\." This uses a negative lookbehind assertion for '\'
# (?<!stuff) matches if the current position in the string is not preceded
# by a match for 'stuff'.
# TODO: This is fast, but fails to handle "foo\\.bar" which should be treated as
# the literal fields "foo\" and "bar" but will instead be treated as "foo\\.bar"
SPLIT_FIELD_REGEX = re.compile(r"(?<!\\)\.")

CANONICALJSON_MAX_INT = (2**53) - 1
CANONICALJSON_MIN_INT = -CANONICALJSON_MAX_INT
@@ -129,16 +130,6 @@ def prune_event_dict(room_version: RoomVersion, event_dict: JsonDict) -> JsonDic
add_fields("membership")
if room_version.msc3375_redaction_rules:
add_fields(EventContentFields.AUTHORISING_USER)
if room_version.msc3821_redaction_rules:
# Preserve the signed field under third_party_invite.
third_party_invite = event_dict["content"].get("third_party_invite")
if isinstance(third_party_invite, collections.abc.Mapping):
new_content["third_party_invite"] = {}
if "signed" in third_party_invite:
new_content["third_party_invite"]["signed"] = third_party_invite[
"signed"
]

elif event_type == EventTypes.Create:
# MSC2176 rules state that create events cannot be redacted.
if room_version.msc2176_redaction_rules:
@@ -180,18 +171,6 @@ def prune_event_dict(room_version: RoomVersion, event_dict: JsonDict) -> JsonDic
elif room_version.msc2716_redactions and event_type == EventTypes.MSC2716_MARKER:
add_fields(EventContentFields.MSC2716_INSERTION_EVENT_REFERENCE)

# Protect the rel_type and event_id fields under the m.relates_to field.
if room_version.msc3389_relation_redactions:
relates_to = event_dict["content"].get("m.relates_to")
if isinstance(relates_to, collections.abc.Mapping):
new_relates_to = {}
for field in ("rel_type", "event_id"):
if field in relates_to:
new_relates_to[field] = relates_to[field]
# Only include a non-empty relates_to field.
if new_relates_to:
new_content["m.relates_to"] = new_relates_to

allowed_fields = {k: v for k, v in event_dict.items() if k in allowed_keys}

allowed_fields["content"] = new_content
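As an example of the MSC3389 rule above, redacting an annotation keeps only the rel_type and event_id of its m.relates_to; a minimal sketch of the preserved slice (the content values are hypothetical):

content = {
    "m.relates_to": {"rel_type": "m.annotation", "event_id": "$abc", "key": "x"}
}
relates_to = content["m.relates_to"]
preserved = {f: relates_to[f] for f in ("rel_type", "event_id") if f in relates_to}
# preserved == {"rel_type": "m.annotation", "event_id": "$abc"}; "key" is stripped.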
@@ -252,57 +231,6 @@ def _copy_field(src: JsonDict, dst: JsonDict, field: List[str]) -> None:
sub_out_dict[key_to_move] = sub_dict[key_to_move]

def _escape_slash(m: Match[str]) -> str:
"""
Replacement function; replace a backslash-backslash or backslash-dot with the
second character. Leaves any other string alone.
"""
if m.group(1) in ("\\", "."):
return m.group(1)
return m.group(0)

def _split_field(field: str) -> List[str]:
"""
Splits strings on unescaped dots and removes escaping.

Args:
field: A string representing a path to a field.

Returns:
A list of nested fields to traverse.
"""

# Convert the field and remove escaping:
#
# 1. "content.body.thing\.with\.dots"
# 2. ["content", "body", "thing\.with\.dots"]
# 3. ["content", "body", "thing.with.dots"]

# Find all dots (and their preceding backslashes). If the dot is unescaped
# then emit a new field part.
result = []
prev_start = 0
for match in SPLIT_FIELD_REGEX.finditer(field):
# If the match is an *even* number of characters then the dot was escaped.
if len(match.group()) % 2 == 0:
continue

# Add a new part (up to the dot, exclusive) after escaping.
result.append(
ESCAPE_SEQUENCE_PATTERN.sub(
_escape_slash, field[prev_start : match.end() - 1]
)
)
prev_start = match.end()

# Add any part of the field after the last unescaped dot. (Note that if the
# character is a dot this correctly adds a blank string.)
result.append(re.sub(r"\\(.)", _escape_slash, field[prev_start:]))

return result
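A quick illustration of the splitting behaviour, using the regexes defined earlier; the expected outputs follow from the docstring and the escaping rules:

assert _split_field("content.body") == ["content", "body"]
assert _split_field(r"content.body.thing\.with\.dots") == [
    "content", "body", "thing.with.dots"
]
# An escaped backslash followed by an unescaped dot still splits:
assert _split_field(r"content\\.body") == ["content\\", "body"]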
def only_fields(dictionary: JsonDict, fields: List[str]) -> JsonDict:
"""Return a new dict with only the fields in 'dictionary' which are present
in 'fields'.
@@ -310,7 +238,7 @@ def only_fields(dictionary: JsonDict, fields: List[str]) -> JsonDict:
If there are no event fields specified then all fields are included.
The entries may include '.' characters to indicate sub-fields.
So ['content.body'] will include the 'body' field of the 'content' object.
A literal '.' or '\' character in a field name may be escaped using a '\'.
A literal '.' character in a field name may be escaped using a '\'.

Args:
dictionary: The dictionary to read from.
@@ -325,7 +253,13 @@ def only_fields(dictionary: JsonDict, fields: List[str]) -> JsonDict:

# for each field, convert it:
# ["content.body.thing\.with\.dots"] => [["content", "body", "thing\.with\.dots"]]
split_fields = [_split_field(f) for f in fields]
split_fields = [SPLIT_FIELD_REGEX.split(f) for f in fields]

# for each element of the output array of arrays:
# remove escaping so we can use the right key names.
split_fields[:] = [
[f.replace(r"\.", r".") for f in field_array] for field_array in split_fields
]

output: JsonDict = {}
for field_array in split_fields:

@@ -236,7 +236,6 @@ class FederationClient(FederationBase):

async def claim_client_keys(
self,
user: UserID,
destination: str,
query: Dict[str, Dict[str, Dict[str, int]]],
timeout: Optional[int],
@@ -244,7 +243,6 @@ class FederationClient(FederationBase):
"""Claims one-time keys for a device hosted on a remote server.

Args:
user: The user id of the requesting user
destination: Domain name of the remote homeserver
content: The query content.

@@ -281,7 +279,7 @@ class FederationClient(FederationBase):
if use_unstable:
try:
return await self.transport_layer.claim_client_keys_unstable(
user, destination, unstable_content, timeout
destination, unstable_content, timeout
)
except HttpResponseException as e:
# If an error is received that is due to an unrecognised endpoint,
@@ -297,7 +295,7 @@ class FederationClient(FederationBase):
logger.debug("Skipping unstable claim client keys API")

return await self.transport_layer.claim_client_keys(
user, destination, content, timeout
destination, content, timeout
)

@trace

@@ -739,10 +739,12 @@ class FederationServer(FederationBase):
"event": event_json,
"state": [p.get_pdu_json(time_now) for p in state_events],
"auth_chain": [p.get_pdu_json(time_now) for p in auth_chain_events],
"org.matrix.msc3706.partial_state": caller_supports_partial_state,
"members_omitted": caller_supports_partial_state,
}

if servers_in_room is not None:
resp["org.matrix.msc3706.servers_in_room"] = list(servers_in_room)
resp["servers_in_room"] = list(servers_in_room)

return resp

@@ -45,7 +45,7 @@ from synapse.events import EventBase, make_event_from_dict
from synapse.federation.units import Transaction
from synapse.http.matrixfederationclient import ByteParser, LegacyJsonSendParser
from synapse.http.types import QueryParams
from synapse.types import JsonDict, UserID
from synapse.types import JsonDict
from synapse.util import ExceptionBundle

if TYPE_CHECKING:
@@ -59,6 +59,7 @@ class TransportLayerClient:

def __init__(self, hs: "HomeServer"):
self.client = hs.get_federation_http_client()
self._faster_joins_enabled = hs.config.experimental.faster_joins_enabled
self._is_mine_server_name = hs.is_mine_server_name

async def get_room_state_ids(
@@ -362,8 +363,12 @@ class TransportLayerClient:
) -> "SendJoinResponse":
path = _create_v2_path("/send_join/%s/%s", room_id, event_id)
query_params: Dict[str, str] = {}
# lazy-load state on join
query_params["omit_members"] = "true" if omit_members else "false"
if self._faster_joins_enabled:
# lazy-load state on join
query_params["org.matrix.msc3706.partial_state"] = (
"true" if omit_members else "false"
)
query_params["omit_members"] = "true" if omit_members else "false"

return await self.client.put_json(
destination=destination,
@@ -630,11 +635,7 @@ class TransportLayerClient:
)

async def claim_client_keys(
self,
user: UserID,
destination: str,
query_content: JsonDict,
timeout: Optional[int],
self, destination: str, query_content: JsonDict, timeout: Optional[int]
) -> JsonDict:
"""Claim one-time keys for a list of devices hosted on a remote server.

@@ -659,7 +660,6 @@ class TransportLayerClient:
}

Args:
user: the user_id of the requesting user
destination: The server to query.
query_content: The user ids to query.
Returns:
@@ -676,11 +676,7 @@ class TransportLayerClient:
)

async def claim_client_keys_unstable(
self,
user: UserID,
destination: str,
query_content: JsonDict,
timeout: Optional[int],
self, destination: str, query_content: JsonDict, timeout: Optional[int]
) -> JsonDict:
"""Claim one-time keys for a list of devices hosted on a remote server.

@@ -705,7 +701,6 @@ class TransportLayerClient:
}

Args:
user: the user_id of the requesting user
destination: The server to query.
query_content: The user ids to query.
Returns:
@@ -907,7 +902,9 @@ def _members_omitted_parser(response: SendJoinResponse) -> Generator[None, Any,
while True:
val = yield
if not isinstance(val, bool):
raise TypeError("members_omitted must be a boolean")
raise TypeError(
"members_omitted (formerly org.matrix.msc3706.partial_state) must be a boolean"
)
response.members_omitted = val

@@ -967,6 +964,14 @@ class SendJoinParser(ByteParser[SendJoinResponse]):
]

if not v1_api:
self._coros.append(
ijson.items_coro(
_members_omitted_parser(self._response),
"org.matrix.msc3706.partial_state",
use_float="True",
)
)
# The stable field name comes last, so it "wins" if the fields disagree
self._coros.append(
ijson.items_coro(
_members_omitted_parser(self._response),
@@ -975,6 +980,14 @@ class SendJoinParser(ByteParser[SendJoinResponse]):
)
)

self._coros.append(
ijson.items_coro(
_servers_in_room_parser(self._response),
"org.matrix.msc3706.servers_in_room",
use_float="True",
)
)

# Again, stable field name comes last
self._coros.append(
ijson.items_coro(

@@ -440,6 +440,7 @@ class FederationV2SendJoinServlet(BaseFederationServerServlet):
server_name: str,
):
super().__init__(hs, authenticator, ratelimiter, server_name)
self._read_msc3706_query_param = hs.config.experimental.msc3706_enabled

async def on_PUT(
self,
@@ -452,7 +453,16 @@ class FederationV2SendJoinServlet(BaseFederationServerServlet):
# TODO(paul): assert that event_id parsed from path actually
# match those given in content

partial_state = parse_boolean_from_args(query, "omit_members", default=False)
partial_state = False
# The stable query parameter wins, if it disagrees with the unstable
# parameter for some reason.
stable_param = parse_boolean_from_args(query, "omit_members", default=None)
if stable_param is not None:
partial_state = stable_param
elif self._read_msc3706_query_param:
partial_state = parse_boolean_from_args(
query, "org.matrix.msc3706.partial_state", default=False
)

result = await self.handler.on_send_join_request(
origin, content, room_id, caller_supports_partial_state=partial_state

@@ -52,6 +52,7 @@ from synapse.api.errors import (
NotFoundError,
StoreError,
SynapseError,
UserDeactivatedError,
)
from synapse.api.ratelimiting import Ratelimiter
from synapse.handlers.ui_auth import (
@@ -1418,6 +1419,12 @@ class AuthHandler:
return None
(user_id, password_hash) = lookupres

# If the password hash is None, the account has likely been deactivated
if not password_hash:
deactivated = await self.store.get_user_deactivated_status(user_id)
if deactivated:
raise UserDeactivatedError("This account has been deactivated")

result = await self.validate_hash(password, password_hash)
if not result:
logger.warning("Failed password login for user %s", user_id)
@@ -1742,11 +1749,8 @@ class AuthHandler:
registered.
auth_provider_session_id: The session ID from the SSO IdP received during login.
"""
# If the account has been deactivated, do not proceed with the login.
#
# This gets checked again when the token is submitted but this lets us
# provide an HTML error page to the user (instead of issuing a token and
# having it error later).
# If the account has been deactivated, do not proceed with the login
# flow.
deactivated = await self.store.get_user_deactivated_status(registered_user_id)
if deactivated:
respond_with_html(request, 403, self._sso_account_deactivated_template)

@@ -661,7 +661,6 @@ class E2eKeysHandler:
async def claim_one_time_keys(
self,
query: Dict[str, Dict[str, Dict[str, int]]],
user: UserID,
timeout: Optional[int],
always_include_fallback_keys: bool,
) -> JsonDict:
@@ -704,7 +703,7 @@ class E2eKeysHandler:
device_keys = remote_queries[destination]
try:
remote_result = await self.federation.claim_client_keys(
user, destination, device_keys, timeout=timeout
destination, device_keys, timeout=timeout
)
for user_id, keys in remote_result["one_time_keys"].items():
if user_id in device_keys:

@@ -148,7 +148,7 @@ class FederationHandler:
self._event_auth_handler = hs.get_event_auth_handler()
self._server_notices_mxid = hs.config.servernotices.server_notices_mxid
self.config = hs.config
self.http_client = hs.get_proxied_blocklisted_http_client()
self.http_client = hs.get_proxied_blacklisted_http_client()
self._replication = hs.get_replication_data_handler()
self._federation_event_handler = hs.get_federation_event_handler()
self._device_handler = hs.get_device_handler()

@@ -88,7 +88,7 @@ from synapse.types import (
)
from synapse.types.state import StateFilter
from synapse.util.async_helpers import Linearizer, concurrently_execute
from synapse.util.iterutils import batch_iter, partition
from synapse.util.iterutils import batch_iter
from synapse.util.retryutils import NotRetryingDestination
from synapse.util.stringutils import shortstr

@@ -865,7 +865,7 @@ class FederationEventHandler:
[event.event_id for event in events]
)

new_events: List[EventBase] = []
new_events = []
for event in events:
event_id = event.event_id

@@ -890,71 +890,12 @@ class FederationEventHandler:
# Continue on with the events that are new to us.
new_events.append(event)

set_tag(
SynapseTags.RESULT_PREFIX + "new_events.length",
str(len(new_events)),
)

@trace
async def _process_new_pulled_events(new_events: Collection[EventBase]) -> None:
# We want to sort these by depth so we process them and tell clients about
# them in order. It's also more efficient to backfill this way (`depth`
# ascending) because one backfill event is likely to be the `prev_event` of
# the next event we're going to process.
sorted_events = sorted(new_events, key=lambda x: x.depth)
for ev in sorted_events:
with nested_logging_context(ev.event_id):
await self._process_pulled_event(origin, ev, backfilled=backfilled)

# Check if we've already tried to process these events at some point in the
# past. We aren't concerned with the exponential backoff here, just whether it
# has failed to be processed before.
event_ids_with_failed_pull_attempts = (
await self._store.get_event_ids_with_failed_pull_attempts(
[event.event_id for event in new_events]
)
)

# We construct the event lists in source order from `/backfill` response because
# it's a) easiest, but also b) the order in which we process things matters for
# MSC2716 historical batches because many historical events are all at the same
# `depth` and we rely on the tenuous sort that the other server gave us and hope
# they're doing their best. The brittle nature of this ordering for historical
# messages over federation is one of the reasons why we don't want to continue
# on MSC2716 until we have online topological ordering.
events_with_failed_pull_attempts, fresh_events = partition(
new_events, lambda e: e.event_id in event_ids_with_failed_pull_attempts
)
set_tag(
SynapseTags.FUNC_ARG_PREFIX + "events_with_failed_pull_attempts",
str(event_ids_with_failed_pull_attempts),
)
set_tag(
SynapseTags.RESULT_PREFIX + "events_with_failed_pull_attempts.length",
str(len(events_with_failed_pull_attempts)),
)
set_tag(
SynapseTags.FUNC_ARG_PREFIX + "fresh_events",
str([event.event_id for event in fresh_events]),
)
set_tag(
SynapseTags.RESULT_PREFIX + "fresh_events.length",
str(len(fresh_events)),
)

# Process previously failed backfill events in the background to not waste
# time on something that is likely to fail again.
if len(events_with_failed_pull_attempts) > 0:
run_as_background_process(
"_process_new_pulled_events_with_failed_pull_attempts",
_process_new_pulled_events,
events_with_failed_pull_attempts,
)

# We can optimistically try to process and wait for the event to be fully
# persisted if we've never tried before.
if len(fresh_events) > 0:
await _process_new_pulled_events(fresh_events)
# We want to sort these by depth so we process them and
# tell clients about them in order.
sorted_events = sorted(new_events, key=lambda x: x.depth)
for ev in sorted_events:
with nested_logging_context(ev.event_id):
await self._process_pulled_event(origin, ev, backfilled=backfilled)
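The partition helper used above separates one iterable into two lists by a predicate, matched items first; a minimal stand-in with the same behaviour (the real helper lives in synapse.util.iterutils):

from typing import Callable, Iterable, List, Tuple, TypeVar

T = TypeVar("T")

def partition(
    iterable: Iterable[T], predicate: Callable[[T], bool]
) -> Tuple[List[T], List[T]]:
    true_results: List[T] = []
    false_results: List[T] = []
    for item in iterable:
        # Events matching the predicate (here: those with failed pull
        # attempts) land in the first list; everything else is "fresh".
        (true_results if predicate(item) else false_results).append(item)
    return true_results, false_results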
@trace
@tag_args

@@ -52,10 +52,10 @@ class IdentityHandler:
# An HTTP client for contacting trusted URLs.
self.http_client = SimpleHttpClient(hs)
# An HTTP client for contacting identity servers specified by clients.
self._http_client = SimpleHttpClient(
self.blacklisting_http_client = SimpleHttpClient(
hs,
ip_blocklist=hs.config.server.federation_ip_range_blocklist,
ip_allowlist=hs.config.server.federation_ip_range_allowlist,
ip_blacklist=hs.config.server.federation_ip_range_blacklist,
ip_whitelist=hs.config.server.federation_ip_range_whitelist,
)
self.federation_http_client = hs.get_federation_http_client()
self.hs = hs
@@ -197,7 +197,7 @@ class IdentityHandler:
try:
# Use the blacklisting http client as this call is only to identity servers
# provided by a client
data = await self._http_client.post_json_get_json(
data = await self.blacklisting_http_client.post_json_get_json(
bind_url, bind_data, headers=headers
)

@@ -308,7 +308,9 @@ class IdentityHandler:
try:
# Use the blacklisting http client as this call is only to identity servers
# provided by a client
await self._http_client.post_json_get_json(url, content, headers)
await self.blacklisting_http_client.post_json_get_json(
url, content, headers
)
changed = True
except HttpResponseException as e:
changed = False
@@ -577,7 +579,7 @@ class IdentityHandler:
"""
# Check what hashing details are supported by this identity server
try:
hash_details = await self._http_client.get_json(
hash_details = await self.blacklisting_http_client.get_json(
"%s%s/_matrix/identity/v2/hash_details" % (id_server_scheme, id_server),
{"access_token": id_access_token},
)
@@ -644,7 +646,7 @@ class IdentityHandler:
headers = {"Authorization": create_id_access_token_header(id_access_token)}

try:
lookup_results = await self._http_client.post_json_get_json(
lookup_results = await self.blacklisting_http_client.post_json_get_json(
"%s%s/_matrix/identity/v2/lookup" % (id_server_scheme, id_server),
{
"addresses": [lookup_value],
@@ -750,7 +752,7 @@ class IdentityHandler:

url = "%s%s/_matrix/identity/v2/store-invite" % (id_server_scheme, id_server)
try:
data = await self._http_client.post_json_get_json(
data = await self.blacklisting_http_client.post_json_get_json(
url,
invite_config,
{"Authorization": create_id_access_token_header(id_access_token)},

@@ -1,105 +0,0 @@
# Copyright 2023 Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import TYPE_CHECKING

from authlib.jose import JsonWebToken, JWTClaims
from authlib.jose.errors import BadSignatureError, InvalidClaimError, JoseError

from synapse.api.errors import Codes, LoginError
from synapse.types import JsonDict, UserID

if TYPE_CHECKING:
from synapse.server import HomeServer

class JwtHandler:
def __init__(self, hs: "HomeServer"):
self.hs = hs

self.jwt_secret = hs.config.jwt.jwt_secret
self.jwt_subject_claim = hs.config.jwt.jwt_subject_claim
self.jwt_algorithm = hs.config.jwt.jwt_algorithm
self.jwt_issuer = hs.config.jwt.jwt_issuer
self.jwt_audiences = hs.config.jwt.jwt_audiences

def validate_login(self, login_submission: JsonDict) -> str:
"""
Authenticates the user for the /login API

Args:
login_submission: the whole of the login submission
(including 'type' and other relevant fields)

Returns:
The user ID that is logging in.

Raises:
LoginError if there was an authentication problem.
"""
token = login_submission.get("token", None)
if token is None:
raise LoginError(
403, "Token field for JWT is missing", errcode=Codes.FORBIDDEN
)

jwt = JsonWebToken([self.jwt_algorithm])
claim_options = {}
if self.jwt_issuer is not None:
claim_options["iss"] = {"value": self.jwt_issuer, "essential": True}
if self.jwt_audiences is not None:
claim_options["aud"] = {"values": self.jwt_audiences, "essential": True}

try:
claims = jwt.decode(
token,
key=self.jwt_secret,
claims_cls=JWTClaims,
claims_options=claim_options,
)
except BadSignatureError:
# We handle this case separately to provide a better error message
raise LoginError(
403,
"JWT validation failed: Signature verification failed",
errcode=Codes.FORBIDDEN,
)
except JoseError as e:
# A JWT error occurred, return some info back to the client.
raise LoginError(
403,
"JWT validation failed: %s" % (str(e),),
errcode=Codes.FORBIDDEN,
)

try:
claims.validate(leeway=120) # allows 2 min of clock skew

# Enforce the old behavior which is rolled out in productive
# servers: if the JWT contains an 'aud' claim but none is
# configured, the login attempt will fail
if claims.get("aud") is not None:
if self.jwt_audiences is None or len(self.jwt_audiences) == 0:
raise InvalidClaimError("aud")
except JoseError as e:
raise LoginError(
403,
"JWT validation failed: %s" % (str(e),),
errcode=Codes.FORBIDDEN,
)

user = claims.get(self.jwt_subject_claim, None)
if user is None:
raise LoginError(403, "Invalid JWT", errcode=Codes.FORBIDDEN)

return UserID(user, self.hs.hostname).to_string()
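For reference, this deleted handler expected the JWT to arrive directly in the login submission; a minimal usage sketch (the variable names and token value are hypothetical):

login_submission = {"type": "org.matrix.login.jwt", "token": encoded_jwt}
user_id = jwt_handler.validate_login(login_submission)
# user_id is e.g. "@alice:example.com": the localpart comes from the
# configured jwt_subject_claim and the domain from the server hostname.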
@@ -16,7 +16,6 @@ import logging
from typing import TYPE_CHECKING

from synapse.api.constants import ReceiptTypes
from synapse.api.errors import SynapseError
from synapse.util.async_helpers import Linearizer

if TYPE_CHECKING:
@@ -48,21 +47,12 @@ class ReadMarkerHandler:
)

should_update = True
# Get event ordering, this also ensures we know about the event
event_ordering = await self.store.get_event_ordering(event_id)

if existing_read_marker:
try:
old_event_ordering = await self.store.get_event_ordering(
existing_read_marker["event_id"]
)
except SynapseError:
# Old event no longer exists, assume new is ahead. This may
# happen if the old event was removed due to retention.
pass
else:
# Only update if the new marker is ahead in the stream
should_update = event_ordering > old_event_ordering
# Only update if the new marker is ahead in the stream
should_update = await self.store.is_event_after(
event_id, existing_read_marker["event_id"]
)

if should_update:
content = {"event_id": event_id}

@@ -204,7 +204,7 @@ class SsoHandler:
self._media_repo = (
hs.get_media_repository() if hs.config.media.can_load_media_repo else None
)
self._http_client = hs.get_proxied_blocklisted_http_client()
self._http_client = hs.get_proxied_blacklisted_http_client()

# The following template is shown after a successful user interactive
# authentication session. It tells the user they can close the window.

@@ -117,22 +117,22 @@ RawHeaderValue = Union[
]

def _is_ip_blocked(
ip_address: IPAddress, allowlist: Optional[IPSet], blocklist: IPSet
def check_against_blacklist(
ip_address: IPAddress, ip_whitelist: Optional[IPSet], ip_blacklist: IPSet
) -> bool:
"""
Compares an IP address to allowed and disallowed IP sets.

Args:
ip_address: The IP address to check
allowlist: Allowed IP addresses.
blocklist: Disallowed IP addresses.
ip_whitelist: Allowed IP addresses.
ip_blacklist: Disallowed IP addresses.

Returns:
True if the IP address is in the blocklist and not in the allowlist.
True if the IP address is in the blacklist and not in the whitelist.
"""
if ip_address in blocklist:
if allowlist is None or ip_address not in allowlist:
if ip_address in ip_blacklist:
if ip_whitelist is None or ip_address not in ip_whitelist:
return True
return False
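A short usage example of the blacklist check (netaddr types; the addresses are illustrative):

from netaddr import IPAddress, IPSet

blacklist = IPSet(["10.0.0.0/8", "127.0.0.0/8"])
whitelist = IPSet(["10.1.2.3"])

check_against_blacklist(IPAddress("10.5.5.5"), whitelist, blacklist)  # True: blacklisted
check_against_blacklist(IPAddress("10.1.2.3"), whitelist, blacklist)  # False: whitelist wins
check_against_blacklist(IPAddress("8.8.8.8"), whitelist, blacklist)   # False: not blacklisted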
|
||||
|
||||
@@ -154,27 +154,27 @@ def _make_scheduler(
|
||||
return _scheduler
|
||||
|
||||
|
||||
class _IPBlockingResolver:
|
||||
class _IPBlacklistingResolver:
|
||||
"""
|
||||
A proxy for reactor.nameResolver which only produces non-blocklisted IP
|
||||
addresses, preventing DNS rebinding attacks.
|
||||
A proxy for reactor.nameResolver which only produces non-blacklisted IP
|
||||
addresses, preventing DNS rebinding attacks on URL preview.
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
reactor: IReactorPluggableNameResolver,
|
||||
ip_allowlist: Optional[IPSet],
|
||||
ip_blocklist: IPSet,
|
||||
ip_whitelist: Optional[IPSet],
|
||||
ip_blacklist: IPSet,
|
||||
):
|
||||
"""
|
||||
Args:
|
||||
reactor: The twisted reactor.
|
||||
ip_allowlist: IP addresses to allow.
|
||||
ip_blocklist: IP addresses to disallow.
|
||||
ip_whitelist: IP addresses to allow.
|
||||
ip_blacklist: IP addresses to disallow.
|
||||
"""
|
||||
self._reactor = reactor
|
||||
self._ip_allowlist = ip_allowlist
|
||||
self._ip_blocklist = ip_blocklist
|
||||
self._ip_whitelist = ip_whitelist
|
||||
self._ip_blacklist = ip_blacklist
|
||||
|
||||
def resolveHostName(
|
||||
self, recv: IResolutionReceiver, hostname: str, portNumber: int = 0
|
||||
@@ -191,13 +191,16 @@ class _IPBlockingResolver:
|
||||
|
||||
ip_address = IPAddress(address.host)
|
||||
|
||||
if _is_ip_blocked(ip_address, self._ip_allowlist, self._ip_blocklist):
|
||||
if check_against_blacklist(
|
||||
ip_address, self._ip_whitelist, self._ip_blacklist
|
||||
):
|
||||
logger.info(
|
||||
"Blocked %s from DNS resolution to %s" % (ip_address, hostname)
|
||||
"Dropped %s from DNS resolution to %s due to blacklist"
|
||||
% (ip_address, hostname)
|
||||
)
|
||||
has_bad_ip = True
|
||||
|
||||
# if we have a blocked IP, we'd like to raise an error to block the
|
||||
# if we have a blacklisted IP, we'd like to raise an error to block the
|
||||
# request, but all we can really do from here is claim that there were no
|
||||
# valid results.
|
||||
if not has_bad_ip:
|
||||
@@ -229,24 +232,24 @@ class _IPBlockingResolver:
|
||||
# ISynapseReactor implies IReactorCore, but explicitly marking it this as an implementer
|
||||
# of IReactorCore seems to keep mypy-zope happier.
|
||||
@implementer(IReactorCore, ISynapseReactor)
|
||||
class BlocklistingReactorWrapper:
|
||||
class BlacklistingReactorWrapper:
|
||||
"""
|
||||
A Reactor wrapper which will prevent DNS resolution to blocked IP
|
||||
A Reactor wrapper which will prevent DNS resolution to blacklisted IP
|
||||
addresses, to prevent DNS rebinding.
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
reactor: IReactorPluggableNameResolver,
|
||||
ip_allowlist: Optional[IPSet],
|
||||
ip_blocklist: IPSet,
|
||||
ip_whitelist: Optional[IPSet],
|
||||
ip_blacklist: IPSet,
|
||||
):
|
||||
self._reactor = reactor
|
||||
|
||||
# We need to use a DNS resolver which filters out blocked IP
|
||||
# We need to use a DNS resolver which filters out blacklisted IP
|
||||
# addresses, to prevent DNS rebinding.
|
||||
self._nameResolver = _IPBlockingResolver(
|
||||
self._reactor, ip_allowlist, ip_blocklist
|
||||
self._nameResolver = _IPBlacklistingResolver(
|
||||
self._reactor, ip_whitelist, ip_blacklist
|
||||
)
|
||||
|
||||
def __getattr__(self, attr: str) -> Any:
|
||||
@@ -257,7 +260,7 @@ class BlocklistingReactorWrapper:
|
||||
return getattr(self._reactor, attr)
|
||||
|
||||
|
||||
class BlocklistingAgentWrapper(Agent):
|
||||
class BlacklistingAgentWrapper(Agent):
|
||||
"""
|
||||
An Agent wrapper which will prevent access to IP addresses being accessed
|
||||
directly (without an IP address lookup).
|
||||
@@ -266,18 +269,18 @@ class BlocklistingAgentWrapper(Agent):
|
||||
def __init__(
|
||||
self,
|
||||
agent: IAgent,
|
||||
ip_blocklist: IPSet,
|
||||
ip_allowlist: Optional[IPSet] = None,
|
||||
ip_blacklist: IPSet,
|
||||
ip_whitelist: Optional[IPSet] = None,
|
||||
):
|
||||
"""
|
||||
Args:
|
||||
agent: The Agent to wrap.
|
||||
ip_allowlist: IP addresses to allow.
|
||||
ip_blocklist: IP addresses to disallow.
|
||||
ip_whitelist: IP addresses to allow.
|
||||
ip_blacklist: IP addresses to disallow.
|
||||
"""
|
||||
self._agent = agent
|
||||
self._ip_allowlist = ip_allowlist
|
||||
self._ip_blocklist = ip_blocklist
|
||||
self._ip_whitelist = ip_whitelist
|
||||
self._ip_blacklist = ip_blacklist
|
||||
|
||||
def request(
|
||||
self,
|
||||
@@ -296,9 +299,13 @@ class BlocklistingAgentWrapper(Agent):
|
||||
# Not an IP
|
||||
pass
|
||||
else:
|
||||
if _is_ip_blocked(ip_address, self._ip_allowlist, self._ip_blocklist):
|
||||
logger.info("Blocking access to %s" % (ip_address,))
|
||||
e = SynapseError(HTTPStatus.FORBIDDEN, "IP address blocked")
|
||||
if check_against_blacklist(
|
||||
ip_address, self._ip_whitelist, self._ip_blacklist
|
||||
):
|
||||
logger.info("Blocking access to %s due to blacklist" % (ip_address,))
|
||||
e = SynapseError(
|
||||
HTTPStatus.FORBIDDEN, "IP address blocked by IP blacklist entry"
|
||||
)
|
||||
return defer.fail(Failure(e))
|
||||
|
||||
return self._agent.request(
|
||||
@@ -756,9 +763,10 @@ class SimpleHttpClient(BaseHttpClient):
     Args:
         hs: The HomeServer instance to pass in
         treq_args: Extra keyword arguments to be given to treq.request.
-        ip_blocklist: The IP addresses that we may not request.
-        ip_allowlist: The allowed IP addresses, that we can
-            request if it were otherwise caught in a blocklist.
+        ip_blacklist: The IP addresses that are blacklisted that
+            we may not request.
+        ip_whitelist: The whitelisted IP addresses, that we can
+            request if it were otherwise caught in a blacklist.
         use_proxy: Whether proxy settings should be discovered and used
             from conventional environment variables.
     """
@@ -767,19 +775,19 @@ class SimpleHttpClient(BaseHttpClient):
         self,
         hs: "HomeServer",
         treq_args: Optional[Dict[str, Any]] = None,
-        ip_allowlist: Optional[IPSet] = None,
-        ip_blocklist: Optional[IPSet] = None,
+        ip_whitelist: Optional[IPSet] = None,
+        ip_blacklist: Optional[IPSet] = None,
         use_proxy: bool = False,
     ):
         super().__init__(hs, treq_args=treq_args)
-        self._ip_allowlist = ip_allowlist
-        self._ip_blocklist = ip_blocklist
+        self._ip_whitelist = ip_whitelist
+        self._ip_blacklist = ip_blacklist

-        if self._ip_blocklist:
-            # If we have an IP blocklist, we need to use a DNS resolver which
-            # filters out blocked IP addresses, to prevent DNS rebinding.
-            self.reactor: ISynapseReactor = BlocklistingReactorWrapper(
-                self.reactor, self._ip_allowlist, self._ip_blocklist
+        if self._ip_blacklist:
+            # If we have an IP blacklist, we need to use a DNS resolver which
+            # filters out blacklisted IP addresses, to prevent DNS rebinding.
+            self.reactor: ISynapseReactor = BlacklistingReactorWrapper(
+                self.reactor, self._ip_whitelist, self._ip_blacklist
             )

         # the pusher makes lots of concurrent SSL connections to Sygnal, and tends to
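Because the wrapper is installed by simple assignment over `self.reactor`, it can also be exercised on its own. A hedged sketch, assuming Synapse is importable and carries this branch's `Blacklisting*` naming (the IPSet ranges are illustrative):

from netaddr import IPSet
from twisted.internet import reactor

from synapse.http.client import BlacklistingReactorWrapper

# Anything that resolves names through filtered_reactor now goes via the
# filtering resolver; answers falling inside the blacklist are dropped.
filtered_reactor = BlacklistingReactorWrapper(
    reactor,
    ip_whitelist=None,
    ip_blacklist=IPSet(["169.254.0.0/16", "127.0.0.0/8"]),
)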
@@ -801,13 +809,14 @@ class SimpleHttpClient(BaseHttpClient):
             use_proxy=use_proxy,
         )

-        if self._ip_blocklist:
-            # If we have an IP blocklist, we then install the Agent which prevents
-            # direct access to IP addresses, that are not caught by the DNS resolution.
-            self.agent = BlocklistingAgentWrapper(
+        if self._ip_blacklist:
+            # If we have an IP blacklist, we then install the blacklisting Agent
+            # which prevents direct access to IP addresses, that are not caught
+            # by the DNS resolution.
+            self.agent = BlacklistingAgentWrapper(
                 self.agent,
-                ip_blocklist=self._ip_blocklist,
-                ip_allowlist=self._ip_allowlist,
+                ip_blacklist=self._ip_blacklist,
+                ip_whitelist=self._ip_whitelist,
             )

@@ -835,7 +844,6 @@ class ReplicationClient(BaseHttpClient):

         self.agent: IAgent = ReplicationAgent(
             hs.get_reactor(),
             hs.config.worker.instance_map,
             contextFactory=hs.get_http_client_context_factory(),
             pool=pool,
         )

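For completeness, the agent wrapper from this file can be used directly around a plain twisted Agent. A hedged sketch under this branch's naming (the IPSet values are illustrative, not real config):

from netaddr import IPSet
from twisted.internet import reactor
from twisted.web.client import Agent

from synapse.http.client import BlacklistingAgentWrapper

agent = BlacklistingAgentWrapper(
    Agent(reactor),
    ip_blacklist=IPSet(["127.0.0.0/8", "10.0.0.0/8"]),
    ip_whitelist=IPSet(["10.1.2.3/32"]),
)
# A request to a blacklisted IP literal fails immediately with a
# FORBIDDEN SynapseError instead of opening a connection.
d = agent.request(b"GET", b"http://10.0.0.1/")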
@@ -36,7 +36,7 @@ from twisted.web.iweb import IAgent, IAgentEndpointFactory, IBodyProducer, IResp

 from synapse.crypto.context_factory import FederationPolicyForHTTPS
 from synapse.http import proxyagent
-from synapse.http.client import BlocklistingAgentWrapper, BlocklistingReactorWrapper
+from synapse.http.client import BlacklistingAgentWrapper, BlacklistingReactorWrapper
 from synapse.http.connectproxyclient import HTTPConnectProxyEndpoint
 from synapse.http.federation.srv_resolver import Server, SrvResolver
 from synapse.http.federation.well_known_resolver import WellKnownResolver
@@ -65,12 +65,12 @@ class MatrixFederationAgent:
     user_agent:
         The user agent header to use for federation requests.

-    ip_allowlist: Allowed IP addresses.
+    ip_whitelist: Allowed IP addresses.

-    ip_blocklist: Disallowed IP addresses.
+    ip_blacklist: Disallowed IP addresses.

     proxy_reactor: twisted reactor to use for connections to the proxy server
-        reactor might have some blocking applied (i.e. for DNS queries),
+        reactor might have some blacklisting applied (i.e. for DNS queries),
         but we need unblocked access to the proxy.

     _srv_resolver:
@@ -87,17 +87,17 @@ class MatrixFederationAgent:
         reactor: ISynapseReactor,
         tls_client_options_factory: Optional[FederationPolicyForHTTPS],
         user_agent: bytes,
-        ip_allowlist: Optional[IPSet],
-        ip_blocklist: IPSet,
+        ip_whitelist: Optional[IPSet],
+        ip_blacklist: IPSet,
         _srv_resolver: Optional[SrvResolver] = None,
         _well_known_resolver: Optional[WellKnownResolver] = None,
     ):
-        # proxy_reactor is not blocklisting reactor
+        # proxy_reactor is not blacklisted
         proxy_reactor = reactor

-        # We need to use a DNS resolver which filters out blocked IP
+        # We need to use a DNS resolver which filters out blacklisted IP
         # addresses, to prevent DNS rebinding.
-        reactor = BlocklistingReactorWrapper(reactor, ip_allowlist, ip_blocklist)
+        reactor = BlacklistingReactorWrapper(reactor, ip_whitelist, ip_blacklist)

         self._clock = Clock(reactor)
         self._pool = HTTPConnectionPool(reactor)
@@ -120,7 +120,7 @@ class MatrixFederationAgent:
         if _well_known_resolver is None:
             _well_known_resolver = WellKnownResolver(
                 reactor,
-                agent=BlocklistingAgentWrapper(
+                agent=BlacklistingAgentWrapper(
                     ProxyAgent(
                         reactor,
                         proxy_reactor,
@@ -128,7 +128,7 @@ class MatrixFederationAgent:
                         contextFactory=tls_client_options_factory,
                         use_proxy=True,
                     ),
-                    ip_blocklist=ip_blocklist,
+                    ip_blacklist=ip_blacklist,
                 ),
                 user_agent=self.user_agent,
             )
@@ -256,7 +256,7 @@ class MatrixHostnameEndpoint:
     Args:
         reactor: twisted reactor to use for underlying requests
         proxy_reactor: twisted reactor to use for connections to the proxy server.
-            'reactor' might have some blocking applied (i.e. for DNS queries),
+            'reactor' might have some blacklisting applied (i.e. for DNS queries),
            but we need unblocked access to the proxy.
        tls_client_options_factory:
            factory to use for fetching client tls options, or none to disable TLS.

@@ -50,7 +50,7 @@ from twisted.internet.interfaces import IReactorTime
 from twisted.internet.task import Cooperator
 from twisted.web.client import ResponseFailed
 from twisted.web.http_headers import Headers
-from twisted.web.iweb import IBodyProducer, IResponse
+from twisted.web.iweb import IAgent, IBodyProducer, IResponse

 import synapse.metrics
 import synapse.util.retryutils
@@ -64,7 +64,7 @@ from synapse.api.errors import (
 from synapse.crypto.context_factory import FederationPolicyForHTTPS
 from synapse.http import QuieterFileBodyProducer
 from synapse.http.client import (
-    BlocklistingAgentWrapper,
+    BlacklistingAgentWrapper,
     BodyExceededMaxSize,
     ByteWriteable,
     _make_scheduler,
@@ -392,15 +392,15 @@ class MatrixFederationHttpClient:
             self.reactor,
             tls_client_options_factory,
             user_agent.encode("ascii"),
-            hs.config.server.federation_ip_range_allowlist,
-            hs.config.server.federation_ip_range_blocklist,
+            hs.config.server.federation_ip_range_whitelist,
+            hs.config.server.federation_ip_range_blacklist,
         )

-        # Use a BlocklistingAgentWrapper to prevent circumventing the IP
-        # blocking via IP literals in server names
-        self.agent = BlocklistingAgentWrapper(
+        # Use a BlacklistingAgentWrapper to prevent circumventing the IP
+        # blacklist via IP literals in server names
+        self.agent: IAgent = BlacklistingAgentWrapper(
             federation_agent,
-            ip_blocklist=hs.config.server.federation_ip_range_blocklist,
+            ip_blacklist=hs.config.server.federation_ip_range_blacklist,
         )

         self.clock = hs.get_clock()

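The two layers this hunk wires together can be reproduced standalone. A hedged sketch under this branch's naming; `None` for the TLS options factory and the IPSet values are illustrative stand-ins for what Synapse derives from homeserver config:

from netaddr import IPSet
from twisted.internet import reactor

from synapse.http.client import BlacklistingAgentWrapper
from synapse.http.federation.matrix_federation_agent import MatrixFederationAgent

ip_blacklist = IPSet(["127.0.0.0/8", "10.0.0.0/8"])

# First layer: the federation agent's wrapped reactor filters DNS answers.
federation_agent = MatrixFederationAgent(
    reactor, None, b"Synapse/sketch", None, ip_blacklist
)
# Second layer: catch IP literals in server names that never hit DNS.
agent = BlacklistingAgentWrapper(federation_agent, ip_blacklist=ip_blacklist)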
150
synapse/http/proxy.py
Normal file
@@ -0,0 +1,150 @@
+# Copyright 2023 The Matrix.org Foundation C.I.C.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import logging
+import urllib.parse
+from typing import TYPE_CHECKING, Any, Optional, Tuple, cast
+
+from twisted.internet import protocol
+from twisted.internet.interfaces import ITCPTransport
+from twisted.internet.protocol import connectionDone
+from twisted.python import failure
+from twisted.python.failure import Failure
+from twisted.web.client import ResponseDone
+from twisted.web.http import PotentialDataLoss
+from twisted.web.http_headers import Headers
+from twisted.web.iweb import IAgent, IResponse
+from twisted.web.resource import IResource
+from twisted.web.server import Site
+
+from synapse.http import QuieterFileBodyProducer
+from synapse.http.server import _AsyncResource
+from synapse.logging.context import make_deferred_yieldable, run_in_background
+from synapse.types import ISynapseReactor
+from synapse.util.async_helpers import timeout_deferred
+
+if TYPE_CHECKING:
+    from synapse.http.site import SynapseRequest
+
+logger = logging.getLogger(__name__)
+
+
+class ProxyResource(_AsyncResource):
+    isLeaf = True
+
+    def __init__(self, reactor: ISynapseReactor, federation_agent: IAgent):
+        super().__init__(True)
+
+        self.reactor = reactor
+        self.agent = federation_agent
+
+    async def _async_render(self, request: "SynapseRequest") -> Tuple[int, Any]:
+        assert request.uri.startswith(b"matrix://")
+
+        logger.info("Got proxy request %s", request.uri)
+
+        headers = Headers()
+        for header_name in (b"User-Agent", b"Authorization", b"Content-Type"):
+            header_value = request.getHeader(header_name)
+            if header_value:
+                headers.addRawHeader(header_name, header_value)
+
+        request_deferred = run_in_background(
+            self.agent.request,
+            request.method,
+            request.uri,
+            headers=headers,
+            bodyProducer=QuieterFileBodyProducer(request.content),
+        )
+        request_deferred = timeout_deferred(
+            request_deferred,
+            timeout=90,
+            reactor=self.reactor,
+        )
+
+        response = await make_deferred_yieldable(request_deferred)
+
+        logger.info("Got proxy response %s", response.code)
+
+        return response.code, response
+
+    def _send_response(
+        self,
+        request: "SynapseRequest",
+        code: int,
+        response_object: Any,
+    ) -> None:
+        response = cast(IResponse, response_object)
+
+        request.setResponseCode(code)
+
+        # Copy headers.
+        for k, v in response.headers.getAllRawHeaders():
+            request.responseHeaders.setRawHeaders(k, v)
+
+        response.deliverBody(_ProxyResponseBody(request))
+
+    def _send_error_response(
+        self,
+        f: failure.Failure,
+        request: "SynapseRequest",
+    ) -> None:
+        request.setResponseCode(502)
+        request.finish()
+
+
+class _ProxyResponseBody(protocol.Protocol):
+    transport: Optional[ITCPTransport] = None
+
+    def __init__(self, request: "SynapseRequest") -> None:
+        self._request = request
+
+    def dataReceived(self, data: bytes) -> None:
+        if self._request._disconnected and self.transport is not None:
+            self.transport.abortConnection()
+            return
+
+        self._request.write(data)
+
+    def connectionLost(self, reason: Failure = connectionDone) -> None:
+        if self._request.finished:
+            return
+
+        if reason.check(ResponseDone):
+            self._request.finish()
+        elif reason.check(PotentialDataLoss):
+            # TODO: ARGH
+            self._request.finish()
+        else:
+            self._request.transport.abortConnection()
+
+
+class ProxySite(Site):
+    def __init__(
+        self,
+        resource: IResource,
+        reactor: ISynapseReactor,
+        federation_agent: IAgent,
+    ):
+        super().__init__(resource, reactor=reactor)
+
+        self._proxy_resource = ProxyResource(reactor, federation_agent)
+
+    def getResourceFor(self, request: "SynapseRequest") -> IResource:
+        uri = urllib.parse.urlparse(request.uri)
+        if uri.scheme == b"matrix":
+            return self._proxy_resource
+
+        return super().getResourceFor(request)
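`ProxySite.getResourceFor` dispatches purely on the URI scheme, so the routing decision can be checked in isolation. A small sketch of that check (plain stdlib, no Synapse required; the helper name is illustrative):

import urllib.parse


def is_federation_proxy_request(uri: bytes) -> bool:
    # Mirrors the check in ProxySite.getResourceFor: only the scheme matters.
    return urllib.parse.urlparse(uri).scheme == b"matrix"


assert is_federation_proxy_request(b"matrix://example.org/_matrix/federation/v1/version")
assert not is_federation_proxy_request(b"https://example.org/_matrix/client/versions")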
Some files were not shown because too many files have changed in this diff.