Compare commits
1 commit
v1.98.0 ... clokep/sta

| Author | SHA1 | Date |
|---|---|---|
|  | 9622bda163 |  |
.github/workflows/docs-pr-netlify.yaml (vendored, 2 changes)

@@ -22,7 +22,7 @@ jobs:
          path: book

      - name: 📤 Deploy to Netlify
        uses: matrix-org/netlify-pr-preview@v3
        uses: matrix-org/netlify-pr-preview@v2
        with:
          path: book
          owner: ${{ github.event.workflow_run.head_repository.owner.login }}
.github/workflows/docs-pr.yaml (vendored, 11 changes)

@@ -6,7 +6,6 @@ on:
      - docs/**
      - book.toml
      - .github/workflows/docs-pr.yaml
      - scripts-dev/schema_versions.py

jobs:
  pages:
@@ -14,22 +13,12 @@ jobs:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
        with:
          # Fetch all history so that the schema_versions script works.
          fetch-depth: 0

      - name: Setup mdbook
        uses: peaceiris/actions-mdbook@adeb05db28a0c0004681db83893d56c0388ea9ea # v1.2.0
        with:
          mdbook-version: '0.4.17'

      - name: Setup python
        uses: actions/setup-python@v4
        with:
          python-version: "3.x"

      - run: "pip install 'packaging>=20.0' 'GitPython>=3.1.20'"

      - name: Build the documentation
        # mdbook will only create an index.html if we're including docs/README.md in SUMMARY.md.
        # However, we're using docs/README.md for other purposes and need to pick a new page
.github/workflows/docs.yaml (vendored, 10 changes)

@@ -51,22 +51,12 @@ jobs:
      - pre
    steps:
      - uses: actions/checkout@v4
        with:
          # Fetch all history so that the schema_versions script works.
          fetch-depth: 0

      - name: Setup mdbook
        uses: peaceiris/actions-mdbook@adeb05db28a0c0004681db83893d56c0388ea9ea # v1.2.0
        with:
          mdbook-version: '0.4.17'

      - name: Setup python
        uses: actions/setup-python@v4
        with:
          python-version: "3.x"

      - run: "pip install 'packaging>=20.0' 'GitPython>=3.1.20'"

      - name: Build the documentation
        # mdbook will only create an index.html if we're including docs/README.md in SUMMARY.md.
        # However, we're using docs/README.md for other purposes and need to pick a new page
.github/workflows/fix_lint.yaml (vendored, 52 changes)

@@ -1,52 +0,0 @@
# A helper workflow to automatically fixup any linting errors on a PR. Must be
# triggered manually.

name: Attempt to automatically fix linting errors

on:
  workflow_dispatch:

jobs:
  fixup:
    name: Fix up
    runs-on: ubuntu-latest

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Install Rust
        uses: dtolnay/rust-toolchain@master
        with:
          # We use nightly so that `fmt` correctly groups together imports, and
          # clippy correctly fixes up the benchmarks.
          toolchain: nightly-2022-12-01
          components: rustfmt
      - uses: Swatinem/rust-cache@v2

      - name: Setup Poetry
        uses: matrix-org/setup-python-poetry@v1
        with:
          install-project: "false"

      - name: Import order (isort)
        continue-on-error: true
        run: poetry run isort .

      - name: Code style (black)
        continue-on-error: true
        run: poetry run black .

      - name: Semantic checks (ruff)
        continue-on-error: true
        run: poetry run ruff --fix .

      - run: cargo clippy --all-features --fix -- -D warnings
        continue-on-error: true

      - run: cargo fmt
        continue-on-error: true

      - uses: stefanzweifel/git-auto-commit-action@v5
        with:
          commit_message: "Attempt to fix linting"
.github/workflows/release-artifacts.yml (vendored, 2 changes)

@@ -130,7 +130,7 @@ jobs:
          python-version: "3.x"

      - name: Install cibuildwheel
        run: python -m pip install cibuildwheel==2.16.2
        run: python -m pip install cibuildwheel==2.9.0

      - name: Set up QEMU to emulate aarch64
        if: matrix.arch == 'aarch64'
CHANGES.md (181 changes)

@@ -1,184 +1,3 @@
# Synapse 1.98.0 (2023-12-12)

Synapse 1.98.0 will be the last Synapse release in 2023; the regular release cadence will resume in January 2024.

Synapse will soon be forked by Element under an AGPLv3.0 licence (with CLA, for
proprietary dual licensing). You can read more about this here:

- https://matrix.org/blog/2023/11/06/future-of-synapse-dendrite/
- https://element.io/blog/element-to-adopt-agplv3/

The Matrix.org Foundation copy of the project will be archived. Any changes needed
by server administrators will be communicated via our usual announcements channels,
but we are striving to make this as seamless as possible.


No significant changes since 1.98.0rc1.


# Synapse 1.98.0rc1 (2023-12-05)

### Features

- Synapse now declares support for Matrix v1.7, v1.8, and v1.9. ([\#16707](https://github.com/matrix-org/synapse/issues/16707))
- Add `on_user_login` [module API](https://matrix-org.github.io/synapse/latest/modules/writing_a_module.html) callback for when a user logs in. ([\#15207](https://github.com/matrix-org/synapse/issues/15207))
- Support [MSC4069: Inhibit profile propagation](https://github.com/matrix-org/matrix-spec-proposals/pull/4069). ([\#16636](https://github.com/matrix-org/synapse/issues/16636))
- Restore tracking of requests and monthly active users when delegating authentication via [MSC3861](https://github.com/matrix-org/synapse/pull/16672) to an OIDC provider. ([\#16672](https://github.com/matrix-org/synapse/issues/16672))
- Add an autojoin setting for server notices rooms, so users may be joined directly instead of receiving an invite. ([\#16699](https://github.com/matrix-org/synapse/issues/16699))
- Follow redirects when downloading media over federation (per [MSC3860](https://github.com/matrix-org/matrix-spec-proposals/pull/3860)). ([\#16701](https://github.com/matrix-org/synapse/issues/16701))

### Bugfixes

- Enable refreshable tokens on the admin registration endpoint. ([\#16642](https://github.com/matrix-org/synapse/issues/16642))
- Consistently bypass rate limits when using the server notice admin API. ([\#16670](https://github.com/matrix-org/synapse/issues/16670))
- Fix a bug introduced in Synapse 1.7.2 where rooms whose power levels lacked an `events` field could not be upgraded. ([\#16725](https://github.com/matrix-org/synapse/issues/16725))
- Fix `GET /_synapse/admin/v1/federation/destinations` [admin API](https://matrix-org.github.io/synapse/latest/usage/administration/admin_api/index.html) returning null (instead of 0) for `retry_last_ts` and `retry_interval`. ([\#16729](https://github.com/matrix-org/synapse/issues/16729))

### Improved Documentation

- Add schema rollback information to documentation. ([\#16661](https://github.com/matrix-org/synapse/issues/16661))
- Fix poetry version typo in the [contributors' guide](https://matrix-org.github.io/synapse/latest/development/contributing_guide.html). ([\#16695](https://github.com/matrix-org/synapse/issues/16695))
- Switch the example UNIX socket paths to `/run`. Add HAProxy example configuration for UNIX sockets. ([\#16700](https://github.com/matrix-org/synapse/issues/16700))
- Add documentation for how to validate the configuration file with the `synapse.config` script. ([\#16714](https://github.com/matrix-org/synapse/issues/16714))

### Internal Changes

- Clean up unused tables. ([\#16522](https://github.com/matrix-org/synapse/issues/16522))
- Reduce a little database load while processing state auth chains. ([\#16552](https://github.com/matrix-org/synapse/issues/16552))
- Reduce database load of pruning old `user_ips`. ([\#16667](https://github.com/matrix-org/synapse/issues/16667))
- Reduce DB load when the forget-on-leave setting is disabled. ([\#16668](https://github.com/matrix-org/synapse/issues/16668))
- Ignore the `encryption_enabled_by_default_for_room_type` setting when creating the server notices room, since the notices will be sent unencrypted anyway. ([\#16677](https://github.com/matrix-org/synapse/issues/16677))
- Correctly read the to-device stream ID on startup using SQLite. ([\#16682](https://github.com/matrix-org/synapse/issues/16682))
- Reorganise test files. ([\#16684](https://github.com/matrix-org/synapse/issues/16684))
- Remove old full schema dumps which are no longer used. ([\#16697](https://github.com/matrix-org/synapse/issues/16697))
- Raise the poetry-core upper bound to <=1.8.1. This allows contributors to import Synapse after `poetry install`ing with Poetry 1.6 and above. Contributed by Mo Balaa. ([\#16702](https://github.com/matrix-org/synapse/issues/16702))
- Add a workflow to try to automatically fix up linting errors in a PR. ([\#16704](https://github.com/matrix-org/synapse/issues/16704))


### Updates to locked dependencies

* Bump cryptography from 41.0.5 to 41.0.6. ([\#16703](https://github.com/matrix-org/synapse/issues/16703))
* Bump cryptography from 41.0.6 to 41.0.7. ([\#16721](https://github.com/matrix-org/synapse/issues/16721))
* Bump idna from 3.4 to 3.6. ([\#16720](https://github.com/matrix-org/synapse/issues/16720))
* Bump jsonschema from 4.19.1 to 4.20.0. ([\#16692](https://github.com/matrix-org/synapse/issues/16692))
* Bump matrix-org/netlify-pr-preview from 2 to 3. ([\#16719](https://github.com/matrix-org/synapse/issues/16719))
* Bump phonenumbers from 8.13.23 to 8.13.26. ([\#16722](https://github.com/matrix-org/synapse/issues/16722))
* Bump prometheus-client from 0.18.0 to 0.19.0. ([\#16691](https://github.com/matrix-org/synapse/issues/16691))
* Bump pyasn1 from 0.5.0 to 0.5.1. ([\#16689](https://github.com/matrix-org/synapse/issues/16689))
* Bump pydantic from 2.4.2 to 2.5.1. ([\#16663](https://github.com/matrix-org/synapse/issues/16663))
* Bump pyo3 (0.19.2→0.20.0), pythonize (0.19.0→0.20.0) and pyo3-log (0.8.1→0.9.0). ([\#16673](https://github.com/matrix-org/synapse/issues/16673))
* Bump pyopenssl from 23.2.0 to 23.3.0. ([\#16662](https://github.com/matrix-org/synapse/issues/16662))
* Bump ruff from 0.1.4 to 0.1.6. ([\#16690](https://github.com/matrix-org/synapse/issues/16690))
* Bump sentry-sdk from 1.32.0 to 1.35.0. ([\#16666](https://github.com/matrix-org/synapse/issues/16666))
* Bump serde from 1.0.192 to 1.0.193. ([\#16693](https://github.com/matrix-org/synapse/issues/16693))
* Bump sphinx-autodoc2 from 0.4.2 to 0.5.0. ([\#16723](https://github.com/matrix-org/synapse/issues/16723))
* Bump types-jsonschema from 4.19.0.4 to 4.20.0.0. ([\#16724](https://github.com/matrix-org/synapse/issues/16724))
* Bump types-pillow from 10.1.0.0 to 10.1.0.2. ([\#16664](https://github.com/matrix-org/synapse/issues/16664))
* Bump types-psycopg2 from 2.9.21.15 to 2.9.21.16. ([\#16665](https://github.com/matrix-org/synapse/issues/16665))
* Bump types-setuptools from 68.2.0.0 to 68.2.0.2. ([\#16688](https://github.com/matrix-org/synapse/issues/16688))

# Synapse 1.97.0 (2023-11-28)

Synapse will soon be forked by Element under an AGPLv3.0 licence (with CLA, for
proprietary dual licensing). You can read more about this here:

- https://matrix.org/blog/2023/11/06/future-of-synapse-dendrite/
- https://element.io/blog/element-to-adopt-agplv3/

The Matrix.org Foundation copy of the project will be archived. Any changes needed
by server administrators will be communicated via our usual announcements channels,
but we are striving to make this as seamless as possible.


No significant changes since 1.97.0rc1.


# Synapse 1.97.0rc1 (2023-11-21)

### Features

- Add support for asynchronous uploads as defined by [MSC2246](https://github.com/matrix-org/matrix-spec-proposals/pull/2246). Contributed by @sumnerevans at @beeper. ([\#15503](https://github.com/matrix-org/synapse/issues/15503))
- Improve the performance of some operations in multi-worker deployments. ([\#16613](https://github.com/matrix-org/synapse/issues/16613), [\#16616](https://github.com/matrix-org/synapse/issues/16616))

### Bugfixes

- Fix a long-standing bug where some queries updated the same row twice. Introduced in Synapse 1.57.0. ([\#16609](https://github.com/matrix-org/synapse/issues/16609))
- Fix a long-standing bug where Synapse would not unbind third-party identifiers for Application Service users when deactivated and would not emit a compliant response. ([\#16617](https://github.com/matrix-org/synapse/issues/16617))
- Fix sending out-of-order `POSITION` over replication, causing additional database load. ([\#16639](https://github.com/matrix-org/synapse/issues/16639))

### Improved Documentation

- Note that the option [`outbound_federation_restricted_to`](https://matrix-org.github.io/synapse/latest/usage/configuration/config_documentation.html#outbound_federation_restricted_to) was added in Synapse 1.89.0, and fix a nearby formatting error. ([\#16628](https://github.com/matrix-org/synapse/issues/16628))
- Update parameter information for the `/timestamp_to_event` admin API. ([\#16631](https://github.com/matrix-org/synapse/issues/16631))
- Provide an example for a common encrypted media response from the admin user media API and mention possible null values. ([\#16654](https://github.com/matrix-org/synapse/issues/16654))

### Internal Changes

- Remove whole table locks on push rule modifications. Contributed by Nick @ Beeper (@fizzadar). ([\#16051](https://github.com/matrix-org/synapse/issues/16051))
- Support reactor tick timings on more types of event loops. ([\#16532](https://github.com/matrix-org/synapse/issues/16532))
- Improve type hints. ([\#16564](https://github.com/matrix-org/synapse/issues/16564), [\#16611](https://github.com/matrix-org/synapse/issues/16611), [\#16612](https://github.com/matrix-org/synapse/issues/16612))
- Avoid executing no-op queries. ([\#16583](https://github.com/matrix-org/synapse/issues/16583))
- Simplify persistence code to be per-room. ([\#16584](https://github.com/matrix-org/synapse/issues/16584))
- Use standard SQL helpers in persistence code. ([\#16585](https://github.com/matrix-org/synapse/issues/16585))
- Avoid updating the stream cache unnecessarily. ([\#16586](https://github.com/matrix-org/synapse/issues/16586))
- Improve performance when using opentracing. ([\#16589](https://github.com/matrix-org/synapse/issues/16589))
- Run push rule evaluator setup in parallel. ([\#16590](https://github.com/matrix-org/synapse/issues/16590))
- Improve tests of the SQL generator. ([\#16596](https://github.com/matrix-org/synapse/issues/16596))
- Use more generic database methods. ([\#16615](https://github.com/matrix-org/synapse/issues/16615))
- Use `dbname` instead of the deprecated `database` connection parameter for psycopg2. ([\#16618](https://github.com/matrix-org/synapse/issues/16618))
- Add an internal [Admin API endpoint](https://matrix-org.github.io/synapse/v1.97/usage/configuration/config_documentation.html#allow-replacing-master-cross-signing-key-without-user-interactive-auth) to temporarily grant the ability to update an existing cross-signing key without UIA. ([\#16634](https://github.com/matrix-org/synapse/issues/16634))
- Improve references to GitHub issues. ([\#16637](https://github.com/matrix-org/synapse/issues/16637), [\#16638](https://github.com/matrix-org/synapse/issues/16638))
- More efficiently handle no-op `POSITION` over replication. ([\#16640](https://github.com/matrix-org/synapse/issues/16640), [\#16655](https://github.com/matrix-org/synapse/issues/16655))
- Speed up deleting of device messages when deleting a device. ([\#16643](https://github.com/matrix-org/synapse/issues/16643))
- Speed up persisting large numbers of outliers. ([\#16649](https://github.com/matrix-org/synapse/issues/16649))
- Reduce max concurrency of background tasks, reducing potential max DB load. ([\#16656](https://github.com/matrix-org/synapse/issues/16656), [\#16660](https://github.com/matrix-org/synapse/issues/16660))
- Speed up purging rooms by adding an index to `event_push_summary`. ([\#16657](https://github.com/matrix-org/synapse/issues/16657))


### Updates to locked dependencies

* Bump prometheus-client from 0.17.1 to 0.18.0. ([\#16626](https://github.com/matrix-org/synapse/issues/16626))
* Bump pyicu from 2.11 to 2.12. ([\#16603](https://github.com/matrix-org/synapse/issues/16603))
* Bump requests-toolbelt from 0.10.1 to 1.0.0. ([\#16659](https://github.com/matrix-org/synapse/issues/16659))
* Bump ruff from 0.0.292 to 0.1.4. ([\#16600](https://github.com/matrix-org/synapse/issues/16600))
* Bump serde from 1.0.190 to 1.0.192. ([\#16627](https://github.com/matrix-org/synapse/issues/16627))
* Bump serde_json from 1.0.107 to 1.0.108. ([\#16604](https://github.com/matrix-org/synapse/issues/16604))
* Bump setuptools-rust from 1.8.0 to 1.8.1. ([\#16601](https://github.com/matrix-org/synapse/issues/16601))
* Bump towncrier from 23.6.0 to 23.11.0. ([\#16622](https://github.com/matrix-org/synapse/issues/16622))
* Bump treq from 22.2.0 to 23.11.0. ([\#16623](https://github.com/matrix-org/synapse/issues/16623))
* Bump twisted from 23.8.0 to 23.10.0. ([\#16588](https://github.com/matrix-org/synapse/issues/16588))
* Bump types-bleach from 6.1.0.0 to 6.1.0.1. ([\#16624](https://github.com/matrix-org/synapse/issues/16624))
* Bump types-jsonschema from 4.19.0.3 to 4.19.0.4. ([\#16599](https://github.com/matrix-org/synapse/issues/16599))
* Bump types-pyopenssl from 23.2.0.2 to 23.3.0.0. ([\#16625](https://github.com/matrix-org/synapse/issues/16625))
* Bump types-pyyaml from 6.0.12.11 to 6.0.12.12. ([\#16602](https://github.com/matrix-org/synapse/issues/16602))

# Synapse 1.96.1 (2023-11-17)

Synapse will soon be forked by Element under an AGPLv3.0 licence (with CLA, for
proprietary dual licensing). You can read more about this here:

* https://matrix.org/blog/2023/11/06/future-of-synapse-dendrite/
* https://element.io/blog/element-to-adopt-agplv3/

The Matrix.org Foundation copy of the project will be archived. Any changes needed
by server administrators will be communicated via our usual
[announcements channels](https://matrix.to/#/#homeowners:matrix.org), but we are
striving to make this as seamless as possible.

This minor release was needed only because of CI-related trouble on [v1.96.0](https://github.com/matrix-org/synapse/releases/tag/v1.96.0), which was never released.

### Internal Changes

- Fix building of wheels in CI. ([\#16653](https://github.com/matrix-org/synapse/issues/16653))

# Synapse 1.96.0 (2023-11-16)

### Bugfixes

- Fix "'int' object is not iterable" error in `set_device_id_for_pushers` background update introduced in Synapse 1.95.0. ([\#16594](https://github.com/matrix-org/synapse/issues/16594))

# Synapse 1.96.0rc1 (2023-10-31)

### Features
Cargo.lock (generated, 68 changes)

@@ -90,12 +90,6 @@ dependencies = [
 "version_check",
]

[[package]]
name = "heck"
version = "0.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8"

[[package]]
name = "hex"
version = "0.4.3"
@@ -104,9 +98,9 @@ checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70"

[[package]]
name = "indoc"
version = "2.0.4"
version = "1.0.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1e186cfbae8084e513daff4240b4797e342f988cecda4fb6c939150f96315fd8"
checksum = "adab1eaa3408fb7f0c777a73e7465fd5656136fc93b670eb6df3c88c2c1344e3"

[[package]]
name = "itoa"
@@ -197,9 +191,9 @@ dependencies = [

[[package]]
name = "pyo3"
version = "0.20.0"
version = "0.19.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "04e8453b658fe480c3e70c8ed4e3d3ec33eb74988bd186561b0cc66b85c3bc4b"
checksum = "e681a6cfdc4adcc93b4d3cf993749a4552018ee0a9b65fc0ccfad74352c72a38"
dependencies = [
 "anyhow",
 "cfg-if",
@@ -215,9 +209,9 @@ dependencies = [

[[package]]
name = "pyo3-build-config"
version = "0.20.0"
version = "0.19.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a96fe70b176a89cff78f2fa7b3c930081e163d5379b4dcdf993e3ae29ca662e5"
checksum = "076c73d0bc438f7a4ef6fdd0c3bb4732149136abd952b110ac93e4edb13a6ba5"
dependencies = [
 "once_cell",
 "target-lexicon",
@@ -225,9 +219,9 @@ dependencies = [

[[package]]
name = "pyo3-ffi"
version = "0.20.0"
version = "0.19.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "214929900fd25e6604661ed9cf349727c8920d47deff196c4e28165a6ef2a96b"
checksum = "e53cee42e77ebe256066ba8aa77eff722b3bb91f3419177cf4cd0f304d3284d9"
dependencies = [
 "libc",
 "pyo3-build-config",
@@ -235,9 +229,9 @@ dependencies = [

[[package]]
name = "pyo3-log"
version = "0.9.0"
version = "0.8.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4c10808ee7250403bedb24bc30c32493e93875fef7ba3e4292226fe924f398bd"
checksum = "c09c2b349b6538d8a73d436ca606dab6ce0aaab4dad9e6b7bdd57a4f556c3bc3"
dependencies = [
 "arc-swap",
 "log",
@@ -246,33 +240,32 @@ dependencies = [

[[package]]
name = "pyo3-macros"
version = "0.20.0"
version = "0.19.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dac53072f717aa1bfa4db832b39de8c875b7c7af4f4a6fe93cdbf9264cf8383b"
checksum = "dfeb4c99597e136528c6dd7d5e3de5434d1ceaf487436a3f03b2d56b6fc9efd1"
dependencies = [
 "proc-macro2",
 "pyo3-macros-backend",
 "quote",
 "syn",
 "syn 1.0.104",
]

[[package]]
name = "pyo3-macros-backend"
version = "0.20.0"
version = "0.19.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7774b5a8282bd4f25f803b1f0d945120be959a36c72e08e7cd031c792fdfd424"
checksum = "947dc12175c254889edc0c02e399476c2f652b4b9ebd123aa655c224de259536"
dependencies = [
 "heck",
 "proc-macro2",
 "quote",
 "syn",
 "syn 1.0.104",
]

[[package]]
name = "pythonize"
version = "0.20.0"
version = "0.19.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ffd1c3ef39c725d63db5f9bc455461bafd80540cb7824c61afb823501921a850"
checksum = "8e35b716d430ace57e2d1b4afb51c9e5b7c46d2bce72926e07f9be6a98ced03e"
dependencies = [
 "pyo3",
 "serde",
@@ -339,22 +332,22 @@ checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd"

[[package]]
name = "serde"
version = "1.0.193"
version = "1.0.192"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "25dd9975e68d0cb5aa1120c288333fc98731bd1dd12f561e468ea4728c042b89"
checksum = "bca2a08484b285dcb282d0f67b26cadc0df8b19f8c12502c13d966bf9482f001"
dependencies = [
 "serde_derive",
]

[[package]]
name = "serde_derive"
version = "1.0.193"
version = "1.0.192"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "43576ca501357b9b071ac53cdc7da8ef0cbd9493d8df094cd821777ea6e894d3"
checksum = "d6c7207fbec9faa48073f3e3074cbe553af6ea512d7c21ba46e434e70ea9fbc1"
dependencies = [
 "proc-macro2",
 "quote",
 "syn",
 "syn 2.0.28",
]

[[package]]
@@ -380,6 +373,17 @@ version = "2.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6bdef32e8150c2a081110b42772ffe7d7c9032b606bc226c8260fd97e0976601"

[[package]]
name = "syn"
version = "1.0.104"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4ae548ec36cf198c0ef7710d3c230987c2d6d7bd98ad6edc0274462724c585ce"
dependencies = [
 "proc-macro2",
 "quote",
 "unicode-ident",
]

[[package]]
name = "syn"
version = "2.0.28"
@@ -428,9 +432,9 @@ checksum = "6ceab39d59e4c9499d4e5a8ee0e2735b891bb7308ac83dfb4e80cad195c9f6f3"

[[package]]
name = "unindent"
version = "0.2.3"
version = "0.1.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c7de7d73e1754487cb58364ee906a499937a0dfabd86bcb980fa99ec8c8fa2ce"
checksum = "58ee9362deb4a96cef4d437d1ad49cffc9b9e92d202b6995674e928ce684f112"

[[package]]
name = "version_check"
book.toml (12 changes)

@@ -34,14 +34,6 @@ additional-css = [
    "docs/website_files/table-of-contents.css",
    "docs/website_files/remove-nav-buttons.css",
    "docs/website_files/indent-section-headers.css",
    "docs/website_files/version-picker.css",
]
additional-js = [
    "docs/website_files/table-of-contents.js",
    "docs/website_files/version-picker.js",
    "docs/website_files/version.js",
]
theme = "docs/website_files/theme"

[preprocessor.schema_versions]
command = "./scripts-dev/schema_versions.py"
additional-js = ["docs/website_files/table-of-contents.js"]
theme = "docs/website_files/theme"
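The `[preprocessor.schema_versions]` table above wires `scripts-dev/schema_versions.py` into the mdbook build, which is presumably what fills in the `<!-- REPLACE_WITH_SCHEMA_VERSIONS -->` marker seen later in the upgrade notes. mdbook preprocessors follow a simple protocol: they receive a `[context, book]` JSON pair on stdin and must print the (possibly modified) book JSON to stdout. A minimal sketch of that protocol follows; this is not the real `schema_versions.py`, and the marker substitution body is illustrative only:

```python
#!/usr/bin/env python3
# Sketch of an mdbook preprocessor in the shape book.toml expects.
# Assumption: only the stdin/stdout JSON protocol is shown here; the real
# schema_versions.py computes an actual version table instead.
import json
import sys

MARKER = "<!-- REPLACE_WITH_SCHEMA_VERSIONS -->"


def rewrite(item):
    # Book items are {"Chapter": {...}} dicts, the string "Separator", or
    # {"PartTitle": ...}; only chapters carry content to rewrite.
    if not isinstance(item, dict) or "Chapter" not in item:
        return
    chapter = item["Chapter"]
    chapter["content"] = chapter["content"].replace(MARKER, "(schema version table)")
    for sub in chapter["sub_items"]:
        rewrite(sub)


if __name__ == "__main__":
    if len(sys.argv) > 1 and sys.argv[1] == "supports":
        sys.exit(0)  # declare support for every renderer
    # mdbook passes [context, book] as JSON on stdin...
    context, book = json.load(sys.stdin)
    for section in book["sections"]:
        rewrite(section)
    # ...and expects the modified book back on stdout.
    print(json.dumps(book))
```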
changelog.d/15503.feature (new file, 1 change)
@@ -0,0 +1 @@
Add support for asynchronous uploads as defined by [MSC2246](https://github.com/matrix-org/matrix-spec-proposals/pull/2246). Contributed by @sumnerevans at @beeper.

changelog.d/16051.misc (new file, 1 change)
@@ -0,0 +1 @@
Remove whole table locks on push rule modifications. Contributed by Nick @ Beeper (@fizzadar).

changelog.d/16456.misc (new file, 1 change)
@@ -0,0 +1 @@
Add a Postgres `REPLICA IDENTITY` to tables that do not have an implicit one. This should allow use of Postgres logical replication.

changelog.d/16532.misc (new file, 1 change)
@@ -0,0 +1 @@
Support reactor tick timings on more types of event loops.

changelog.d/16564.misc (new file, 1 change)
@@ -0,0 +1 @@
Improve type hints.

changelog.d/16583.misc (new file, 1 change)
@@ -0,0 +1 @@
Avoid executing no-op queries.

changelog.d/16584.misc (new file, 1 change)
@@ -0,0 +1 @@
Simplify persistence code to be per-room.

changelog.d/16585.misc (new file, 1 change)
@@ -0,0 +1 @@
Use standard SQL helpers in persistence code.

changelog.d/16586.misc (new file, 1 change)
@@ -0,0 +1 @@
Avoid updating the stream cache unnecessarily.

changelog.d/16588.misc (new file, 1 change)
@@ -0,0 +1 @@
Bump twisted from 23.8.0 to 23.10.0.

changelog.d/16589.misc (new file, 1 change)
@@ -0,0 +1 @@
Improve performance when using opentracing.

changelog.d/16590.misc (new file, 1 change)
@@ -0,0 +1 @@
Run push rule evaluator setup in parallel.

changelog.d/16596.misc (new file, 1 change)
@@ -0,0 +1 @@
Improve tests of the SQL generator.

changelog.d/16605.misc (new file, 1 change)
@@ -0,0 +1 @@
Bump setuptools-rust from 1.8.0 to 1.8.1.

changelog.d/16609.bugfix (new file, 1 change)
@@ -0,0 +1 @@
Fix a long-standing bug where some queries updated the same row twice. Introduced in Synapse 1.57.0.

changelog.d/16611.misc (new file, 1 change)
@@ -0,0 +1 @@
Improve type hints.

changelog.d/16612.misc (new file, 1 change)
@@ -0,0 +1 @@
Improve type hints.

changelog.d/16613.feature (new file, 1 change)
@@ -0,0 +1 @@
Improve the performance of some operations in multi-worker deployments.

changelog.d/16615.misc (new file, 1 change)
@@ -0,0 +1 @@
Use more generic database methods.

changelog.d/16616.feature (new file, 1 change)
@@ -0,0 +1 @@
Improve the performance of some operations in multi-worker deployments.

changelog.d/16617.bugfix (new file, 1 change)
@@ -0,0 +1 @@
Fix a long-standing bug where Synapse would not unbind third-party identifiers for Application Service users when deactivated and would not emit a compliant response.

changelog.d/16618.misc (new file, 1 change)
@@ -0,0 +1 @@
Use `dbname` instead of the deprecated `database` connection parameter for psycopg2.

changelog.d/16628.doc (new file, 1 change)
@@ -0,0 +1 @@
Note that the option [`outbound_federation_restricted_to`](https://matrix-org.github.io/synapse/latest/usage/configuration/config_documentation.html#outbound_federation_restricted_to) was added in Synapse 1.89.0, and fix a nearby formatting error.

changelog.d/16631.doc (new file, 1 change)
@@ -0,0 +1 @@
Update parameter information for the `/timestamp_to_event` admin API.

changelog.d/16634.misc (new file, 1 change)
@@ -0,0 +1 @@
Add an internal [Admin API endpoint](https://matrix-org.github.io/synapse/v1.97/usage/configuration/config_documentation.html#allow-replacing-master-cross-signing-key-without-user-interactive-auth) to temporarily grant the ability to update an existing cross-signing key without UIA.

changelog.d/16637.misc (new file, 1 change)
@@ -0,0 +1 @@
Improve references to GitHub issues.

changelog.d/16638.misc (new file, 1 change)
@@ -0,0 +1 @@
Improve references to GitHub issues.
debian/changelog (vendored, 36 changes)

@@ -1,39 +1,3 @@
matrix-synapse-py3 (1.98.0) stable; urgency=medium

  * New Synapse release 1.98.0.

 -- Synapse Packaging team <packages@matrix.org>  Tue, 12 Dec 2023 15:04:31 +0000

matrix-synapse-py3 (1.98.0~rc1) stable; urgency=medium

  * New Synapse release 1.98.0rc1.

 -- Synapse Packaging team <packages@matrix.org>  Tue, 05 Dec 2023 13:08:42 +0000

matrix-synapse-py3 (1.97.0) stable; urgency=medium

  * New Synapse release 1.97.0.

 -- Synapse Packaging team <packages@matrix.org>  Tue, 28 Nov 2023 14:08:58 +0000

matrix-synapse-py3 (1.97.0~rc1) stable; urgency=medium

  * New Synapse release 1.97.0rc1.

 -- Synapse Packaging team <packages@matrix.org>  Tue, 21 Nov 2023 12:32:03 +0000

matrix-synapse-py3 (1.96.1) stable; urgency=medium

  * New synapse release 1.96.1.

 -- Synapse Packaging team <packages@matrix.org>  Fri, 17 Nov 2023 12:48:45 +0000

matrix-synapse-py3 (1.96.0) stable; urgency=medium

  * New synapse release 1.96.0.

 -- Synapse Packaging team <packages@matrix.org>  Thu, 16 Nov 2023 17:54:26 +0000

matrix-synapse-py3 (1.96.0~rc1) stable; urgency=medium

  * New Synapse release 1.96.0rc1.
@@ -618,16 +618,6 @@ A response body like the following is returned:
      "quarantined_by": null,
      "safe_from_quarantine": false,
      "upload_name": "test2.png"
    },
    {
      "created_ts": 300400,
      "last_access_ts": 300700,
      "media_id": "BzYNLRUgGHphBkdKGbzXwbjX",
      "media_length": 1337,
      "media_type": "application/octet-stream",
      "quarantined_by": null,
      "safe_from_quarantine": false,
      "upload_name": null
    }
  ],
  "next_token": 3,
@@ -689,17 +679,16 @@ The following fields are returned in the JSON response body:
- `media` - An array of objects, each containing information about a media item.
  Media objects contain the following fields:
  - `created_ts` - integer - Timestamp when the content was uploaded in ms.
  - `last_access_ts` - integer or null - Timestamp when the content was last accessed in ms.
    Null if there was no access yet.
  - `last_access_ts` - integer - Timestamp when the content was last accessed in ms.
  - `media_id` - string - The id used to refer to the media. Details about the format
    are documented under
    [media repository](../media_repository.md).
  - `media_length` - integer - Length of the media in bytes.
  - `media_type` - string - The MIME-type of the media.
  - `quarantined_by` - string or null - The user ID that initiated the quarantine request
    for this media. Null if not quarantined.
  - `quarantined_by` - string - The user ID that initiated the quarantine request
    for this media.
  - `safe_from_quarantine` - bool - Status if this media is safe from quarantining.
  - `upload_name` - string or null - The name the media was uploaded with. Null if not provided during upload.
  - `upload_name` - string - The name the media was uploaded with.
- `next_token`: integer - Indication for pagination. See above.
- `total` - integer - Total number of media.
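The hunk above tightens the field documentation for the admin API that lists a user's media, clarifying which fields can be null. As a quick illustration of consuming those fields, here is a hedged Python sketch; the endpoint path, homeserver URL, token, and user ID are assumptions for illustration and are not taken from this diff:

```python
# Hypothetical client for the per-user media listing admin API.
# The /_synapse/admin/v1/users/<user_id>/media path and the "limit"
# pagination parameter are assumptions based on the surrounding docs.
import requests

BASE_URL = "https://synapse.example.com"  # assumed homeserver URL
ADMIN_TOKEN = "<admin access token>"      # assumed admin access token
USER_ID = "@alice:example.com"            # assumed user ID

resp = requests.get(
    f"{BASE_URL}/_synapse/admin/v1/users/{USER_ID}/media",
    headers={"Authorization": f"Bearer {ADMIN_TOKEN}"},
    params={"limit": 10},
)
resp.raise_for_status()
body = resp.json()

for item in body["media"]:
    # Per the field list above, these three fields may be null.
    last_access = item["last_access_ts"]  # null if never accessed
    quarantined = item["quarantined_by"]  # null if not quarantined
    upload_name = item["upload_name"]     # null if not provided at upload
    print(item["media_id"], item["media_type"], last_access, quarantined, upload_name)

print("total:", body["total"])
```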
@@ -66,7 +66,7 @@ Of their installation methods, we recommend

```shell
pip install --user pipx
pipx install poetry==1.5.1  # Problems with Poetry 1.6, see https://github.com/matrix-org/synapse/issues/16147
pipx install poetry==1.5.2  # Problems with Poetry 1.6, see https://github.com/matrix-org/synapse/issues/16147
```

but see poetry's [installation instructions](https://python-poetry.org/docs/#installation)
@@ -42,16 +42,3 @@ operations to keep track of them. (e.g. add them to a database table). The user
represented by their Matrix user ID.

If multiple modules implement this callback, Synapse runs them all in order.

### `on_user_login`

_First introduced in Synapse v1.98.0_

```python
async def on_user_login(user_id: str, auth_provider_type: str, auth_provider_id: str) -> None
```

Called after successful login or registration of a user, for cases when the module needs to perform extra operations after auth. The user is represented by their Matrix user ID.

If multiple modules implement this callback, Synapse runs them all in order.
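The removed section above documents the `on_user_login` callback added in Synapse v1.98.0. For concreteness, a minimal module sketch using that signature follows; note that the registration helper name is an assumption on my part, since the diff only shows the callback's signature:

```python
# Hypothetical module using the on_user_login callback documented above.
# Only the callback signature comes from the docs in this diff; the
# registration method name below is an assumption, so check the module
# API docs for the exact helper that accepts on_user_login.
import logging

from synapse.module_api import ModuleApi

logger = logging.getLogger(__name__)


class LoginAuditExample:
    def __init__(self, config: dict, api: ModuleApi):
        self._api = api
        # Assumed registration hook.
        api.register_account_validity_callbacks(
            on_user_login=self.on_user_login,
        )

    async def on_user_login(
        self, user_id: str, auth_provider_type: str, auth_provider_id: str
    ) -> None:
        # Runs after a successful login; a real module might record the
        # event in a database table instead of just logging it.
        logger.info(
            "login: user=%s provider_type=%s provider_id=%s",
            user_id,
            auth_provider_type,
            auth_provider_id,
        )
```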
@@ -181,11 +181,7 @@ frontend matrix-federation
backend matrix
  server matrix 127.0.0.1:8008
```
Example configuration, if using a UNIX socket. The configuration lines regarding the frontends do not need to be modified.
```
backend matrix
  server matrix unix@/run/synapse/main_public.sock
```

[Delegation](delegate.md) example:
```
@@ -46,7 +46,6 @@ server_notices:
  system_mxid_display_name: "Server Notices"
  system_mxid_avatar_url: "mxc://server.com/oumMVlgDnLYFaPVkExemNVVZ"
  room_name: "Server Notices"
  auto_join: true
```

The only compulsory setting is `system_mxid_localpart`, which defines the user
@@ -56,8 +55,6 @@ room which will be created.
`system_mxid_display_name` and `system_mxid_avatar_url` can be used to set the
displayname and avatar of the Server Notices user.

`auto_join` will autojoin users to the notices room instead of sending an invite.

## Sending notices

To send server notices to users you can use the
@@ -88,15 +88,6 @@ process, for example:
dpkg -i matrix-synapse-py3_1.3.0+stretch1_amd64.deb
```

Generally Synapse database schemas are compatible across multiple versions, but once
a version of Synapse is deployed you may not be able to roll back automatically.
The following table gives the version ranges and the earliest version they can
be rolled back to. E.g. Synapse versions v1.58.0 through v1.61.1 can be rolled
back safely to v1.57.0, but starting with v1.62.0 it is only safe to roll back to
v1.61.0.

<!-- REPLACE_WITH_SCHEMA_VERSIONS -->

# Upgrading to v1.93.0

## Minimum supported Rust version
@@ -33,23 +33,6 @@ In addition, configuration options referring to size use the following suffixes:

For example, setting `max_avatar_size: 10M` means that Synapse will not accept files larger than 10,485,760 bytes
for a user avatar.

## Config Validation

The configuration file can be validated with the following command:
```bash
python -m synapse.config read <config key to print> -c <path to config>
```

To validate the entire file, omit `read <config key to print>`:
```bash
python -m synapse.config -c <path to config>
```

To see how to set other options, check the help reference:
```bash
python -m synapse.config --help
```

### YAML
The configuration file is a [YAML](https://yaml.org/) file, which means that certain syntax rules
apply if you want your config file to be read properly. A few helpful things to know:
@@ -583,7 +566,7 @@ listeners:
  # Note that x_forwarded will default to true, when using a UNIX socket. Please see
  # https://matrix-org.github.io/synapse/latest/reverse_proxy.html.
  #
  - path: /run/synapse/main_public.sock
  - path: /var/run/synapse/main_public.sock
    type: http
    resources:
      - names: [client, federation]
@@ -3832,8 +3815,6 @@ Sub-options for this setting include:
* `system_mxid_display_name`: set the display name of the "notices" user
* `system_mxid_avatar_url`: set the avatar for the "notices" user
* `room_name`: set the room name of the server notices room
* `auto_join`: boolean. If true, the user will be automatically joined to the room instead of being invited.
  Defaults to false. _Added in Synapse 1.98.0._

Example configuration:
```yaml
@@ -3842,7 +3823,6 @@ server_notices:
  system_mxid_display_name: "Server Notices"
  system_mxid_avatar_url: "mxc://server.com/oumMVlgDnLYFaPVkExemNVVZ"
  room_name: "Server Notices"
  auto_join: true
```
---
### `enable_room_list_search`
@@ -4235,9 +4215,9 @@ Example configuration(#2, for UNIX sockets):
```yaml
instance_map:
  main:
    path: /run/synapse/main_replication.sock
    path: /var/run/synapse/main_replication.sock
  worker1:
    path: /run/synapse/worker1_replication.sock
    path: /var/run/synapse/worker1_replication.sock
```
---
### `stream_writers`
@@ -4423,13 +4403,13 @@ Example configuration(#2, using UNIX sockets with a `replication` listener):
```yaml
worker_listeners:
  - type: http
    path: /run/synapse/worker_replication.sock
    resources:
      - names: [replication]
  - type: http
    path: /run/synapse/worker_public.sock
    path: /var/run/synapse/worker_public.sock
    resources:
      - names: [client, federation]
  - type: http
    path: /var/run/synapse/worker_replication.sock
    resources:
      - names: [replication]
```
---
### `worker_manhole`
@@ -24,11 +24,6 @@ Finally, we also stylise the chapter titles in the left sidebar by indenting them
slightly so that they are more visually distinguishable from the section headers
(the bold titles). This is done through the `indent-section-headers.css` file.

In addition to these modifications, we have added a version picker to the documentation.
Users can switch between documentation for different versions of Synapse.
This functionality was implemented through the `version-picker.js` and
`version-picker.css` files.

More information can be found in mdbook's official documentation for
[injecting page JS/CSS](https://rust-lang.github.io/mdBook/format/config.html)
and
@@ -131,18 +131,6 @@
            <i class="fa fa-search"></i>
          </button>
          {{/if}}
          <div class="version-picker">
            <div class="dropdown">
              <div class="select">
                <span></span>
                <i class="fa fa-chevron-down"></i>
              </div>
              <input type="hidden" name="version">
              <ul class="dropdown-menu">
                <!-- Versions will be added dynamically in version-picker.js -->
              </ul>
            </div>
          </div>
        </div>

        <h1 class="menu-title">{{ book_title }}</h1>
@@ -321,4 +309,4 @@
{{/if}}

</body>
</html>
</html>
@@ -1,78 +0,0 @@
.version-picker {
    display: flex;
    align-items: center;
}

.version-picker .dropdown {
    width: 130px;
    max-height: 29px;
    margin-left: 10px;
    display: inline-block;
    border-radius: 4px;
    border: 1px solid var(--theme-popup-border);
    position: relative;
    font-size: 13px;
    color: var(--fg);
    height: 100%;
    text-align: left;
}
.version-picker .dropdown .select {
    cursor: pointer;
    display: block;
    padding: 5px 2px 5px 15px;
}
.version-picker .dropdown .select > i {
    font-size: 10px;
    color: var(--fg);
    cursor: pointer;
    float: right;
    line-height: 20px !important;
}
.version-picker .dropdown:hover {
    border: 1px solid var(--theme-popup-border);
}
.version-picker .dropdown:active {
    background-color: var(--theme-popup-bg);
}
.version-picker .dropdown.active:hover,
.version-picker .dropdown.active {
    border: 1px solid var(--theme-popup-border);
    border-radius: 2px 2px 0 0;
    background-color: var(--theme-popup-bg);
}
.version-picker .dropdown.active .select > i {
    transform: rotate(-180deg);
}
.version-picker .dropdown .dropdown-menu {
    position: absolute;
    background-color: var(--theme-popup-bg);
    width: 100%;
    left: -1px;
    right: 1px;
    margin-top: 1px;
    border: 1px solid var(--theme-popup-border);
    border-radius: 0 0 4px 4px;
    overflow: hidden;
    display: none;
    max-height: 300px;
    overflow-y: auto;
    z-index: 9;
}
.version-picker .dropdown .dropdown-menu li {
    font-size: 12px;
    padding: 6px 20px;
    cursor: pointer;
}
.version-picker .dropdown .dropdown-menu {
    padding: 0;
    list-style: none;
}
.version-picker .dropdown .dropdown-menu li:hover {
    background-color: var(--theme-hover);
}
.version-picker .dropdown .dropdown-menu li.active::before {
    display: inline-block;
    content: "✓";
    margin-inline-start: -14px;
    width: 14px;
}
@@ -1,127 +0,0 @@

const dropdown = document.querySelector('.version-picker .dropdown');
const dropdownMenu = dropdown.querySelector('.dropdown-menu');

fetchVersions(dropdown, dropdownMenu).then(() => {
    initializeVersionDropdown(dropdown, dropdownMenu);
});

/**
 * Initialize the dropdown functionality for version selection.
 *
 * @param {Element} dropdown - The dropdown element.
 * @param {Element} dropdownMenu - The dropdown menu element.
 */
function initializeVersionDropdown(dropdown, dropdownMenu) {
    // Toggle the dropdown menu on click
    dropdown.addEventListener('click', function () {
        this.setAttribute('tabindex', 1);
        this.classList.toggle('active');
        dropdownMenu.style.display = (dropdownMenu.style.display === 'block') ? 'none' : 'block';
    });

    // Remove the 'active' class and hide the dropdown menu on focusout
    dropdown.addEventListener('focusout', function () {
        this.classList.remove('active');
        dropdownMenu.style.display = 'none';
    });

    // Handle item selection within the dropdown menu
    const dropdownMenuItems = dropdownMenu.querySelectorAll('li');
    dropdownMenuItems.forEach(function (item) {
        item.addEventListener('click', function () {
            dropdownMenuItems.forEach(function (item) {
                item.classList.remove('active');
            });
            this.classList.add('active');
            dropdown.querySelector('span').textContent = this.textContent;
            dropdown.querySelector('input').value = this.getAttribute('id');

            window.location.href = changeVersion(window.location.href, this.textContent);
        });
    });
};

/**
 * This function fetches the available versions from a GitHub repository
 * and inserts them into the version picker.
 *
 * @param {Element} dropdown - The dropdown element.
 * @param {Element} dropdownMenu - The dropdown menu element.
 * @returns {Promise<Array<string>>} A promise that resolves with an array of available versions.
 */
function fetchVersions(dropdown, dropdownMenu) {
    return new Promise((resolve, reject) => {
        window.addEventListener("load", () => {

            fetch("https://api.github.com/repos/matrix-org/synapse/git/trees/gh-pages", {
                cache: "force-cache",
            }).then(res =>
                res.json()
            ).then(resObject => {
                const excluded = ['dev-docs', 'v1.91.0', 'v1.80.0', 'v1.69.0'];
                const tree = resObject.tree.filter(item => item.type === "tree" && !excluded.includes(item.path));
                const versions = tree.map(item => item.path).sort(sortVersions);

                // Create a list of <li> items for versions
                versions.forEach((version) => {
                    const li = document.createElement("li");
                    li.textContent = version;
                    li.id = version;

                    if (window.SYNAPSE_VERSION === version) {
                        li.classList.add('active');
                        dropdown.querySelector('span').textContent = version;
                        dropdown.querySelector('input').value = version;
                    }

                    dropdownMenu.appendChild(li);
                });

                resolve(versions);

            }).catch(ex => {
                console.error("Failed to fetch version data", ex);
                reject(ex);
            })
        });
    });
}

/**
 * Custom sorting function to sort an array of version strings.
 *
 * @param {string} a - The first version string to compare.
 * @param {string} b - The second version string to compare.
 * @returns {number} - A negative number if a should come before b, a positive number if b should come before a, or 0 if they are equal.
 */
function sortVersions(a, b) {
    // Put 'develop' and 'latest' at the top
    if (a === 'develop' || a === 'latest') return -1;
    if (b === 'develop' || b === 'latest') return 1;

    const versionA = (a.match(/v\d+(\.\d+)+/) || [])[0];
    const versionB = (b.match(/v\d+(\.\d+)+/) || [])[0];

    return versionB.localeCompare(versionA);
}

/**
 * Change the version in a URL path.
 *
 * @param {string} url - The original URL to be modified.
 * @param {string} newVersion - The new version to replace the existing version in the URL.
 * @returns {string} The updated URL with the new version.
 */
function changeVersion(url, newVersion) {
    const parsedURL = new URL(url);
    const pathSegments = parsedURL.pathname.split('/');

    // Modify the version
    pathSegments[2] = newVersion;

    // Reconstruct the URL
    parsedURL.pathname = pathSegments.join('/');

    return parsedURL.href;
}
@@ -1 +0,0 @@
|
||||
window.SYNAPSE_VERSION = 'v1.98';
|
||||
399
poetry.lock
generated
399
poetry.lock
generated
@@ -1,4 +1,4 @@
|
||||
# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand.
|
||||
# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand.
|
||||
|
||||
[[package]]
|
||||
name = "alabaster"
|
||||
@@ -454,34 +454,34 @@ files = [
|
||||
|
||||
[[package]]
|
||||
name = "cryptography"
|
||||
version = "41.0.7"
|
||||
version = "41.0.5"
|
||||
description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers."
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "cryptography-41.0.7-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:3c78451b78313fa81607fa1b3f1ae0a5ddd8014c38a02d9db0616133987b9cdf"},
|
||||
{file = "cryptography-41.0.7-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:928258ba5d6f8ae644e764d0f996d61a8777559f72dfeb2eea7e2fe0ad6e782d"},
|
||||
{file = "cryptography-41.0.7-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5a1b41bc97f1ad230a41657d9155113c7521953869ae57ac39ac7f1bb471469a"},
|
||||
{file = "cryptography-41.0.7-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:841df4caa01008bad253bce2a6f7b47f86dc9f08df4b433c404def869f590a15"},
|
||||
{file = "cryptography-41.0.7-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:5429ec739a29df2e29e15d082f1d9ad683701f0ec7709ca479b3ff2708dae65a"},
|
||||
{file = "cryptography-41.0.7-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:43f2552a2378b44869fe8827aa19e69512e3245a219104438692385b0ee119d1"},
|
||||
{file = "cryptography-41.0.7-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:af03b32695b24d85a75d40e1ba39ffe7db7ffcb099fe507b39fd41a565f1b157"},
|
||||
{file = "cryptography-41.0.7-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:49f0805fc0b2ac8d4882dd52f4a3b935b210935d500b6b805f321addc8177406"},
|
||||
{file = "cryptography-41.0.7-cp37-abi3-win32.whl", hash = "sha256:f983596065a18a2183e7f79ab3fd4c475205b839e02cbc0efbbf9666c4b3083d"},
|
||||
{file = "cryptography-41.0.7-cp37-abi3-win_amd64.whl", hash = "sha256:90452ba79b8788fa380dfb587cca692976ef4e757b194b093d845e8d99f612f2"},
|
||||
{file = "cryptography-41.0.7-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:079b85658ea2f59c4f43b70f8119a52414cdb7be34da5d019a77bf96d473b960"},
|
||||
{file = "cryptography-41.0.7-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:b640981bf64a3e978a56167594a0e97db71c89a479da8e175d8bb5be5178c003"},
|
||||
{file = "cryptography-41.0.7-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:e3114da6d7f95d2dee7d3f4eec16dacff819740bbab931aff8648cb13c5ff5e7"},
|
||||
{file = "cryptography-41.0.7-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:d5ec85080cce7b0513cfd233914eb8b7bbd0633f1d1703aa28d1dd5a72f678ec"},
|
||||
{file = "cryptography-41.0.7-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:7a698cb1dac82c35fcf8fe3417a3aaba97de16a01ac914b89a0889d364d2f6be"},
|
||||
{file = "cryptography-41.0.7-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:37a138589b12069efb424220bf78eac59ca68b95696fc622b6ccc1c0a197204a"},
|
||||
{file = "cryptography-41.0.7-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:68a2dec79deebc5d26d617bfdf6e8aab065a4f34934b22d3b5010df3ba36612c"},
|
||||
{file = "cryptography-41.0.7-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:09616eeaef406f99046553b8a40fbf8b1e70795a91885ba4c96a70793de5504a"},
|
||||
{file = "cryptography-41.0.7-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:48a0476626da912a44cc078f9893f292f0b3e4c739caf289268168d8f4702a39"},
|
||||
{file = "cryptography-41.0.7-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c7f3201ec47d5207841402594f1d7950879ef890c0c495052fa62f58283fde1a"},
|
||||
{file = "cryptography-41.0.7-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c5ca78485a255e03c32b513f8c2bc39fedb7f5c5f8535545bdc223a03b24f248"},
|
||||
{file = "cryptography-41.0.7-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:d6c391c021ab1f7a82da5d8d0b3cee2f4b2c455ec86c8aebbc84837a631ff309"},
|
||||
{file = "cryptography-41.0.7.tar.gz", hash = "sha256:13f93ce9bea8016c253b34afc6bd6a75993e5c40672ed5405a9c832f0d4a00bc"},
|
||||
{file = "cryptography-41.0.5-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:da6a0ff8f1016ccc7477e6339e1d50ce5f59b88905585f77193ebd5068f1e797"},
|
||||
{file = "cryptography-41.0.5-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:b948e09fe5fb18517d99994184854ebd50b57248736fd4c720ad540560174ec5"},
|
||||
{file = "cryptography-41.0.5-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d38e6031e113b7421db1de0c1b1f7739564a88f1684c6b89234fbf6c11b75147"},
|
||||
{file = "cryptography-41.0.5-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e270c04f4d9b5671ebcc792b3ba5d4488bf7c42c3c241a3748e2599776f29696"},
|
||||
{file = "cryptography-41.0.5-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:ec3b055ff8f1dce8e6ef28f626e0972981475173d7973d63f271b29c8a2897da"},
|
||||
{file = "cryptography-41.0.5-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:7d208c21e47940369accfc9e85f0de7693d9a5d843c2509b3846b2db170dfd20"},
|
||||
{file = "cryptography-41.0.5-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:8254962e6ba1f4d2090c44daf50a547cd5f0bf446dc658a8e5f8156cae0d8548"},
|
||||
{file = "cryptography-41.0.5-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:a48e74dad1fb349f3dc1d449ed88e0017d792997a7ad2ec9587ed17405667e6d"},
|
||||
{file = "cryptography-41.0.5-cp37-abi3-win32.whl", hash = "sha256:d3977f0e276f6f5bf245c403156673db103283266601405376f075c849a0b936"},
|
||||
{file = "cryptography-41.0.5-cp37-abi3-win_amd64.whl", hash = "sha256:73801ac9736741f220e20435f84ecec75ed70eda90f781a148f1bad546963d81"},
|
||||
{file = "cryptography-41.0.5-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:3be3ca726e1572517d2bef99a818378bbcf7d7799d5372a46c79c29eb8d166c1"},
|
||||
{file = "cryptography-41.0.5-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:e886098619d3815e0ad5790c973afeee2c0e6e04b4da90b88e6bd06e2a0b1b72"},
|
||||
{file = "cryptography-41.0.5-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:573eb7128cbca75f9157dcde974781209463ce56b5804983e11a1c462f0f4e88"},
|
||||
{file = "cryptography-41.0.5-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:0c327cac00f082013c7c9fb6c46b7cc9fa3c288ca702c74773968173bda421bf"},
|
||||
{file = "cryptography-41.0.5-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:227ec057cd32a41c6651701abc0328135e472ed450f47c2766f23267b792a88e"},
|
||||
{file = "cryptography-41.0.5-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:22892cc830d8b2c89ea60148227631bb96a7da0c1b722f2aac8824b1b7c0b6b8"},
|
||||
{file = "cryptography-41.0.5-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:5a70187954ba7292c7876734183e810b728b4f3965fbe571421cb2434d279179"},
|
||||
{file = "cryptography-41.0.5-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:88417bff20162f635f24f849ab182b092697922088b477a7abd6664ddd82291d"},
|
||||
{file = "cryptography-41.0.5-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:c707f7afd813478e2019ae32a7c49cd932dd60ab2d2a93e796f68236b7e1fbf1"},
|
||||
{file = "cryptography-41.0.5-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:580afc7b7216deeb87a098ef0674d6ee34ab55993140838b14c9b83312b37b86"},
|
||||
{file = "cryptography-41.0.5-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:fba1e91467c65fe64a82c689dc6cf58151158993b13eb7a7f3f4b7f395636723"},
|
||||
{file = "cryptography-41.0.5-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:0d2a6a598847c46e3e321a7aef8af1436f11c27f1254933746304ff014664d84"},
|
||||
{file = "cryptography-41.0.5.tar.gz", hash = "sha256:392cb88b597247177172e02da6b7a63deeff1937fa6fec3bbf902ebd75d97ec7"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
@@ -712,13 +712,13 @@ idna = ">=2.5"

[[package]]
name = "idna"
version = "3.6"
version = "3.4"
description = "Internationalized Domain Names in Applications (IDNA)"
optional = false
python-versions = ">=3.5"
files = [
{file = "idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f"},
{file = "idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca"},
{file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"},
{file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"},
]

[[package]]
@@ -981,13 +981,13 @@ i18n = ["Babel (>=2.7)"]

[[package]]
name = "jsonschema"
version = "4.20.0"
version = "4.19.1"
description = "An implementation of JSON Schema validation for Python"
optional = false
python-versions = ">=3.8"
files = [
{file = "jsonschema-4.20.0-py3-none-any.whl", hash = "sha256:ed6231f0429ecf966f5bc8dfef245998220549cbbcf140f913b7464c52c3b6b3"},
{file = "jsonschema-4.20.0.tar.gz", hash = "sha256:4f614fd46d8d61258610998997743ec5492a648b33cf478c1ddc23ed4598a5fa"},
{file = "jsonschema-4.19.1-py3-none-any.whl", hash = "sha256:cd5f1f9ed9444e554b38ba003af06c0a8c2868131e56bfbef0550fb450c0330e"},
{file = "jsonschema-4.19.1.tar.gz", hash = "sha256:ec84cc37cfa703ef7cd4928db24f9cb31428a5d0fa77747b8b51a847458e0bbf"},
]

[package.dependencies]
@@ -1611,13 +1611,13 @@ files = [

[[package]]
name = "phonenumbers"
version = "8.13.26"
version = "8.13.23"
description = "Python version of Google's common library for parsing, formatting, storing and validating international phone numbers."
optional = false
python-versions = "*"
files = [
{file = "phonenumbers-8.13.26-py2.py3-none-any.whl", hash = "sha256:b2308c9c5750b8f10dd30d94547afd66bce60ac5e93aff227f95740557f32752"},
{file = "phonenumbers-8.13.26.tar.gz", hash = "sha256:937d70aeceb317f5831dfec28de855a60260ef4a9d551964bec8e7a7d0cf81cd"},
{file = "phonenumbers-8.13.23-py2.py3-none-any.whl", hash = "sha256:34d6cb279dd4a64714e324c71350f96e5bda3237be28d11b4c555c44701544cd"},
{file = "phonenumbers-8.13.23.tar.gz", hash = "sha256:869e44fcaaf276eca6b953a401e2b27d57461f3a18a66cf5f13377e7bb0e228c"},
]

[[package]]
@@ -1729,13 +1729,13 @@ test = ["appdirs (==1.4.4)", "covdefaults (>=2.2.2)", "pytest (>=7.2.1)", "pytes

[[package]]
name = "prometheus-client"
version = "0.19.0"
version = "0.18.0"
description = "Python client for the Prometheus monitoring system."
optional = false
python-versions = ">=3.8"
files = [
{file = "prometheus_client-0.19.0-py3-none-any.whl", hash = "sha256:c88b1e6ecf6b41cd8fb5731c7ae919bf66df6ec6fafa555cd6c0e16ca169ae92"},
{file = "prometheus_client-0.19.0.tar.gz", hash = "sha256:4585b0d1223148c27a225b10dbec5ae9bc4c81a99a3fa80774fa6209935324e1"},
{file = "prometheus_client-0.18.0-py3-none-any.whl", hash = "sha256:8de3ae2755f890826f4b6479e5571d4f74ac17a81345fe69a6778fdb92579184"},
{file = "prometheus_client-0.18.0.tar.gz", hash = "sha256:35f7a8c22139e2bb7ca5a698e92d38145bc8dc74c1c0bf56f25cca886a764e17"},
]

[package.extras]
@@ -1792,13 +1792,13 @@ psycopg2 = "*"

[[package]]
name = "pyasn1"
version = "0.5.1"
version = "0.5.0"
description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208)"
optional = false
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7"
files = [
{file = "pyasn1-0.5.1-py2.py3-none-any.whl", hash = "sha256:4439847c58d40b1d0a573d07e3856e95333f1976294494c325775aeca506eb58"},
{file = "pyasn1-0.5.1.tar.gz", hash = "sha256:6d391a96e59b23130a5cfa74d6fd7f388dbbe26cc8f1edf39fdddf08d9d6676c"},
{file = "pyasn1-0.5.0-py2.py3-none-any.whl", hash = "sha256:87a2121042a1ac9358cabcaf1d07680ff97ee6404333bacca15f76aa8ad01a57"},
{file = "pyasn1-0.5.0.tar.gz", hash = "sha256:97b7290ca68e62a832558ec3976f15cbf911bf5d7c7039d8b861c2a0ece69fde"},
]

[[package]]
@@ -1828,18 +1828,18 @@ files = [

[[package]]
name = "pydantic"
version = "2.5.1"
version = "2.4.2"
description = "Data validation using Python type hints"
optional = false
python-versions = ">=3.7"
files = [
{file = "pydantic-2.5.1-py3-none-any.whl", hash = "sha256:dc5244a8939e0d9a68f1f1b5f550b2e1c879912033b1becbedb315accc75441b"},
{file = "pydantic-2.5.1.tar.gz", hash = "sha256:0b8be5413c06aadfbe56f6dc1d45c9ed25fd43264414c571135c97dd77c2bedb"},
{file = "pydantic-2.4.2-py3-none-any.whl", hash = "sha256:bc3ddf669d234f4220e6e1c4d96b061abe0998185a8d7855c0126782b7abc8c1"},
{file = "pydantic-2.4.2.tar.gz", hash = "sha256:94f336138093a5d7f426aac732dcfe7ab4eb4da243c88f891d65deb4a2556ee7"},
]

[package.dependencies]
annotated-types = ">=0.4.0"
pydantic-core = "2.14.3"
pydantic-core = "2.10.1"
typing-extensions = ">=4.6.1"

[package.extras]
@@ -1847,116 +1847,117 @@ email = ["email-validator (>=2.0.0)"]

[[package]]
name = "pydantic-core"
version = "2.14.3"
version = "2.10.1"
description = ""
optional = false
python-versions = ">=3.7"
files = [
{file = "pydantic_core-2.14.3-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:ba44fad1d114539d6a1509966b20b74d2dec9a5b0ee12dd7fd0a1bb7b8785e5f"},
{file = "pydantic_core-2.14.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4a70d23eedd88a6484aa79a732a90e36701048a1509078d1b59578ef0ea2cdf5"},
{file = "pydantic_core-2.14.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7cc24728a1a9cef497697e53b3d085fb4d3bc0ef1ef4d9b424d9cf808f52c146"},
{file = "pydantic_core-2.14.3-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ab4a2381005769a4af2ffddae74d769e8a4aae42e970596208ec6d615c6fb080"},
{file = "pydantic_core-2.14.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:905a12bf088d6fa20e094f9a477bf84bd823651d8b8384f59bcd50eaa92e6a52"},
{file = "pydantic_core-2.14.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:38aed5a1bbc3025859f56d6a32f6e53ca173283cb95348e03480f333b1091e7d"},
{file = "pydantic_core-2.14.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1767bd3f6370458e60c1d3d7b1d9c2751cc1ad743434e8ec84625a610c8b9195"},
{file = "pydantic_core-2.14.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7cb0c397f29688a5bd2c0dbd44451bc44ebb9b22babc90f97db5ec3e5bb69977"},
{file = "pydantic_core-2.14.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:9ff737f24b34ed26de62d481ef522f233d3c5927279f6b7229de9b0deb3f76b5"},
{file = "pydantic_core-2.14.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a1a39fecb5f0b19faee9a8a8176c805ed78ce45d760259a4ff3d21a7daa4dfc1"},
{file = "pydantic_core-2.14.3-cp310-none-win32.whl", hash = "sha256:ccbf355b7276593c68fa824030e68cb29f630c50e20cb11ebb0ee450ae6b3d08"},
{file = "pydantic_core-2.14.3-cp310-none-win_amd64.whl", hash = "sha256:536e1f58419e1ec35f6d1310c88496f0d60e4f182cacb773d38076f66a60b149"},
{file = "pydantic_core-2.14.3-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:f1f46700402312bdc31912f6fc17f5ecaaaa3bafe5487c48f07c800052736289"},
{file = "pydantic_core-2.14.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:88ec906eb2d92420f5b074f59cf9e50b3bb44f3cb70e6512099fdd4d88c2f87c"},
{file = "pydantic_core-2.14.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:056ea7cc3c92a7d2a14b5bc9c9fa14efa794d9f05b9794206d089d06d3433dc7"},
{file = "pydantic_core-2.14.3-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:076edc972b68a66870cec41a4efdd72a6b655c4098a232314b02d2bfa3bfa157"},
{file = "pydantic_core-2.14.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e71f666c3bf019f2490a47dddb44c3ccea2e69ac882f7495c68dc14d4065eac2"},
{file = "pydantic_core-2.14.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f518eac285c9632be337323eef9824a856f2680f943a9b68ac41d5f5bad7df7c"},
{file = "pydantic_core-2.14.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9dbab442a8d9ca918b4ed99db8d89d11b1f067a7dadb642476ad0889560dac79"},
{file = "pydantic_core-2.14.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0653fb9fc2fa6787f2fa08631314ab7fc8070307bd344bf9471d1b7207c24623"},
{file = "pydantic_core-2.14.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c54af5069da58ea643ad34ff32fd6bc4eebb8ae0fef9821cd8919063e0aeeaab"},
{file = "pydantic_core-2.14.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cc956f78651778ec1ab105196e90e0e5f5275884793ab67c60938c75bcca3989"},
{file = "pydantic_core-2.14.3-cp311-none-win32.whl", hash = "sha256:5b73441a1159f1fb37353aaefb9e801ab35a07dd93cb8177504b25a317f4215a"},
{file = "pydantic_core-2.14.3-cp311-none-win_amd64.whl", hash = "sha256:7349f99f1ef8b940b309179733f2cad2e6037a29560f1b03fdc6aa6be0a8d03c"},
{file = "pydantic_core-2.14.3-cp311-none-win_arm64.whl", hash = "sha256:ec79dbe23702795944d2ae4c6925e35a075b88acd0d20acde7c77a817ebbce94"},
{file = "pydantic_core-2.14.3-cp312-cp312-macosx_10_7_x86_64.whl", hash = "sha256:8f5624f0f67f2b9ecaa812e1dfd2e35b256487566585160c6c19268bf2ffeccc"},
{file = "pydantic_core-2.14.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6c2d118d1b6c9e2d577e215567eedbe11804c3aafa76d39ec1f8bc74e918fd07"},
{file = "pydantic_core-2.14.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fe863491664c6720d65ae438d4efaa5eca766565a53adb53bf14bc3246c72fe0"},
{file = "pydantic_core-2.14.3-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:136bc7247e97a921a020abbd6ef3169af97569869cd6eff41b6a15a73c44ea9b"},
{file = "pydantic_core-2.14.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aeafc7f5bbddc46213707266cadc94439bfa87ecf699444de8be044d6d6eb26f"},
{file = "pydantic_core-2.14.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e16aaf788f1de5a85c8f8fcc9c1ca1dd7dd52b8ad30a7889ca31c7c7606615b8"},
{file = "pydantic_core-2.14.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8fc652c354d3362e2932a79d5ac4bbd7170757a41a62c4fe0f057d29f10bebb"},
{file = "pydantic_core-2.14.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f1b92e72babfd56585c75caf44f0b15258c58e6be23bc33f90885cebffde3400"},
{file = "pydantic_core-2.14.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:75f3f534f33651b73f4d3a16d0254de096f43737d51e981478d580f4b006b427"},
{file = "pydantic_core-2.14.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:c9ffd823c46e05ef3eb28b821aa7bc501efa95ba8880b4a1380068e32c5bed47"},
{file = "pydantic_core-2.14.3-cp312-none-win32.whl", hash = "sha256:12e05a76b223577a4696c76d7a6b36a0ccc491ffb3c6a8cf92d8001d93ddfd63"},
{file = "pydantic_core-2.14.3-cp312-none-win_amd64.whl", hash = "sha256:1582f01eaf0537a696c846bea92082082b6bfc1103a88e777e983ea9fbdc2a0f"},
{file = "pydantic_core-2.14.3-cp312-none-win_arm64.whl", hash = "sha256:96fb679c7ca12a512d36d01c174a4fbfd912b5535cc722eb2c010c7b44eceb8e"},
{file = "pydantic_core-2.14.3-cp37-cp37m-macosx_10_7_x86_64.whl", hash = "sha256:71ed769b58d44e0bc2701aa59eb199b6665c16e8a5b8b4a84db01f71580ec448"},
{file = "pydantic_core-2.14.3-cp37-cp37m-macosx_11_0_arm64.whl", hash = "sha256:5402ee0f61e7798ea93a01b0489520f2abfd9b57b76b82c93714c4318c66ca06"},
{file = "pydantic_core-2.14.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eaab9dc009e22726c62fe3b850b797e7f0e7ba76d245284d1064081f512c7226"},
{file = "pydantic_core-2.14.3-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:92486a04d54987054f8b4405a9af9d482e5100d6fe6374fc3303015983fc8bda"},
{file = "pydantic_core-2.14.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cf08b43d1d5d1678f295f0431a4a7e1707d4652576e1d0f8914b5e0213bfeee5"},
{file = "pydantic_core-2.14.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8ca13480ce16daad0504be6ce893b0ee8ec34cd43b993b754198a89e2787f7e"},
{file = "pydantic_core-2.14.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:44afa3c18d45053fe8d8228950ee4c8eaf3b5a7f3b64963fdeac19b8342c987f"},
{file = "pydantic_core-2.14.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:56814b41486e2d712a8bc02a7b1f17b87fa30999d2323bbd13cf0e52296813a1"},
{file = "pydantic_core-2.14.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c3dc2920cc96f9aa40c6dc54256e436cc95c0a15562eb7bd579e1811593c377e"},
{file = "pydantic_core-2.14.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e483b8b913fcd3b48badec54185c150cb7ab0e6487914b84dc7cde2365e0c892"},
{file = "pydantic_core-2.14.3-cp37-none-win32.whl", hash = "sha256:364dba61494e48f01ef50ae430e392f67ee1ee27e048daeda0e9d21c3ab2d609"},
{file = "pydantic_core-2.14.3-cp37-none-win_amd64.whl", hash = "sha256:a402ae1066be594701ac45661278dc4a466fb684258d1a2c434de54971b006ca"},
{file = "pydantic_core-2.14.3-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:10904368261e4509c091cbcc067e5a88b070ed9a10f7ad78f3029c175487490f"},
{file = "pydantic_core-2.14.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:260692420028319e201b8649b13ac0988974eeafaaef95d0dfbf7120c38dc000"},
{file = "pydantic_core-2.14.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c1bf1a7b05a65d3b37a9adea98e195e0081be6b17ca03a86f92aeb8b110f468"},
{file = "pydantic_core-2.14.3-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d7abd17a838a52140e3aeca271054e321226f52df7e0a9f0da8f91ea123afe98"},
{file = "pydantic_core-2.14.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a5c51460ede609fbb4fa883a8fe16e749964ddb459966d0518991ec02eb8dfb9"},
{file = "pydantic_core-2.14.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d06c78074646111fb01836585f1198367b17d57c9f427e07aaa9ff499003e58d"},
{file = "pydantic_core-2.14.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af452e69446fadf247f18ac5d153b1f7e61ef708f23ce85d8c52833748c58075"},
{file = "pydantic_core-2.14.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e3ad4968711fb379a67c8c755beb4dae8b721a83737737b7bcee27c05400b047"},
{file = "pydantic_core-2.14.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:c5ea0153482e5b4d601c25465771c7267c99fddf5d3f3bdc238ef930e6d051cf"},
{file = "pydantic_core-2.14.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:96eb10ef8920990e703da348bb25fedb8b8653b5966e4e078e5be382b430f9e0"},
{file = "pydantic_core-2.14.3-cp38-none-win32.whl", hash = "sha256:ea1498ce4491236d1cffa0eee9ad0968b6ecb0c1cd711699c5677fc689905f00"},
{file = "pydantic_core-2.14.3-cp38-none-win_amd64.whl", hash = "sha256:2bc736725f9bd18a60eec0ed6ef9b06b9785454c8d0105f2be16e4d6274e63d0"},
{file = "pydantic_core-2.14.3-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:1ea992659c03c3ea811d55fc0a997bec9dde863a617cc7b25cfde69ef32e55af"},
{file = "pydantic_core-2.14.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d2b53e1f851a2b406bbb5ac58e16c4a5496038eddd856cc900278fa0da97f3fc"},
{file = "pydantic_core-2.14.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0c7f8e8a7cf8e81ca7d44bea4f181783630959d41b4b51d2f74bc50f348a090f"},
{file = "pydantic_core-2.14.3-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8d3b9c91eeb372a64ec6686c1402afd40cc20f61a0866850f7d989b6bf39a41a"},
{file = "pydantic_core-2.14.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9ef3e2e407e4cad2df3c89488a761ed1f1c33f3b826a2ea9a411b0a7d1cccf1b"},
{file = "pydantic_core-2.14.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f86f20a9d5bee1a6ede0f2757b917bac6908cde0f5ad9fcb3606db1e2968bcf5"},
{file = "pydantic_core-2.14.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:61beaa79d392d44dc19d6f11ccd824d3cccb865c4372157c40b92533f8d76dd0"},
{file = "pydantic_core-2.14.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d41df8e10b094640a6b234851b624b76a41552f637b9fb34dc720b9fe4ef3be4"},
{file = "pydantic_core-2.14.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:2c08ac60c3caa31f825b5dbac47e4875bd4954d8f559650ad9e0b225eaf8ed0c"},
{file = "pydantic_core-2.14.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:98d8b3932f1a369364606417ded5412c4ffb15bedbcf797c31317e55bd5d920e"},
{file = "pydantic_core-2.14.3-cp39-none-win32.whl", hash = "sha256:caa94726791e316f0f63049ee00dff3b34a629b0d099f3b594770f7d0d8f1f56"},
{file = "pydantic_core-2.14.3-cp39-none-win_amd64.whl", hash = "sha256:2494d20e4c22beac30150b4be3b8339bf2a02ab5580fa6553ca274bc08681a65"},
{file = "pydantic_core-2.14.3-pp310-pypy310_pp73-macosx_10_7_x86_64.whl", hash = "sha256:fe272a72c7ed29f84c42fedd2d06c2f9858dc0c00dae3b34ba15d6d8ae0fbaaf"},
{file = "pydantic_core-2.14.3-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:7e63a56eb7fdee1587d62f753ccd6d5fa24fbeea57a40d9d8beaef679a24bdd6"},
{file = "pydantic_core-2.14.3-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b7692f539a26265cece1e27e366df5b976a6db6b1f825a9e0466395b314ee48b"},
{file = "pydantic_core-2.14.3-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af46f0b7a1342b49f208fed31f5a83b8495bb14b652f621e0a6787d2f10f24ee"},
{file = "pydantic_core-2.14.3-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6e2f9d76c00e805d47f19c7a96a14e4135238a7551a18bfd89bb757993fd0933"},
{file = "pydantic_core-2.14.3-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:de52ddfa6e10e892d00f747bf7135d7007302ad82e243cf16d89dd77b03b649d"},
{file = "pydantic_core-2.14.3-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:38113856c7fad8c19be7ddd57df0c3e77b1b2336459cb03ee3903ce9d5e236ce"},
{file = "pydantic_core-2.14.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:354db020b1f8f11207b35360b92d95725621eb92656725c849a61e4b550f4acc"},
{file = "pydantic_core-2.14.3-pp37-pypy37_pp73-macosx_10_7_x86_64.whl", hash = "sha256:76fc18653a5c95e5301a52d1b5afb27c9adc77175bf00f73e94f501caf0e05ad"},
{file = "pydantic_core-2.14.3-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2646f8270f932d79ba61102a15ea19a50ae0d43b314e22b3f8f4b5fabbfa6e38"},
{file = "pydantic_core-2.14.3-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:37dad73a2f82975ed563d6a277fd9b50e5d9c79910c4aec787e2d63547202315"},
{file = "pydantic_core-2.14.3-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:113752a55a8eaece2e4ac96bc8817f134c2c23477e477d085ba89e3aa0f4dc44"},
{file = "pydantic_core-2.14.3-pp37-pypy37_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:8488e973547e8fb1b4193fd9faf5236cf1b7cd5e9e6dc7ff6b4d9afdc4c720cb"},
{file = "pydantic_core-2.14.3-pp37-pypy37_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:3d1dde10bd9962b1434053239b1d5490fc31a2b02d8950a5f731bc584c7a5a0f"},
{file = "pydantic_core-2.14.3-pp38-pypy38_pp73-macosx_10_7_x86_64.whl", hash = "sha256:2c83892c7bf92b91d30faca53bb8ea21f9d7e39f0ae4008ef2c2f91116d0464a"},
{file = "pydantic_core-2.14.3-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:849cff945284c577c5f621d2df76ca7b60f803cc8663ff01b778ad0af0e39bb9"},
{file = "pydantic_core-2.14.3-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa89919fbd8a553cd7d03bf23d5bc5deee622e1b5db572121287f0e64979476"},
{file = "pydantic_core-2.14.3-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf15145b1f8056d12c67255cd3ce5d317cd4450d5ee747760d8d088d85d12a2d"},
{file = "pydantic_core-2.14.3-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4cc6bb11f4e8e5ed91d78b9880774fbc0856cb226151b0a93b549c2b26a00c19"},
{file = "pydantic_core-2.14.3-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:832d16f248ca0cc96929139734ec32d21c67669dcf8a9f3f733c85054429c012"},
{file = "pydantic_core-2.14.3-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:b02b5e1f54c3396c48b665050464803c23c685716eb5d82a1d81bf81b5230da4"},
{file = "pydantic_core-2.14.3-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:1f2d4516c32255782153e858f9a900ca6deadfb217fd3fb21bb2b60b4e04d04d"},
{file = "pydantic_core-2.14.3-pp39-pypy39_pp73-macosx_10_7_x86_64.whl", hash = "sha256:0a3e51c2be472b7867eb0c5d025b91400c2b73a0823b89d4303a9097e2ec6655"},
{file = "pydantic_core-2.14.3-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:df33902464410a1f1a0411a235f0a34e7e129f12cb6340daca0f9d1390f5fe10"},
{file = "pydantic_core-2.14.3-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:27828f0227b54804aac6fb077b6bb48e640b5435fdd7fbf0c274093a7b78b69c"},
{file = "pydantic_core-2.14.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1e2979dc80246e18e348de51246d4c9b410186ffa3c50e77924bec436b1e36cb"},
{file = "pydantic_core-2.14.3-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b28996872b48baf829ee75fa06998b607c66a4847ac838e6fd7473a6b2ab68e7"},
{file = "pydantic_core-2.14.3-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:ca55c9671bb637ce13d18ef352fd32ae7aba21b4402f300a63f1fb1fd18e0364"},
{file = "pydantic_core-2.14.3-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:aecd5ed096b0e5d93fb0367fd8f417cef38ea30b786f2501f6c34eabd9062c38"},
{file = "pydantic_core-2.14.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:44aaf1a07ad0824e407dafc637a852e9a44d94664293bbe7d8ee549c356c8882"},
{file = "pydantic_core-2.14.3.tar.gz", hash = "sha256:3ad083df8fe342d4d8d00cc1d3c1a23f0dc84fce416eb301e69f1ddbbe124d3f"},
{file = "pydantic_core-2.10.1-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:d64728ee14e667ba27c66314b7d880b8eeb050e58ffc5fec3b7a109f8cddbd63"},
{file = "pydantic_core-2.10.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:48525933fea744a3e7464c19bfede85df4aba79ce90c60b94d8b6e1eddd67096"},
{file = "pydantic_core-2.10.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ef337945bbd76cce390d1b2496ccf9f90b1c1242a3a7bc242ca4a9fc5993427a"},
{file = "pydantic_core-2.10.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a1392e0638af203cee360495fd2cfdd6054711f2db5175b6e9c3c461b76f5175"},
{file = "pydantic_core-2.10.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0675ba5d22de54d07bccde38997e780044dcfa9a71aac9fd7d4d7a1d2e3e65f7"},
{file = "pydantic_core-2.10.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:128552af70a64660f21cb0eb4876cbdadf1a1f9d5de820fed6421fa8de07c893"},
{file = "pydantic_core-2.10.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f6e6aed5818c264412ac0598b581a002a9f050cb2637a84979859e70197aa9e"},
{file = "pydantic_core-2.10.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ecaac27da855b8d73f92123e5f03612b04c5632fd0a476e469dfc47cd37d6b2e"},
{file = "pydantic_core-2.10.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b3c01c2fb081fced3bbb3da78510693dc7121bb893a1f0f5f4b48013201f362e"},
{file = "pydantic_core-2.10.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:92f675fefa977625105708492850bcbc1182bfc3e997f8eecb866d1927c98ae6"},
{file = "pydantic_core-2.10.1-cp310-none-win32.whl", hash = "sha256:420a692b547736a8d8703c39ea935ab5d8f0d2573f8f123b0a294e49a73f214b"},
{file = "pydantic_core-2.10.1-cp310-none-win_amd64.whl", hash = "sha256:0880e239827b4b5b3e2ce05e6b766a7414e5f5aedc4523be6b68cfbc7f61c5d0"},
{file = "pydantic_core-2.10.1-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:073d4a470b195d2b2245d0343569aac7e979d3a0dcce6c7d2af6d8a920ad0bea"},
{file = "pydantic_core-2.10.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:600d04a7b342363058b9190d4e929a8e2e715c5682a70cc37d5ded1e0dd370b4"},
{file = "pydantic_core-2.10.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:39215d809470f4c8d1881758575b2abfb80174a9e8daf8f33b1d4379357e417c"},
{file = "pydantic_core-2.10.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eeb3d3d6b399ffe55f9a04e09e635554012f1980696d6b0aca3e6cf42a17a03b"},
{file = "pydantic_core-2.10.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a7a7902bf75779bc12ccfc508bfb7a4c47063f748ea3de87135d433a4cca7a2f"},
{file = "pydantic_core-2.10.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3625578b6010c65964d177626fde80cf60d7f2e297d56b925cb5cdeda6e9925a"},
{file = "pydantic_core-2.10.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:caa48fc31fc7243e50188197b5f0c4228956f97b954f76da157aae7f67269ae8"},
{file = "pydantic_core-2.10.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:07ec6d7d929ae9c68f716195ce15e745b3e8fa122fc67698ac6498d802ed0fa4"},
{file = "pydantic_core-2.10.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e6f31a17acede6a8cd1ae2d123ce04d8cca74056c9d456075f4f6f85de055607"},
{file = "pydantic_core-2.10.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d8f1ebca515a03e5654f88411420fea6380fc841d1bea08effb28184e3d4899f"},
{file = "pydantic_core-2.10.1-cp311-none-win32.whl", hash = "sha256:6db2eb9654a85ada248afa5a6db5ff1cf0f7b16043a6b070adc4a5be68c716d6"},
{file = "pydantic_core-2.10.1-cp311-none-win_amd64.whl", hash = "sha256:4a5be350f922430997f240d25f8219f93b0c81e15f7b30b868b2fddfc2d05f27"},
{file = "pydantic_core-2.10.1-cp311-none-win_arm64.whl", hash = "sha256:5fdb39f67c779b183b0c853cd6b45f7db84b84e0571b3ef1c89cdb1dfc367325"},
{file = "pydantic_core-2.10.1-cp312-cp312-macosx_10_7_x86_64.whl", hash = "sha256:b1f22a9ab44de5f082216270552aa54259db20189e68fc12484873d926426921"},
{file = "pydantic_core-2.10.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8572cadbf4cfa95fb4187775b5ade2eaa93511f07947b38f4cd67cf10783b118"},
{file = "pydantic_core-2.10.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:db9a28c063c7c00844ae42a80203eb6d2d6bbb97070cfa00194dff40e6f545ab"},
{file = "pydantic_core-2.10.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0e2a35baa428181cb2270a15864ec6286822d3576f2ed0f4cd7f0c1708472aff"},
{file = "pydantic_core-2.10.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:05560ab976012bf40f25d5225a58bfa649bb897b87192a36c6fef1ab132540d7"},
{file = "pydantic_core-2.10.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d6495008733c7521a89422d7a68efa0a0122c99a5861f06020ef5b1f51f9ba7c"},
{file = "pydantic_core-2.10.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:14ac492c686defc8e6133e3a2d9eaf5261b3df26b8ae97450c1647286750b901"},
{file = "pydantic_core-2.10.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8282bab177a9a3081fd3d0a0175a07a1e2bfb7fcbbd949519ea0980f8a07144d"},
{file = "pydantic_core-2.10.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:aafdb89fdeb5fe165043896817eccd6434aee124d5ee9b354f92cd574ba5e78f"},
{file = "pydantic_core-2.10.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:f6defd966ca3b187ec6c366604e9296f585021d922e666b99c47e78738b5666c"},
{file = "pydantic_core-2.10.1-cp312-none-win32.whl", hash = "sha256:7c4d1894fe112b0864c1fa75dffa045720a194b227bed12f4be7f6045b25209f"},
{file = "pydantic_core-2.10.1-cp312-none-win_amd64.whl", hash = "sha256:5994985da903d0b8a08e4935c46ed8daf5be1cf217489e673910951dc533d430"},
{file = "pydantic_core-2.10.1-cp312-none-win_arm64.whl", hash = "sha256:0d8a8adef23d86d8eceed3e32e9cca8879c7481c183f84ed1a8edc7df073af94"},
{file = "pydantic_core-2.10.1-cp37-cp37m-macosx_10_7_x86_64.whl", hash = "sha256:9badf8d45171d92387410b04639d73811b785b5161ecadabf056ea14d62d4ede"},
{file = "pydantic_core-2.10.1-cp37-cp37m-macosx_11_0_arm64.whl", hash = "sha256:ebedb45b9feb7258fac0a268a3f6bec0a2ea4d9558f3d6f813f02ff3a6dc6698"},
{file = "pydantic_core-2.10.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cfe1090245c078720d250d19cb05d67e21a9cd7c257698ef139bc41cf6c27b4f"},
{file = "pydantic_core-2.10.1-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e357571bb0efd65fd55f18db0a2fb0ed89d0bb1d41d906b138f088933ae618bb"},
{file = "pydantic_core-2.10.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b3dcd587b69bbf54fc04ca157c2323b8911033e827fffaecf0cafa5a892a0904"},
{file = "pydantic_core-2.10.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9c120c9ce3b163b985a3b966bb701114beb1da4b0468b9b236fc754783d85aa3"},
{file = "pydantic_core-2.10.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15d6bca84ffc966cc9976b09a18cf9543ed4d4ecbd97e7086f9ce9327ea48891"},
{file = "pydantic_core-2.10.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5cabb9710f09d5d2e9e2748c3e3e20d991a4c5f96ed8f1132518f54ab2967221"},
{file = "pydantic_core-2.10.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:82f55187a5bebae7d81d35b1e9aaea5e169d44819789837cdd4720d768c55d15"},
{file = "pydantic_core-2.10.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:1d40f55222b233e98e3921df7811c27567f0e1a4411b93d4c5c0f4ce131bc42f"},
{file = "pydantic_core-2.10.1-cp37-none-win32.whl", hash = "sha256:14e09ff0b8fe6e46b93d36a878f6e4a3a98ba5303c76bb8e716f4878a3bee92c"},
{file = "pydantic_core-2.10.1-cp37-none-win_amd64.whl", hash = "sha256:1396e81b83516b9d5c9e26a924fa69164156c148c717131f54f586485ac3c15e"},
{file = "pydantic_core-2.10.1-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:6835451b57c1b467b95ffb03a38bb75b52fb4dc2762bb1d9dbed8de31ea7d0fc"},
{file = "pydantic_core-2.10.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b00bc4619f60c853556b35f83731bd817f989cba3e97dc792bb8c97941b8053a"},
{file = "pydantic_core-2.10.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0fa467fd300a6f046bdb248d40cd015b21b7576c168a6bb20aa22e595c8ffcdd"},
{file = "pydantic_core-2.10.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d99277877daf2efe074eae6338453a4ed54a2d93fb4678ddfe1209a0c93a2468"},
{file = "pydantic_core-2.10.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fa7db7558607afeccb33c0e4bf1c9a9a835e26599e76af6fe2fcea45904083a6"},
{file = "pydantic_core-2.10.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aad7bd686363d1ce4ee930ad39f14e1673248373f4a9d74d2b9554f06199fb58"},
{file = "pydantic_core-2.10.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:443fed67d33aa85357464f297e3d26e570267d1af6fef1c21ca50921d2976302"},
{file = "pydantic_core-2.10.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:042462d8d6ba707fd3ce9649e7bf268633a41018d6a998fb5fbacb7e928a183e"},
{file = "pydantic_core-2.10.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:ecdbde46235f3d560b18be0cb706c8e8ad1b965e5c13bbba7450c86064e96561"},
{file = "pydantic_core-2.10.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:ed550ed05540c03f0e69e6d74ad58d026de61b9eaebebbaaf8873e585cbb18de"},
{file = "pydantic_core-2.10.1-cp38-none-win32.whl", hash = "sha256:8cdbbd92154db2fec4ec973d45c565e767ddc20aa6dbaf50142676484cbff8ee"},
{file = "pydantic_core-2.10.1-cp38-none-win_amd64.whl", hash = "sha256:9f6f3e2598604956480f6c8aa24a3384dbf6509fe995d97f6ca6103bb8c2534e"},
{file = "pydantic_core-2.10.1-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:655f8f4c8d6a5963c9a0687793da37b9b681d9ad06f29438a3b2326d4e6b7970"},
{file = "pydantic_core-2.10.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e570ffeb2170e116a5b17e83f19911020ac79d19c96f320cbfa1fa96b470185b"},
{file = "pydantic_core-2.10.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:64322bfa13e44c6c30c518729ef08fda6026b96d5c0be724b3c4ae4da939f875"},
{file = "pydantic_core-2.10.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:485a91abe3a07c3a8d1e082ba29254eea3e2bb13cbbd4351ea4e5a21912cc9b0"},
{file = "pydantic_core-2.10.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7c2b8eb9fc872e68b46eeaf835e86bccc3a58ba57d0eedc109cbb14177be531"},
{file = "pydantic_core-2.10.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a5cb87bdc2e5f620693148b5f8f842d293cae46c5f15a1b1bf7ceeed324a740c"},
{file = "pydantic_core-2.10.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:25bd966103890ccfa028841a8f30cebcf5875eeac8c4bde4fe221364c92f0c9a"},
{file = "pydantic_core-2.10.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f323306d0556351735b54acbf82904fe30a27b6a7147153cbe6e19aaaa2aa429"},
{file = "pydantic_core-2.10.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0c27f38dc4fbf07b358b2bc90edf35e82d1703e22ff2efa4af4ad5de1b3833e7"},
{file = "pydantic_core-2.10.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:f1365e032a477c1430cfe0cf2856679529a2331426f8081172c4a74186f1d595"},
{file = "pydantic_core-2.10.1-cp39-none-win32.whl", hash = "sha256:a1c311fd06ab3b10805abb72109f01a134019739bd3286b8ae1bc2fc4e50c07a"},
{file = "pydantic_core-2.10.1-cp39-none-win_amd64.whl", hash = "sha256:ae8a8843b11dc0b03b57b52793e391f0122e740de3df1474814c700d2622950a"},
{file = "pydantic_core-2.10.1-pp310-pypy310_pp73-macosx_10_7_x86_64.whl", hash = "sha256:d43002441932f9a9ea5d6f9efaa2e21458221a3a4b417a14027a1d530201ef1b"},
{file = "pydantic_core-2.10.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:fcb83175cc4936a5425dde3356f079ae03c0802bbdf8ff82c035f8a54b333521"},
{file = "pydantic_core-2.10.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:962ed72424bf1f72334e2f1e61b68f16c0e596f024ca7ac5daf229f7c26e4208"},
{file = "pydantic_core-2.10.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2cf5bb4dd67f20f3bbc1209ef572a259027c49e5ff694fa56bed62959b41e1f9"},
{file = "pydantic_core-2.10.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e544246b859f17373bed915182ab841b80849ed9cf23f1f07b73b7c58baee5fb"},
{file = "pydantic_core-2.10.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:c0877239307b7e69d025b73774e88e86ce82f6ba6adf98f41069d5b0b78bd1bf"},
{file = "pydantic_core-2.10.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:53df009d1e1ba40f696f8995683e067e3967101d4bb4ea6f667931b7d4a01357"},
{file = "pydantic_core-2.10.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:a1254357f7e4c82e77c348dabf2d55f1d14d19d91ff025004775e70a6ef40ada"},
{file = "pydantic_core-2.10.1-pp37-pypy37_pp73-macosx_10_7_x86_64.whl", hash = "sha256:524ff0ca3baea164d6d93a32c58ac79eca9f6cf713586fdc0adb66a8cdeab96a"},
{file = "pydantic_core-2.10.1-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3f0ac9fb8608dbc6eaf17956bf623c9119b4db7dbb511650910a82e261e6600f"},
{file = "pydantic_core-2.10.1-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:320f14bd4542a04ab23747ff2c8a778bde727158b606e2661349557f0770711e"},
{file = "pydantic_core-2.10.1-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:63974d168b6233b4ed6a0046296803cb13c56637a7b8106564ab575926572a55"},
{file = "pydantic_core-2.10.1-pp37-pypy37_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:417243bf599ba1f1fef2bb8c543ceb918676954734e2dcb82bf162ae9d7bd514"},
{file = "pydantic_core-2.10.1-pp37-pypy37_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:dda81e5ec82485155a19d9624cfcca9be88a405e2857354e5b089c2a982144b2"},
{file = "pydantic_core-2.10.1-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:14cfbb00959259e15d684505263d5a21732b31248a5dd4941f73a3be233865b9"},
{file = "pydantic_core-2.10.1-pp38-pypy38_pp73-macosx_10_7_x86_64.whl", hash = "sha256:631cb7415225954fdcc2a024119101946793e5923f6c4d73a5914d27eb3d3a05"},
{file = "pydantic_core-2.10.1-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:bec7dd208a4182e99c5b6c501ce0b1f49de2802448d4056091f8e630b28e9a52"},
{file = "pydantic_core-2.10.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:149b8a07712f45b332faee1a2258d8ef1fb4a36f88c0c17cb687f205c5dc6e7d"},
{file = "pydantic_core-2.10.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4d966c47f9dd73c2d32a809d2be529112d509321c5310ebf54076812e6ecd884"},
{file = "pydantic_core-2.10.1-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7eb037106f5c6b3b0b864ad226b0b7ab58157124161d48e4b30c4a43fef8bc4b"},
{file = "pydantic_core-2.10.1-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:154ea7c52e32dce13065dbb20a4a6f0cc012b4f667ac90d648d36b12007fa9f7"},
{file = "pydantic_core-2.10.1-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:e562617a45b5a9da5be4abe72b971d4f00bf8555eb29bb91ec2ef2be348cd132"},
{file = "pydantic_core-2.10.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:f23b55eb5464468f9e0e9a9935ce3ed2a870608d5f534025cd5536bca25b1402"},
{file = "pydantic_core-2.10.1-pp39-pypy39_pp73-macosx_10_7_x86_64.whl", hash = "sha256:e9121b4009339b0f751955baf4543a0bfd6bc3f8188f8056b1a25a2d45099934"},
{file = "pydantic_core-2.10.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:0523aeb76e03f753b58be33b26540880bac5aa54422e4462404c432230543f33"},
{file = "pydantic_core-2.10.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e0e2959ef5d5b8dc9ef21e1a305a21a36e254e6a34432d00c72a92fdc5ecda5"},
{file = "pydantic_core-2.10.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:da01bec0a26befab4898ed83b362993c844b9a607a86add78604186297eb047e"},
{file = "pydantic_core-2.10.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f2e9072d71c1f6cfc79a36d4484c82823c560e6f5599c43c1ca6b5cdbd54f881"},
{file = "pydantic_core-2.10.1-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:f36a3489d9e28fe4b67be9992a23029c3cec0babc3bd9afb39f49844a8c721c5"},
{file = "pydantic_core-2.10.1-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f64f82cc3443149292b32387086d02a6c7fb39b8781563e0ca7b8d7d9cf72bd7"},
{file = "pydantic_core-2.10.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:b4a6db486ac8e99ae696e09efc8b2b9fea67b63c8f88ba7a1a16c24a057a0776"},
{file = "pydantic_core-2.10.1.tar.gz", hash = "sha256:0f8682dbdd2f67f8e1edddcbffcc29f60a6182b4901c367fc8c1c40d30bb0a82"},
]

[package.dependencies]
@@ -2080,20 +2081,20 @@ tests = ["hypothesis (>=3.27.0)", "pytest (>=3.2.1,!=3.3.0)"]

[[package]]
name = "pyopenssl"
version = "23.3.0"
version = "23.2.0"
description = "Python wrapper module around the OpenSSL library"
optional = false
python-versions = ">=3.7"
python-versions = ">=3.6"
files = [
{file = "pyOpenSSL-23.3.0-py3-none-any.whl", hash = "sha256:6756834481d9ed5470f4a9393455154bc92fe7a64b7bc6ee2c804e78c52099b2"},
{file = "pyOpenSSL-23.3.0.tar.gz", hash = "sha256:6b2cba5cc46e822750ec3e5a81ee12819850b11303630d575e98108a079c2b12"},
{file = "pyOpenSSL-23.2.0-py3-none-any.whl", hash = "sha256:24f0dc5227396b3e831f4c7f602b950a5e9833d292c8e4a2e06b709292806ae2"},
{file = "pyOpenSSL-23.2.0.tar.gz", hash = "sha256:276f931f55a452e7dea69c7173e984eb2a4407ce413c918aa34b55f82f9b8bac"},
]

[package.dependencies]
cryptography = ">=41.0.5,<42"
cryptography = ">=38.0.0,<40.0.0 || >40.0.0,<40.0.1 || >40.0.1,<42"

[package.extras]
docs = ["sphinx (!=5.2.0,!=5.2.0.post0,!=7.2.5)", "sphinx-rtd-theme"]
docs = ["sphinx (!=5.2.0,!=5.2.0.post0)", "sphinx-rtd-theme"]
test = ["flaky", "pretend", "pytest (>=3.0.1)"]

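A quick aside on the cryptography constraint just above: Poetry's constraint syntax can union ranges with `||`, so pyOpenSSL 23.2.0's pin reads as "any cryptography from 38.0.0 up to (but excluding) 42, except the 40.0.0 and 40.0.1 point releases". PEP 440 has no `||` operator, but the same constraint collapses to one range with two exclusions; a minimal sketch checking that with the `packaging` library (the `!=` translation is my own, not part of this diff):

    from packaging.specifiers import SpecifierSet
    from packaging.version import Version

    # Single-range form of ">=38.0.0,<40.0.0 || >40.0.0,<40.0.1 || >40.0.1,<42"
    spec = SpecifierSet(">=38.0.0,!=40.0.0,!=40.0.1,<42")

    assert Version("41.0.5") in spec      # satisfies both the old and new pins
    assert Version("40.0.0") not in spec  # explicitly carved out
    assert Version("42.0.0") not in spec  # above the upper bound
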
[[package]]
@@ -2272,13 +2273,13 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"]

[[package]]
name = "requests-toolbelt"
version = "1.0.0"
version = "0.10.1"
description = "A utility belt for advanced users of python-requests"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
files = [
{file = "requests-toolbelt-1.0.0.tar.gz", hash = "sha256:7681a0a3d047012b5bdc0ee37d7f8f07ebe76ab08caeccfc3921ce23c88d5bc6"},
{file = "requests_toolbelt-1.0.0-py2.py3-none-any.whl", hash = "sha256:cccfdd665f0a24fcf4726e690f65639d272bb0637b9b92dfd91a5568ccf6bd06"},
{file = "requests-toolbelt-0.10.1.tar.gz", hash = "sha256:62e09f7ff5ccbda92772a29f394a49c3ad6cb181d568b1337626b2abb628a63d"},
{file = "requests_toolbelt-0.10.1-py2.py3-none-any.whl", hash = "sha256:18565aa58116d9951ac39baa288d3adb5b3ff975c4f25eee78555d89e8f247f7"},
]

[package.dependencies]
@@ -2425,28 +2426,28 @@ files = [

[[package]]
name = "ruff"
version = "0.1.6"
version = "0.1.4"
description = "An extremely fast Python linter and code formatter, written in Rust."
optional = false
python-versions = ">=3.7"
files = [
{file = "ruff-0.1.6-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:88b8cdf6abf98130991cbc9f6438f35f6e8d41a02622cc5ee130a02a0ed28703"},
{file = "ruff-0.1.6-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:5c549ed437680b6105a1299d2cd30e4964211606eeb48a0ff7a93ef70b902248"},
{file = "ruff-0.1.6-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1cf5f701062e294f2167e66d11b092bba7af6a057668ed618a9253e1e90cfd76"},
{file = "ruff-0.1.6-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:05991ee20d4ac4bb78385360c684e4b417edd971030ab12a4fbd075ff535050e"},
{file = "ruff-0.1.6-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:87455a0c1f739b3c069e2f4c43b66479a54dea0276dd5d4d67b091265f6fd1dc"},
{file = "ruff-0.1.6-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:683aa5bdda5a48cb8266fcde8eea2a6af4e5700a392c56ea5fb5f0d4bfdc0240"},
{file = "ruff-0.1.6-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:137852105586dcbf80c1717facb6781555c4e99f520c9c827bd414fac67ddfb6"},
{file = "ruff-0.1.6-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bd98138a98d48a1c36c394fd6b84cd943ac92a08278aa8ac8c0fdefcf7138f35"},
{file = "ruff-0.1.6-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a0cd909d25f227ac5c36d4e7e681577275fb74ba3b11d288aff7ec47e3ae745"},
{file = "ruff-0.1.6-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:e8fd1c62a47aa88a02707b5dd20c5ff20d035d634aa74826b42a1da77861b5ff"},
{file = "ruff-0.1.6-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:fd89b45d374935829134a082617954120d7a1470a9f0ec0e7f3ead983edc48cc"},
{file = "ruff-0.1.6-py3-none-musllinux_1_2_i686.whl", hash = "sha256:491262006e92f825b145cd1e52948073c56560243b55fb3b4ecb142f6f0e9543"},
{file = "ruff-0.1.6-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:ea284789861b8b5ca9d5443591a92a397ac183d4351882ab52f6296b4fdd5462"},
{file = "ruff-0.1.6-py3-none-win32.whl", hash = "sha256:1610e14750826dfc207ccbcdd7331b6bd285607d4181df9c1c6ae26646d6848a"},
{file = "ruff-0.1.6-py3-none-win_amd64.whl", hash = "sha256:4558b3e178145491e9bc3b2ee3c4b42f19d19384eaa5c59d10acf6e8f8b57e33"},
{file = "ruff-0.1.6-py3-none-win_arm64.whl", hash = "sha256:03910e81df0d8db0e30050725a5802441c2022ea3ae4fe0609b76081731accbc"},
{file = "ruff-0.1.6.tar.gz", hash = "sha256:1b09f29b16c6ead5ea6b097ef2764b42372aebe363722f1605ecbcd2b9207184"},
{file = "ruff-0.1.4-py3-none-macosx_10_7_x86_64.whl", hash = "sha256:864958706b669cce31d629902175138ad8a069d99ca53514611521f532d91495"},
{file = "ruff-0.1.4-py3-none-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:9fdd61883bb34317c788af87f4cd75dfee3a73f5ded714b77ba928e418d6e39e"},
{file = "ruff-0.1.4-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b4eaca8c9cc39aa7f0f0d7b8fe24ecb51232d1bb620fc4441a61161be4a17539"},
{file = "ruff-0.1.4-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a9a1301dc43cbf633fb603242bccd0aaa34834750a14a4c1817e2e5c8d60de17"},
{file = "ruff-0.1.4-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:78e8db8ab6f100f02e28b3d713270c857d370b8d61871d5c7d1702ae411df683"},
{file = "ruff-0.1.4-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:80fea754eaae06335784b8ea053d6eb8e9aac75359ebddd6fee0858e87c8d510"},
{file = "ruff-0.1.4-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6bc02a480d4bfffd163a723698da15d1a9aec2fced4c06f2a753f87f4ce6969c"},
{file = "ruff-0.1.4-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9862811b403063765b03e716dac0fda8fdbe78b675cd947ed5873506448acea4"},
{file = "ruff-0.1.4-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58826efb8b3efbb59bb306f4b19640b7e366967a31c049d49311d9eb3a4c60cb"},
{file = "ruff-0.1.4-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:fdfd453fc91d9d86d6aaa33b1bafa69d114cf7421057868f0b79104079d3e66e"},
{file = "ruff-0.1.4-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:e8791482d508bd0b36c76481ad3117987301b86072158bdb69d796503e1c84a8"},
{file = "ruff-0.1.4-py3-none-musllinux_1_2_i686.whl", hash = "sha256:01206e361021426e3c1b7fba06ddcb20dbc5037d64f6841e5f2b21084dc51800"},
{file = "ruff-0.1.4-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:645591a613a42cb7e5c2b667cbefd3877b21e0252b59272ba7212c3d35a5819f"},
{file = "ruff-0.1.4-py3-none-win32.whl", hash = "sha256:99908ca2b3b85bffe7e1414275d004917d1e0dfc99d497ccd2ecd19ad115fd0d"},
{file = "ruff-0.1.4-py3-none-win_amd64.whl", hash = "sha256:1dfd6bf8f6ad0a4ac99333f437e0ec168989adc5d837ecd38ddb2cc4a2e3db8a"},
{file = "ruff-0.1.4-py3-none-win_arm64.whl", hash = "sha256:d98ae9ebf56444e18a3e3652b3383204748f73e247dea6caaf8b52d37e6b32da"},
{file = "ruff-0.1.4.tar.gz", hash = "sha256:21520ecca4cc555162068d87c747b8f95e1e95f8ecfcbbe59e8dd00710586315"},
]

[[package]]
@@ -2481,13 +2482,13 @@ doc = ["Sphinx", "sphinx-rtd-theme"]

[[package]]
name = "sentry-sdk"
version = "1.35.0"
version = "1.32.0"
description = "Python client for Sentry (https://sentry.io)"
optional = true
python-versions = "*"
files = [
{file = "sentry-sdk-1.35.0.tar.gz", hash = "sha256:04e392db9a0d59bd49a51b9e3a92410ac5867556820465057c2ef89a38e953e9"},
{file = "sentry_sdk-1.35.0-py2.py3-none-any.whl", hash = "sha256:a7865952701e46d38b41315c16c075367675c48d049b90a4cc2e41991ebc7efa"},
{file = "sentry-sdk-1.32.0.tar.gz", hash = "sha256:935e8fbd7787a3702457393b74b13d89a5afb67185bc0af85c00cb27cbd42e7c"},
{file = "sentry_sdk-1.32.0-py2.py3-none-any.whl", hash = "sha256:eeb0b3550536f3bbc05bb1c7e0feb3a78d74acb43b607159a606ed2ec0a33a4d"},
]

[package.dependencies]
@@ -2691,17 +2692,17 @@ test = ["cython", "filelock", "html5lib", "pytest (>=4.6)"]

[[package]]
name = "sphinx-autodoc2"
version = "0.5.0"
version = "0.4.2"
description = "Analyse a python project and create documentation for it."
optional = false
python-versions = ">=3.8"
files = [
{file = "sphinx_autodoc2-0.5.0-py3-none-any.whl", hash = "sha256:e867013b1512f9d6d7e6f6799f8b537d6884462acd118ef361f3f619a60b5c9e"},
{file = "sphinx_autodoc2-0.5.0.tar.gz", hash = "sha256:7d76044aa81d6af74447080182b6868c7eb066874edc835e8ddf810735b6565a"},
{file = "sphinx-autodoc2-0.4.2.tar.gz", hash = "sha256:06da226a25a4339e173b34bb0e590e0ba9b4570b414796140aee1939d09acb3a"},
{file = "sphinx_autodoc2-0.4.2-py3-none-any.whl", hash = "sha256:00835ba8c980b9c510ea794c3e2060e5a254a74c6c22badc9bfd3642dc1034b4"},
]

[package.dependencies]
astroid = ">=2.7,<4"
astroid = ">=2.7"
tomli = {version = "*", markers = "python_version < \"3.11\""}
typing-extensions = "*"

@@ -2709,7 +2710,7 @@ typing-extensions = "*"
cli = ["typer[all]"]
docs = ["furo", "myst-parser", "sphinx (>=4.0.0)"]
sphinx = ["sphinx (>=4.0.0)"]
testing = ["pytest", "pytest-cov", "pytest-regressions", "sphinx (>=4.0.0,<7)"]
testing = ["pytest", "pytest-cov", "pytest-regressions", "sphinx (>=4.0.0)"]

[[package]]
name = "sphinx-basic-ng"
@@ -3054,13 +3055,13 @@ files = [

[[package]]
name = "types-jsonschema"
version = "4.20.0.0"
version = "4.19.0.4"
description = "Typing stubs for jsonschema"
optional = false
python-versions = ">=3.8"
files = [
{file = "types-jsonschema-4.20.0.0.tar.gz", hash = "sha256:0de1032d243f1d3dba8b745ad84efe8c1af71665a9deb1827636ac535dcb79c1"},
{file = "types_jsonschema-4.20.0.0-py3-none-any.whl", hash = "sha256:e6d5df18aaca4412f0aae246a294761a92040e93d7bc840f002b7329a8b72d26"},
{file = "types-jsonschema-4.19.0.4.tar.gz", hash = "sha256:994feb6632818259c4b5dbd733867824cb475029a6abc2c2b5201a2268b6e7d2"},
{file = "types_jsonschema-4.19.0.4-py3-none-any.whl", hash = "sha256:b73c3f4ba3cd8108602d1198a438e2698d5eb6b9db206ed89a33e24729b0abe7"},
]

[package.dependencies]
@@ -3090,24 +3091,24 @@ files = [

[[package]]
name = "types-pillow"
version = "10.1.0.2"
version = "10.1.0.0"
description = "Typing stubs for Pillow"
optional = false
python-versions = ">=3.7"
files = [
{file = "types-Pillow-10.1.0.2.tar.gz", hash = "sha256:525c1c5ee67b0ac1721c40d2bc618226ef2123c347e527e14e05b920721a13b9"},
{file = "types_Pillow-10.1.0.2-py3-none-any.whl", hash = "sha256:131078ffa547bf9a201d39ffcdc65633e108148085f4f1b07d4647fcfec6e923"},
{file = "types-Pillow-10.1.0.0.tar.gz", hash = "sha256:0f5e7cf010ed226800cb5821e87781e5d0e81257d948a9459baa74a8c8b7d822"},
{file = "types_Pillow-10.1.0.0-py3-none-any.whl", hash = "sha256:f97f596b6a39ddfd26da3eb67421062193e10732d2310f33898d36f9694331b5"},
]

[[package]]
name = "types-psycopg2"
version = "2.9.21.16"
version = "2.9.21.15"
description = "Typing stubs for psycopg2"
optional = false
python-versions = ">=3.7"
files = [
{file = "types-psycopg2-2.9.21.16.tar.gz", hash = "sha256:44a3ae748173bb637cff31654d6bd12de9ad0c7ad73afe737df6152830ed82ed"},
{file = "types_psycopg2-2.9.21.16-py3-none-any.whl", hash = "sha256:e2f24b651239ccfda320ab3457099af035cf37962c36c9fa26a4dc65991aebed"},
{file = "types-psycopg2-2.9.21.15.tar.gz", hash = "sha256:cf99b62ab32cd4ef412fc3c4da1c29ca5a130847dff06d709b84a523802406f0"},
{file = "types_psycopg2-2.9.21.15-py3-none-any.whl", hash = "sha256:cc80479def02e4dd1ef21649d82f04426c73bc0693bcc0a8b5223c7c168472af"},
]

[[package]]
@@ -3151,13 +3152,13 @@ urllib3 = ">=2"

[[package]]
name = "types-setuptools"
version = "68.2.0.2"
version = "68.2.0.0"
description = "Typing stubs for setuptools"
optional = false
python-versions = ">=3.7"
python-versions = "*"
files = [
{file = "types-setuptools-68.2.0.2.tar.gz", hash = "sha256:09efc380ad5c7f78e30bca1546f706469568cf26084cfab73ecf83dea1d28446"},
{file = "types_setuptools-68.2.0.2-py3-none-any.whl", hash = "sha256:d5b5ff568ea2474eb573dcb783def7dadfd9b1ff638bb653b3c7051ce5aeb6d1"},
{file = "types-setuptools-68.2.0.0.tar.gz", hash = "sha256:a4216f1e2ef29d089877b3af3ab2acf489eb869ccaf905125c69d2dc3932fd85"},
{file = "types_setuptools-68.2.0.0-py3-none-any.whl", hash = "sha256:77edcc843e53f8fc83bb1a840684841f3dc804ec94562623bfa2ea70d5a2ba1b"},
]

[[package]]
@@ -3432,4 +3433,4 @@ user-search = ["pyicu"]
[metadata]
lock-version = "2.0"
python-versions = "^3.8.0"
content-hash = "57716a9580b3493c3d2038492a6d4c36d1d16a79c5a0880b6eadcaf681503d3a"
content-hash = "369455d6a67753a6bcfbad3cd86801b1dd02896d0180080e2ba9501e007353ec"

@@ -96,7 +96,7 @@ module-name = "synapse.synapse_rust"

[tool.poetry]
name = "matrix-synapse"
version = "1.98.0"
version = "1.96.0rc1"
description = "Homeserver for the Matrix decentralised comms protocol"
authors = ["Matrix.org Team and Contributors <packages@matrix.org>"]
license = "Apache-2.0"
@@ -321,7 +321,7 @@ all = [
# This helps prevent merge conflicts when running a batch of dependabot updates.
isort = ">=5.10.1"
black = ">=22.7.0"
ruff = "0.1.6"
ruff = "0.1.4"
# Type checking only works with the pydantic.v1 compat module from pydantic v2
pydantic = "^2"

@@ -370,7 +370,7 @@ optional = true
|
||||
|
||||
[tool.poetry.group.dev-docs.dependencies]
|
||||
sphinx = {version = "^6.1", python = "^3.8"}
|
||||
sphinx-autodoc2 = {version = ">=0.4.2,<0.6.0", python = "^3.8"}
|
||||
sphinx-autodoc2 = {version = "^0.4.2", python = "^3.8"}
|
||||
myst-parser = {version = "^1.0.0", python = "^3.8"}
|
||||
furo = ">=2022.12.7,<2024.0.0"
|
||||
|
||||
@@ -382,7 +382,7 @@ furo = ">=2022.12.7,<2024.0.0"
|
||||
# runtime errors caused by build system changes.
|
||||
# We are happy to raise these upper bounds upon request,
|
||||
# provided we check that it's safe to do so (i.e. that CI passes).
|
||||
requires = ["poetry-core>=1.1.0,<=1.8.1", "setuptools_rust>=1.3,<=1.8.1"]
|
||||
requires = ["poetry-core>=1.1.0,<=1.7.0", "setuptools_rust>=1.3,<=1.8.1"]
|
||||
build-backend = "poetry.core.masonry.api"
|
||||
|
||||
|
||||
|
||||
@@ -25,14 +25,14 @@ name = "synapse.synapse_rust"
anyhow = "1.0.63"
lazy_static = "1.4.0"
log = "0.4.17"
pyo3 = { version = "0.20.0", features = [
pyo3 = { version = "0.19.2", features = [
    "macros",
    "anyhow",
    "abi3",
    "abi3-py38",
] }
pyo3-log = "0.9.0"
pythonize = "0.20.0"
pyo3-log = "0.8.1"
pythonize = "0.19.0"
regex = "1.6.0"
serde = { version = "1.0.144", features = ["derive"] }
serde_json = "1.0.85"
@@ -296,7 +296,8 @@ impl<'source> FromPyObject<'source> for JsonValue {
            match l.iter().map(SimpleJsonValue::extract).collect() {
                Ok(a) => Ok(JsonValue::Array(a)),
                Err(e) => Err(PyTypeError::new_err(format!(
                    "Can't convert to JsonValue::Array: {e}"
                    "Can't convert to JsonValue::Array: {}",
                    e
                ))),
            }
        } else if let Ok(v) = SimpleJsonValue::extract(ob) {
@@ -1,181 +0,0 @@
#!/usr/bin/env python
# Copyright 2023 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""A script to calculate which versions of Synapse have backwards-compatible
database schemas. It creates a Markdown table of Synapse versions and the earliest
compatible version.

It is compatible with the mdbook protocol for preprocessors (see
https://rust-lang.github.io/mdBook/for_developers/preprocessors.html#implementing-a-preprocessor-with-a-different-language):

Exit 0 to denote support for all renderers:

    ./scripts-dev/schema_versions.py supports <mdbook renderer>

Parse a JSON list from stdin and add the table to the proper documentation page:

    ./scripts-dev/schema_versions.py

Additionally, the script supports dumping the table to stdout for debugging:

    ./scripts-dev/schema_versions.py dump
"""

import io
import json
import sys
from collections import defaultdict
from typing import Any, Dict, Iterator, Optional, Tuple

import git
from packaging import version

# The schema version has moved around over the years.
SCHEMA_VERSION_FILES = (
    "synapse/storage/schema/__init__.py",
    "synapse/storage/prepare_database.py",
    "synapse/storage/__init__.py",
    "synapse/app/homeserver.py",
)


# Skip versions of Synapse < v1.0, they're old and essentially not
# compatible with today's federation.
OLDEST_SHOWN_VERSION = version.parse("v1.0")


def get_schema_versions(tag: git.Tag) -> Tuple[Optional[int], Optional[int]]:
    """Get the schema and schema compat versions for a tag."""
    schema_version = None
    schema_compat_version = None

    for file in SCHEMA_VERSION_FILES:
        try:
            schema_file = tag.commit.tree / file
        except KeyError:
            continue

        # We (usually) can't execute the code since it might have unknown imports.
        if file != "synapse/storage/schema/__init__.py":
            with io.BytesIO(schema_file.data_stream.read()) as f:
                for line in f.readlines():
                    if line.startswith(b"SCHEMA_VERSION"):
                        schema_version = int(line.split()[2])

                        # Bail early.
                        break
        else:
            # SCHEMA_COMPAT_VERSION is sometimes across multiple lines, the easiest
            # thing to do is exec the code. Luckily it has only ever existed in
            # a file which imports nothing else from Synapse.
            locals: Dict[str, Any] = {}
            exec(schema_file.data_stream.read().decode("utf-8"), {}, locals)
            schema_version = locals["SCHEMA_VERSION"]
            schema_compat_version = locals.get("SCHEMA_COMPAT_VERSION")

    return schema_version, schema_compat_version


def get_tags(repo: git.Repo) -> Iterator[git.Tag]:
    """Return an iterator of tags sorted by version."""
    tags = []
    for tag in repo.tags:
        # All "real" Synapse tags are of the form vX.Y.Z.
        if not tag.name.startswith("v"):
            continue

        # There's a weird tag from the initial react UI.
        if tag.name == "v0.1":
            continue

        try:
            tag_version = version.parse(tag.name)
        except version.InvalidVersion:
            # Skip invalid versions.
            continue

        # Skip pre- and post-release versions.
        if tag_version.is_prerelease or tag_version.is_postrelease or tag_version.local:
            continue

        # Skip old versions.
        if tag_version < OLDEST_SHOWN_VERSION:
            continue

        tags.append((tag_version, tag))

    # Sort based on the version number (not lexically).
    return (tag for _, tag in sorted(tags, key=lambda t: t[0]))


def calculate_version_chart() -> str:
    repo = git.Repo(path=".")

    # Map of schema version -> Synapse versions which are at that schema version.
    schema_versions = defaultdict(list)
    # Map of schema version -> Synapse versions which are compatible with that
    # schema version.
    schema_compat_versions = defaultdict(list)

    # Find ranges of versions which are compatible with a schema version.
    #
    # There are two modes of operation:
    #
    # 1. Pre-schema_compat_version (i.e. schema_compat_version of None), then
    #    Synapse is compatible up/downgrading to a version with
    #    schema_version >= its current version.
    #
    # 2. Post-schema_compat_version (i.e. schema_compat_version is *not* None),
    #    then Synapse is compatible up/downgrading to a version with
    #    schema version >= schema_compat_version.
    #
    #    This is more generous and avoids versions that cannot be rolled back.
    #
    # See https://github.com/matrix-org/synapse/pull/9933 which was included in v1.37.0.
    for tag in get_tags(repo):
        schema_version, schema_compat_version = get_schema_versions(tag)

        # If a schema compat version is given, prefer that over the schema version.
        schema_versions[schema_version].append(tag.name)
        schema_compat_versions[schema_compat_version or schema_version].append(tag.name)

    # Generate a table which maps the latest Synapse version compatible with each
    # schema version.
    result = f"| {'Versions': ^19} | Compatible version |\n"
    result += f"|{'-' * (19 + 2)}|{'-' * (18 + 2)}|\n"
    for schema_version, synapse_versions in schema_compat_versions.items():
        result += f"| {synapse_versions[0] + ' – ' + synapse_versions[-1]: ^19} | {schema_versions[schema_version][0]: ^18} |\n"

    return result


if __name__ == "__main__":
    if len(sys.argv) == 3 and sys.argv[1] == "supports":
        # We don't care about the renderer which is being used, which is the second argument.
        sys.exit(0)
    elif len(sys.argv) == 2 and sys.argv[1] == "dump":
        print(calculate_version_chart())
    else:
        # Expect JSON data on stdin.
        context, book = json.load(sys.stdin)

        for section in book["sections"]:
            if "Chapter" in section and section["Chapter"]["path"] == "upgrade.md":
                section["Chapter"]["content"] = section["Chapter"]["content"].replace(
                    "<!-- REPLACE_WITH_SCHEMA_VERSIONS -->", calculate_version_chart()
                )

        # Print the result back out to stdout.
        print(json.dumps(book))
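Editor's note: the deleted preprocessor speaks mdbook's stdin/stdout JSON protocol, reading a `[context, book]` pair and printing the modified book back. A minimal sketch of how one might exercise that contract locally; the fake `book` structure below is hand-rolled for illustration, not a real mdbook dump:

```python
import json
import subprocess

# A minimal fake mdbook "book" with one chapter containing the placeholder
# comment that the preprocessor replaces with the schema-version table.
book = {
    "sections": [
        {
            "Chapter": {
                "path": "upgrade.md",
                "content": "## Upgrading\n<!-- REPLACE_WITH_SCHEMA_VERSIONS -->\n",
            }
        }
    ]
}

# mdbook sends a [context, book] pair on stdin; an empty context suffices here.
result = subprocess.run(
    ["./scripts-dev/schema_versions.py"],
    input=json.dumps([{}, book]),
    capture_output=True,
    text=True,
    check=True,
)
print(json.loads(result.stdout)["sections"][0]["Chapter"]["content"])
```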
@@ -27,8 +27,6 @@ from synapse.api.errors import (
    UnstableSpecAuthError,
)
from synapse.appservice import ApplicationService
from synapse.http import get_request_user_agent
from synapse.http.site import SynapseRequest
from synapse.logging.opentracing import trace
from synapse.types import Requester, create_requester
from synapse.util.cancellation import cancellable
@@ -47,9 +45,6 @@ class BaseAuth:
        self.store = hs.get_datastores().main
        self._storage_controllers = hs.get_storage_controllers()

        self._track_appservice_user_ips = hs.config.appservice.track_appservice_user_ips
        self._track_puppeted_user_ips = hs.config.api.track_puppeted_user_ips

    async def check_user_in_room(
        self,
        room_id: str,
@@ -354,46 +349,3 @@ class BaseAuth:
        return create_requester(
            effective_user_id, app_service=app_service, device_id=effective_device_id
        )

    async def _record_request(
        self, request: SynapseRequest, requester: Requester
    ) -> None:
        """Record that this request was made.

        This updates the client_ips and monthly_active_user tables.
        """
        ip_addr = request.get_client_ip_if_available()

        if ip_addr and (not requester.app_service or self._track_appservice_user_ips):
            user_agent = get_request_user_agent(request)
            access_token = self.get_access_token_from_request(request)

            # XXX(quenting): I'm 95% confident that we could skip setting the
            # device_id to "dummy-device" for appservices, and that the only impact
            # would be some rows which would not deduplicate in the 'user_ips'
            # table during the transition
            recorded_device_id = (
                "dummy-device"
                if requester.device_id is None and requester.app_service is not None
                else requester.device_id
            )
            await self.store.insert_client_ip(
                user_id=requester.authenticated_entity,
                access_token=access_token,
                ip=ip_addr,
                user_agent=user_agent,
                device_id=recorded_device_id,
            )

            # Track also the puppeted user client IP if enabled and the user is puppeting
            if (
                requester.user.to_string() != requester.authenticated_entity
                and self._track_puppeted_user_ips
            ):
                await self.store.insert_client_ip(
                    user_id=requester.user.to_string(),
                    access_token=access_token,
                    ip=ip_addr,
                    user_agent=user_agent,
                    device_id=requester.device_id,
                )
@@ -22,6 +22,7 @@ from synapse.api.errors import (
    InvalidClientTokenError,
    MissingClientTokenError,
)
from synapse.http import get_request_user_agent
from synapse.http.site import SynapseRequest
from synapse.logging.opentracing import active_span, force_tracing, start_active_span
from synapse.types import Requester, create_requester
@@ -47,6 +48,8 @@ class InternalAuth(BaseAuth):
        self._account_validity_handler = hs.get_account_validity_handler()
        self._macaroon_generator = hs.get_macaroon_generator()

        self._track_appservice_user_ips = hs.config.appservice.track_appservice_user_ips
        self._track_puppeted_user_ips = hs.config.api.track_puppeted_user_ips
        self._force_tracing_for_users = hs.config.tracing.force_tracing_for_users

    @cancellable
@@ -112,6 +115,9 @@ class InternalAuth(BaseAuth):
        Once get_user_by_req has set up the opentracing span, this does the actual work.
        """
        try:
            ip_addr = request.get_client_ip_if_available()
            user_agent = get_request_user_agent(request)

            access_token = self.get_access_token_from_request(request)

            # First check if it could be a request from an appservice
@@ -148,7 +154,38 @@ class InternalAuth(BaseAuth):
                    errcode=Codes.EXPIRED_ACCOUNT,
                )

            await self._record_request(request, requester)
            if ip_addr and (
                not requester.app_service or self._track_appservice_user_ips
            ):
                # XXX(quenting): I'm 95% confident that we could skip setting the
                # device_id to "dummy-device" for appservices, and that the only impact
                # would be some rows which would not deduplicate in the 'user_ips'
                # table during the transition
                recorded_device_id = (
                    "dummy-device"
                    if requester.device_id is None and requester.app_service is not None
                    else requester.device_id
                )
                await self.store.insert_client_ip(
                    user_id=requester.authenticated_entity,
                    access_token=access_token,
                    ip=ip_addr,
                    user_agent=user_agent,
                    device_id=recorded_device_id,
                )

                # Track also the puppeted user client IP if enabled and the user is puppeting
                if (
                    requester.user.to_string() != requester.authenticated_entity
                    and self._track_puppeted_user_ips
                ):
                    await self.store.insert_client_ip(
                        user_id=requester.user.to_string(),
                        access_token=access_token,
                        ip=ip_addr,
                        user_agent=user_agent,
                        device_id=requester.device_id,
                    )

            if requester.is_guest and not allow_guest:
                raise AuthError(
@@ -227,10 +227,6 @@ class MSC3861DelegatedAuth(BaseAuth):
        # so that we don't provision the user if they don't have enough permission:
        requester = await self.get_user_by_access_token(access_token, allow_expired)

        # Do not record requests from MAS using the virtual `__oidc_admin` user.
        if access_token != self._admin_token:
            await self._record_request(request, requester)

        if not allow_guest and requester.is_guest:
            raise OAuthInsufficientScopeError([SCOPE_MATRIX_API])
@@ -419,7 +419,3 @@ class ExperimentalConfig(Config):
        self.msc4028_push_encrypted_events = experimental.get(
            "msc4028_push_encrypted_events", False
        )

        self.msc4069_profile_inhibit_propagation = experimental.get(
            "msc4069_profile_inhibit_propagation", False
        )
@@ -48,7 +48,6 @@ class ServerNoticesConfig(Config):
        self.server_notices_mxid_display_name: Optional[str] = None
        self.server_notices_mxid_avatar_url: Optional[str] = None
        self.server_notices_room_name: Optional[str] = None
        self.server_notices_auto_join: bool = False

    def read_config(self, config: JsonDict, **kwargs: Any) -> None:
        c = config.get("server_notices")
@@ -63,4 +62,3 @@ class ServerNoticesConfig(Config):
        self.server_notices_mxid_avatar_url = c.get("system_mxid_avatar_url", None)
        # todo: i18n
        self.server_notices_room_name = c.get("room_name", "Server Notices")
        self.server_notices_auto_join = c.get("auto_join", False)
@@ -21,7 +21,6 @@ from typing import (
    TYPE_CHECKING,
    AbstractSet,
    Awaitable,
    BinaryIO,
    Callable,
    Collection,
    Container,
@@ -1863,43 +1862,6 @@ class FederationClient(FederationBase):

        return filtered_statuses, filtered_failures

    async def download_media(
        self,
        destination: str,
        media_id: str,
        output_stream: BinaryIO,
        max_size: int,
        max_timeout_ms: int,
    ) -> Tuple[int, Dict[bytes, List[bytes]]]:
        try:
            return await self.transport_layer.download_media_v3(
                destination,
                media_id,
                output_stream=output_stream,
                max_size=max_size,
                max_timeout_ms=max_timeout_ms,
            )
        except HttpResponseException as e:
            # If an error is received that is due to an unrecognised endpoint,
            # fallback to the r0 endpoint. Otherwise, consider it a legitimate error
            # and raise.
            if not is_unknown_endpoint(e):
                raise

            logger.debug(
                "Couldn't download media %s/%s with the v3 API, falling back to the r0 API",
                destination,
                media_id,
            )

            return await self.transport_layer.download_media_r0(
                destination,
                media_id,
                output_stream=output_stream,
                max_size=max_size,
                max_timeout_ms=max_timeout_ms,
            )


@attr.s(frozen=True, slots=True, auto_attribs=True)
class TimestampToEventResponse:
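Editor's note: the deleted helper tries the newer v3 endpoint first and only falls back to r0 when the remote signals an unrecognised endpoint. A generic sketch of that negotiate-by-fallback pattern; the `fetch_v3`/`fetch_r0` callables and `UnknownEndpoint` error are illustrative stand-ins, not Synapse APIs:

```python
class UnknownEndpoint(Exception):
    """Raised when the remote does not recognise the requested endpoint."""


async def download_with_fallback(fetch_v3, fetch_r0, destination: str, media_id: str):
    """Prefer the newer API, but degrade gracefully for old servers."""
    try:
        return await fetch_v3(destination, media_id)
    except UnknownEndpoint:
        # Only an "unknown endpoint" response triggers the fallback;
        # any other error is legitimate and propagates to the caller.
        return await fetch_r0(destination, media_id)
```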
@@ -18,7 +18,6 @@ import urllib
from typing import (
    TYPE_CHECKING,
    Any,
    BinaryIO,
    Callable,
    Collection,
    Dict,
@@ -805,58 +804,6 @@ class TransportLayerClient:
            destination=destination, path=path, data={"user_ids": user_ids}
        )

    async def download_media_r0(
        self,
        destination: str,
        media_id: str,
        output_stream: BinaryIO,
        max_size: int,
        max_timeout_ms: int,
    ) -> Tuple[int, Dict[bytes, List[bytes]]]:
        path = f"/_matrix/media/r0/download/{destination}/{media_id}"

        return await self.client.get_file(
            destination,
            path,
            output_stream=output_stream,
            max_size=max_size,
            args={
                # tell the remote server to 404 if it doesn't
                # recognise the server_name, to make sure we don't
                # end up with a routing loop.
                "allow_remote": "false",
                "timeout_ms": str(max_timeout_ms),
            },
        )

    async def download_media_v3(
        self,
        destination: str,
        media_id: str,
        output_stream: BinaryIO,
        max_size: int,
        max_timeout_ms: int,
    ) -> Tuple[int, Dict[bytes, List[bytes]]]:
        path = f"/_matrix/media/v3/download/{destination}/{media_id}"

        return await self.client.get_file(
            destination,
            path,
            output_stream=output_stream,
            max_size=max_size,
            args={
                # tell the remote server to 404 if it doesn't
                # recognise the server_name, to make sure we don't
                # end up with a routing loop.
                "allow_remote": "false",
                "timeout_ms": str(max_timeout_ms),
                # Matrix 1.7 allows for this to redirect to another URL, this should
                # just be ignored for an old homeserver, so always provide it.
                "allow_redirect": "true",
            },
            follow_redirects=True,
        )


def _create_path(federation_prefix: str, path: str, *args: str) -> str:
    """
@@ -98,22 +98,6 @@ class AccountValidityHandler:
        for callback in self._module_api_callbacks.on_user_registration_callbacks:
            await callback(user_id)

    async def on_user_login(
        self,
        user_id: str,
        auth_provider_type: Optional[str],
        auth_provider_id: Optional[str],
    ) -> None:
        """Tell third-party modules about a user's login.

        Args:
            user_id: The mxID of the user.
            auth_provider_type: The type of login.
            auth_provider_id: The ID of the auth provider.
        """
        for callback in self._module_api_callbacks.on_user_login_callbacks:
            await callback(user_id, auth_provider_type, auth_provider_id)

    @wrap_as_background_process("send_renewals")
    async def _send_renewal_emails(self) -> None:
        """Gets the list of users whose account is expiring in the amount of time
@@ -212,7 +212,6 @@ class AuthHandler:
        self._password_enabled_for_reauth = hs.config.auth.password_enabled_for_reauth
        self._password_localdb_enabled = hs.config.auth.password_localdb_enabled
        self._third_party_rules = hs.get_module_api_callbacks().third_party_event_rules
        self._account_validity_handler = hs.get_account_validity_handler()

        # Ratelimiter for failed auth during UIA. Uses same ratelimit config
        # as per `rc_login.failed_attempts`.
@@ -1784,13 +1783,6 @@ class AuthHandler:
            client_redirect_url, "loginToken", login_token
        )

        # Run post-login module callback handlers
        await self._account_validity_handler.on_user_login(
            user_id=registered_user_id,
            auth_provider_type=LoginType.SSO,
            auth_provider_id=auth_provider_id,
        )

        # if the client is whitelisted, we can redirect straight to it
        if client_redirect_url.startswith(self._whitelisted_sso_clients):
            request.redirect(redirect_url)
@@ -383,7 +383,7 @@ class DeviceWorkerHandler:
        )

    DEVICE_MSGS_DELETE_BATCH_LIMIT = 1000
    DEVICE_MSGS_DELETE_SLEEP_MS = 100
    DEVICE_MSGS_DELETE_SLEEP_MS = 1000

    async def _delete_device_messages(
        self,
@@ -396,17 +396,15 @@ class DeviceWorkerHandler:
        up_to_stream_id = task.params["up_to_stream_id"]

        # Delete the messages in batches to avoid too much DB load.
        from_stream_id = None
        while True:
            from_stream_id, _ = await self.store.delete_messages_for_device_between(
            res = await self.store.delete_messages_for_device(
                user_id=user_id,
                device_id=device_id,
                from_stream_id=from_stream_id,
                to_stream_id=up_to_stream_id,
                up_to_stream_id=up_to_stream_id,
                limit=DeviceHandler.DEVICE_MSGS_DELETE_BATCH_LIMIT,
            )

            if from_stream_id is None:
            if res < DeviceHandler.DEVICE_MSGS_DELETE_BATCH_LIMIT:
                return TaskStatus.COMPLETE, None, None

            await self.clock.sleep(DeviceHandler.DEVICE_MSGS_DELETE_SLEEP_MS / 1000.0)
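Editor's note: this hunk swaps a count-based termination check for a cursor-based one, but both sides delete in bounded batches with a sleep between passes. A hedged, generic sketch of that batched-delete-with-backoff pattern; `store.delete_batch` is a hypothetical method, not Synapse's storage API:

```python
import asyncio

BATCH_LIMIT = 1000
SLEEP_SECONDS = 0.1


async def delete_in_batches(store, user_id: str, device_id: str, up_to: int) -> None:
    """Delete rows in fixed-size batches, sleeping between batches so a large
    backlog does not monopolise the database. `store.delete_batch` is assumed
    to return the number of rows it deleted."""
    while True:
        deleted = await store.delete_batch(
            user_id=user_id,
            device_id=device_id,
            up_to_stream_id=up_to,
            limit=BATCH_LIMIT,
        )
        # A short batch means the backlog is drained.
        if deleted < BATCH_LIMIT:
            return
        await asyncio.sleep(SLEEP_SECONDS)
```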
@@ -88,7 +88,7 @@ from synapse.types import (
)
from synapse.types.state import StateFilter
from synapse.util.async_helpers import Linearizer, concurrently_execute
from synapse.util.iterutils import batch_iter, partition, sorted_topologically_batched
from synapse.util.iterutils import batch_iter, partition
from synapse.util.retryutils import NotRetryingDestination
from synapse.util.stringutils import shortstr

@@ -1669,13 +1669,14 @@ class FederationEventHandler:

        # XXX: it might be possible to kick this process off in parallel with fetching
        # the events.
        while event_map:
            # build a list of events whose auth events are not in the queue.
            roots = tuple(
                ev
                for ev in event_map.values()
                if not any(aid in event_map for aid in ev.auth_event_ids())
            )

        # We need to persist an event's auth events before the event.
        auth_graph = {
            ev: [event_map[e_id] for e_id in ev.auth_event_ids() if e_id in event_map]
            for ev in event_map.values()
        }
        for roots in sorted_topologically_batched(event_map.values(), auth_graph):
            if not roots:
                # if *none* of the remaining events are ready, that means
                # we have a loop. This either means a bug in our logic, or that
@@ -1697,6 +1698,9 @@ class FederationEventHandler:

            await self._auth_and_persist_outliers_inner(room_id, roots)

            for ev in roots:
                del event_map[ev.event_id]

    async def _auth_and_persist_outliers_inner(
        self, room_id: str, fetched_events: Collection[EventBase]
    ) -> None:
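Editor's note: both sides of this hunk implement the same idea, persisting events layer by layer so an event's auth events land first; one peels off "roots" by hand, the other delegates to `sorted_topologically_batched`. A minimal stand-alone sketch of layer-by-layer topological batching (a simplified Kahn-style pass, not Synapse's implementation):

```python
from typing import Dict, Iterable, List, TypeVar

T = TypeVar("T")


def topological_batches(nodes: Iterable[T], deps: Dict[T, List[T]]) -> List[List[T]]:
    """Group nodes into batches such that every node's dependencies appear in
    an earlier batch. `deps[n]` lists the nodes that must precede `n`."""
    remaining = set(nodes)
    batches: List[List[T]] = []
    while remaining:
        # Roots: nodes with no unprocessed dependencies.
        roots = [n for n in remaining if not any(d in remaining for d in deps.get(n, []))]
        if not roots:
            raise ValueError("dependency cycle detected")
        batches.append(roots)
        remaining.difference_update(roots)
    return batches


# Example: b depends on a; c depends on a and b.
print(topological_batches(["a", "b", "c"], {"b": ["a"], "c": ["a", "b"]}))
# [['a'], ['b'], ['c']]
```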
@@ -693,9 +693,13 @@ class EventCreationHandler:
        if require_consent and not is_exempt:
            await self.assert_accepted_privacy_policy(requester)

        # Save the device ID and the transaction ID in the event internal metadata.
        # This is useful to determine if we should echo the transaction_id in events.
        # Save the access token ID, the device ID and the transaction ID in the event
        # internal metadata. This is useful to determine if we should echo the
        # transaction_id in events.
        # See `synapse.events.utils.EventClientSerializer.serialize_event`
        if requester.access_token_id is not None:
            builder.internal_metadata.token_id = requester.access_token_id

        if requester.device_id is not None:
            builder.internal_metadata.device_id = requester.device_id
@@ -129,7 +129,6 @@ class ProfileHandler:
        new_displayname: str,
        by_admin: bool = False,
        deactivation: bool = False,
        propagate: bool = True,
    ) -> None:
        """Set the displayname of a user

@@ -139,7 +138,6 @@ class ProfileHandler:
            new_displayname: The displayname to give this user.
            by_admin: Whether this change was made by an administrator.
            deactivation: Whether this change was made while deactivating the user.
            propagate: Whether this change also applies to the user's membership events.
        """
        if not self.hs.is_mine(target_user):
            raise SynapseError(400, "User is not hosted on this homeserver")
@@ -190,8 +188,7 @@ class ProfileHandler:
            target_user.to_string(), profile, by_admin, deactivation
        )

        if propagate:
            await self._update_join_states(requester, target_user)
        await self._update_join_states(requester, target_user)

    async def get_avatar_url(self, target_user: UserID) -> Optional[str]:
        if self.hs.is_mine(target_user):
@@ -224,7 +221,6 @@ class ProfileHandler:
        new_avatar_url: str,
        by_admin: bool = False,
        deactivation: bool = False,
        propagate: bool = True,
    ) -> None:
        """Set a new avatar URL for a user.

@@ -234,7 +230,6 @@ class ProfileHandler:
            new_avatar_url: The avatar URL to give this user.
            by_admin: Whether this change was made by an administrator.
            deactivation: Whether this change was made while deactivating the user.
            propagate: Whether this change also applies to the user's membership events.
        """
        if not self.hs.is_mine(target_user):
            raise SynapseError(400, "User is not hosted on this homeserver")
@@ -283,8 +278,7 @@ class ProfileHandler:
            target_user.to_string(), profile, by_admin, deactivation
        )

        if propagate:
            await self._update_join_states(requester, target_user)
        await self._update_join_states(requester, target_user)

    @cached()
    async def check_avatar_size_and_mime_type(self, mxc: str) -> bool:
@@ -549,7 +549,7 @@ class RoomCreationHandler:
        except (TypeError, ValueError):
            ban = 50
        needed_power_level = max(
            state_default_int, ban, max(event_power_levels.values(), default=0)
            state_default_int, ban, max(event_power_levels.values())
        )

        # Get the user's current power level, this matches the logic in get_user_power_level,
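Editor's note: the only difference here is the `default=0` argument to the inner `max`; without it, an empty `event_power_levels` dict would raise. A quick illustration:

```python
event_power_levels = {}

# With a default, an empty sequence yields the fallback value.
print(max(event_power_levels.values(), default=0))  # 0

# Without one, max() on an empty sequence raises.
try:
    max(event_power_levels.values())
except ValueError as e:
    print("raised:", e)
```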
@@ -698,7 +698,6 @@ class RoomCreationHandler:
        config: JsonDict,
        ratelimit: bool = True,
        creator_join_profile: Optional[JsonDict] = None,
        ignore_forced_encryption: bool = False,
    ) -> Tuple[str, Optional[RoomAlias], int]:
        """Creates a new room.

@@ -715,8 +714,6 @@ class RoomCreationHandler:
                derived from the user's profile. If set, should contain the
                values to go in the body of the 'join' event (typically
                `avatar_url` and/or `displayname`.
            ignore_forced_encryption:
                Ignore encryption forced by `encryption_enabled_by_default_for_room_type` setting.

        Returns:
            A 3-tuple containing:
@@ -1018,7 +1015,6 @@ class RoomCreationHandler:
        room_alias: Optional[RoomAlias] = None,
        power_level_content_override: Optional[JsonDict] = None,
        creator_join_profile: Optional[JsonDict] = None,
        ignore_forced_encryption: bool = False,
    ) -> Tuple[int, str, int]:
        """Sends the initial events into a new room. Sends the room creation, membership,
        and power level events into the room sequentially, then creates and batches up the
@@ -1053,8 +1049,6 @@ class RoomCreationHandler:
            creator_join_profile:
                Set to override the displayname and avatar for the creating
                user in this room.
            ignore_forced_encryption:
                Ignore encryption forced by `encryption_enabled_by_default_for_room_type` setting.

        Returns:
            A tuple containing the stream ID, event ID and depth of the last
@@ -1257,7 +1251,7 @@ class RoomCreationHandler:
            )
            events_to_send.append((event, context))

        if config["encrypted"] and not ignore_forced_encryption:
        if config["encrypted"]:
            encryption_event, encryption_context = await create_event(
                EventTypes.RoomEncryption,
                {"algorithm": RoomEncryptionAlgorithms.DEFAULT},
@@ -2111,14 +2111,9 @@ class RoomForgetterHandler(StateDeltasHandler):
            self.pos = room_max_stream_ordering

        if not self._hs.config.room.forget_on_leave:
            # Update the processing position, so that if the server admin turns
            # the feature on at a later date, we don't decide to forget every
            # room that has ever been left in the past.
            #
            # We wait for a short time so that we don't "tight" loop just
            # keeping the table up to date.
            await self._clock.sleep(0.5)

            # Update the processing position, so that if the server admin turns the
            # feature on at a later date, we don't decide to forget every room that
            # has ever been left in the past.
            self.pos = self._store.get_room_max_stream_ordering()
            await self._store.update_room_forgetter_stream_pos(self.pos)
            return
@@ -153,18 +153,12 @@ class MatrixFederationRequest:
    """Query arguments.
    """

    txn_id: str = attr.ib(init=False)
    """Unique ID for this request (for logging), this is autogenerated.
    txn_id: Optional[str] = None
    """Unique ID for this request (for logging)
    """

    uri: bytes = b""
    """The URI of this request, usually generated from the above information.
    """

    _generate_uri: bool = True
    """True to automatically generate the uri field based on the above information.

    Set to False if manually configuring the URI.
    uri: bytes = attr.ib(init=False)
    """The URI of this request
    """

    def __attrs_post_init__(self) -> None:
@@ -174,23 +168,22 @@ class MatrixFederationRequest:

        object.__setattr__(self, "txn_id", txn_id)

        if self._generate_uri:
            destination_bytes = self.destination.encode("ascii")
            path_bytes = self.path.encode("ascii")
            query_bytes = encode_query_args(self.query)
        destination_bytes = self.destination.encode("ascii")
        path_bytes = self.path.encode("ascii")
        query_bytes = encode_query_args(self.query)

            # The object is frozen so we can pre-compute this.
            uri = urllib.parse.urlunparse(
                (
                    b"matrix-federation",
                    destination_bytes,
                    path_bytes,
                    None,
                    query_bytes,
                    b"",
                )
        # The object is frozen so we can pre-compute this.
        uri = urllib.parse.urlunparse(
            (
                b"matrix-federation",
                destination_bytes,
                path_bytes,
                None,
                query_bytes,
                b"",
            )
            object.__setattr__(self, "uri", uri)
        )
        object.__setattr__(self, "uri", uri)

    def get_json(self) -> Optional[JsonDict]:
        if self.json_callback:
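Editor's note: because the request class is a frozen attrs class, derived fields such as the pre-computed URI have to be written via `object.__setattr__` from `__attrs_post_init__`. A minimal illustration of that pattern with invented fields (not Synapse's actual class):

```python
import urllib.parse

import attr


@attr.s(slots=True, frozen=True, auto_attribs=True)
class Request:
    destination: str
    path: str
    # Derived field: not passed to __init__, computed after construction.
    uri: str = attr.ib(init=False)

    def __attrs_post_init__(self) -> None:
        # Frozen classes forbid normal attribute assignment, so we go
        # through object.__setattr__ just as attrs' own machinery does.
        uri = urllib.parse.urlunparse(
            ("https", self.destination, self.path, "", "", "")
        )
        object.__setattr__(self, "uri", uri)


req = Request(destination="example.org", path="/_matrix/key/v2/server")
print(req.uri)  # https://example.org/_matrix/key/v2/server
```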
@@ -520,7 +513,6 @@ class MatrixFederationHttpClient:
        ignore_backoff: bool = False,
        backoff_on_404: bool = False,
        backoff_on_all_error_codes: bool = False,
        follow_redirects: bool = False,
    ) -> IResponse:
        """
        Sends a request to the given server.
@@ -563,9 +555,6 @@ class MatrixFederationHttpClient:
            backoff_on_404: Back off if we get a 404
            backoff_on_all_error_codes: Back off if we get any error response

            follow_redirects: True to follow the Location header of 307/308 redirect
                responses. This does not recurse.

        Returns:
            Resolves with the HTTP response object on success.

@@ -725,26 +714,6 @@ class MatrixFederationHttpClient:
                        response.code,
                        response_phrase,
                    )
                elif (
                    response.code in (307, 308)
                    and follow_redirects
                    and response.headers.hasHeader("Location")
                ):
                    # The Location header *might* be relative so resolve it.
                    location = response.headers.getRawHeaders(b"Location")[0]
                    new_uri = urllib.parse.urljoin(request.uri, location)

                    return await self._send_request(
                        attr.evolve(request, uri=new_uri, generate_uri=False),
                        retry_on_dns_fail,
                        timeout,
                        long_retries,
                        ignore_backoff,
                        backoff_on_404,
                        backoff_on_all_error_codes,
                        # Do not continue following redirects.
                        follow_redirects=False,
                    )
                else:
                    logger.info(
                        "{%s} [%s] Got response headers: %d %s",
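Editor's note: the removed branch follows a single 307/308 redirect by re-issuing the request with `follow_redirects=False`, which caps the chain at one hop. A generic sketch of that one-hop pattern; the `fetch` coroutine and its response shape are illustrative stand-ins, not Synapse's client:

```python
import urllib.parse
from typing import Optional


async def get_with_one_redirect(fetch, uri: str, follow_redirects: bool = True):
    """`fetch` is assumed to return an object with .code and a dict-like
    .headers. We follow at most one 307/308 redirect by recursing with the
    flag turned off."""
    response = await fetch(uri)

    location: Optional[str] = response.headers.get("Location")
    if response.code in (307, 308) and follow_redirects and location:
        # The Location header might be relative, so resolve it against
        # the original URI before re-issuing the request.
        new_uri = urllib.parse.urljoin(uri, location)
        return await get_with_one_redirect(fetch, new_uri, follow_redirects=False)

    return response
```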
@@ -1414,7 +1383,6 @@ class MatrixFederationHttpClient:
        retry_on_dns_fail: bool = True,
        max_size: Optional[int] = None,
        ignore_backoff: bool = False,
        follow_redirects: bool = False,
    ) -> Tuple[int, Dict[bytes, List[bytes]]]:
        """GETs a file from a given homeserver
        Args:
@@ -1424,8 +1392,6 @@ class MatrixFederationHttpClient:
            args: Optional dictionary used to create the query string.
            ignore_backoff: true to ignore the historical backoff data
                and try the request anyway.
            follow_redirects: True to follow the Location header of 307/308 redirect
                responses. This does not recurse.

        Returns:
            Resolves with an (int,dict) tuple of
@@ -1446,10 +1412,7 @@ class MatrixFederationHttpClient:
        )

        response = await self._send_request(
            request,
            retry_on_dns_fail=retry_on_dns_fail,
            ignore_backoff=ignore_backoff,
            follow_redirects=follow_redirects,
            request, retry_on_dns_fail=retry_on_dns_fail, ignore_backoff=ignore_backoff
        )

        headers = dict(response.headers.getAllRawHeaders())
@@ -77,7 +77,7 @@ class MediaRepository:
    def __init__(self, hs: "HomeServer"):
        self.hs = hs
        self.auth = hs.get_auth()
        self.client = hs.get_federation_client()
        self.client = hs.get_federation_http_client()
        self.clock = hs.get_clock()
        self.server_name = hs.hostname
        self.store = hs.get_datastores().main
@@ -644,13 +644,22 @@ class MediaRepository:
        file_info = FileInfo(server_name=server_name, file_id=file_id)

        with self.media_storage.store_into_file(file_info) as (f, fname, finish):
            request_path = "/".join(
                ("/_matrix/media/r0/download", server_name, media_id)
            )
            try:
                length, headers = await self.client.download_media(
                length, headers = await self.client.get_file(
                    server_name,
                    media_id,
                    request_path,
                    output_stream=f,
                    max_size=self.max_upload_size,
                    max_timeout_ms=max_timeout_ms,
                    args={
                        # tell the remote server to 404 if it doesn't
                        # recognise the server_name, to make sure we don't
                        # end up with a routing loop.
                        "allow_remote": "false",
                        "timeout_ms": str(max_timeout_ms),
                    },
                )
            except RequestSendFailed as e:
                logger.warning(
@@ -80,7 +80,6 @@ from synapse.module_api.callbacks.account_validity_callbacks import (
    ON_LEGACY_ADMIN_REQUEST,
    ON_LEGACY_RENEW_CALLBACK,
    ON_LEGACY_SEND_MAIL_CALLBACK,
    ON_USER_LOGIN_CALLBACK,
    ON_USER_REGISTRATION_CALLBACK,
)
from synapse.module_api.callbacks.spamchecker_callbacks import (
@@ -335,7 +334,6 @@ class ModuleApi:
        *,
        is_user_expired: Optional[IS_USER_EXPIRED_CALLBACK] = None,
        on_user_registration: Optional[ON_USER_REGISTRATION_CALLBACK] = None,
        on_user_login: Optional[ON_USER_LOGIN_CALLBACK] = None,
        on_legacy_send_mail: Optional[ON_LEGACY_SEND_MAIL_CALLBACK] = None,
        on_legacy_renew: Optional[ON_LEGACY_RENEW_CALLBACK] = None,
        on_legacy_admin_request: Optional[ON_LEGACY_ADMIN_REQUEST] = None,
@@ -347,7 +345,6 @@ class ModuleApi:
        return self._callbacks.account_validity.register_callbacks(
            is_user_expired=is_user_expired,
            on_user_registration=on_user_registration,
            on_user_login=on_user_login,
            on_legacy_send_mail=on_legacy_send_mail,
            on_legacy_renew=on_legacy_renew,
            on_legacy_admin_request=on_legacy_admin_request,
@@ -22,7 +22,6 @@ logger = logging.getLogger(__name__)
# Types for callbacks to be registered via the module api
IS_USER_EXPIRED_CALLBACK = Callable[[str], Awaitable[Optional[bool]]]
ON_USER_REGISTRATION_CALLBACK = Callable[[str], Awaitable]
ON_USER_LOGIN_CALLBACK = Callable[[str, Optional[str], Optional[str]], Awaitable]
# Temporary hooks to allow for a transition from `/_matrix/client` endpoints
# to `/_synapse/client/account_validity`. See `register_callbacks` below.
ON_LEGACY_SEND_MAIL_CALLBACK = Callable[[str], Awaitable]
@@ -34,7 +33,6 @@ class AccountValidityModuleApiCallbacks:
    def __init__(self) -> None:
        self.is_user_expired_callbacks: List[IS_USER_EXPIRED_CALLBACK] = []
        self.on_user_registration_callbacks: List[ON_USER_REGISTRATION_CALLBACK] = []
        self.on_user_login_callbacks: List[ON_USER_LOGIN_CALLBACK] = []
        self.on_legacy_send_mail_callback: Optional[ON_LEGACY_SEND_MAIL_CALLBACK] = None
        self.on_legacy_renew_callback: Optional[ON_LEGACY_RENEW_CALLBACK] = None

@@ -46,7 +44,6 @@ class AccountValidityModuleApiCallbacks:
        self,
        is_user_expired: Optional[IS_USER_EXPIRED_CALLBACK] = None,
        on_user_registration: Optional[ON_USER_REGISTRATION_CALLBACK] = None,
        on_user_login: Optional[ON_USER_LOGIN_CALLBACK] = None,
        on_legacy_send_mail: Optional[ON_LEGACY_SEND_MAIL_CALLBACK] = None,
        on_legacy_renew: Optional[ON_LEGACY_RENEW_CALLBACK] = None,
        on_legacy_admin_request: Optional[ON_LEGACY_ADMIN_REQUEST] = None,
@@ -58,9 +55,6 @@ class AccountValidityModuleApiCallbacks:
        if on_user_registration is not None:
            self.on_user_registration_callbacks.append(on_user_registration)

        if on_user_login is not None:
            self.on_user_login_callbacks.append(on_user_login)

        # The builtin account validity feature exposes 3 endpoints (send_mail, renew, and
        # an admin one). As part of moving the feature into a module, we need to change
        # the path from /_matrix/client/unstable/account_validity/... to
@@ -257,11 +257,6 @@ class ReplicationCommandHandler:
        if hs.config.redis.redis_enabled:
            self._notifier.add_lock_released_callback(self.on_lock_released)

        # Marks if we should send POSITION commands for all streams ASAP. This
        # is checked by the `ReplicationStreamer` which manages sending
        # RDATA/POSITION commands
        self._should_announce_positions = True

    def subscribe_to_channel(self, channel_name: str) -> None:
        """
        Indicates that we wish to subscribe to a Redis channel by name.
@@ -402,23 +397,29 @@ class ReplicationCommandHandler:
        return self._streams_to_replicate

    def on_REPLICATE(self, conn: IReplicationConnection, cmd: ReplicateCommand) -> None:
        self.send_positions_to_connection()
        self.send_positions_to_connection(conn)

    def send_positions_to_connection(self) -> None:
    def send_positions_to_connection(self, conn: IReplicationConnection) -> None:
        """Send current position of all streams this process is source of to
        the connection.
        """

        self._should_announce_positions = True
        self._notifier.notify_replication()

    def should_announce_positions(self) -> bool:
        """Check if we should send POSITION commands for all streams ASAP."""
        return self._should_announce_positions

    def will_announce_positions(self) -> None:
        """Mark that we're about to send POSITIONs out for all streams."""
        self._should_announce_positions = False
        # We respond with current position of all streams this instance
        # replicates.
        for stream in self.get_streams_to_replicate():
            # Note that we use the current token as the prev token here (rather
            # than stream.last_token), as we can't be sure that there have been
            # no rows written between last token and the current token (since we
            # might be racing with the replication sending bg process).
            current_token = stream.current_token(self._instance_name)
            self.send_command(
                PositionCommand(
                    stream.NAME,
                    self._instance_name,
                    current_token,
                    current_token,
                )
            )

    def on_USER_SYNC(
        self, conn: IReplicationConnection, cmd: UserSyncCommand
@@ -587,21 +588,6 @@ class ReplicationCommandHandler:

        logger.debug("Handling '%s %s'", cmd.NAME, cmd.to_line())

        # Check if we can early discard this position. We can only do so for
        # connected streams.
        stream = self._streams[cmd.stream_name]
        if stream.can_discard_position(
            cmd.instance_name, cmd.prev_token, cmd.new_token
        ) and self.is_stream_connected(conn, cmd.stream_name):
            logger.debug(
                "Discarding redundant POSITION %s/%s %s %s",
                cmd.instance_name,
                cmd.stream_name,
                cmd.prev_token,
                cmd.new_token,
            )
            return

        self._add_command_to_stream_queue(conn, cmd)

    async def _process_position(
@@ -613,18 +599,6 @@ class ReplicationCommandHandler:
        """
        stream = self._streams[stream_name]

        if stream.can_discard_position(
            cmd.instance_name, cmd.prev_token, cmd.new_token
        ) and self.is_stream_connected(conn, cmd.stream_name):
            logger.debug(
                "Discarding redundant POSITION %s/%s %s %s",
                cmd.instance_name,
                cmd.stream_name,
                cmd.prev_token,
                cmd.new_token,
            )
            return

        # We're about to go and catch up with the stream, so remove from set
        # of connected streams.
        for streams in self._streams_by_connection.values():
@@ -652,9 +626,8 @@ class ReplicationCommandHandler:
        # for why this can happen.

        logger.info(
            "Fetching replication rows for '%s' / %s between %i and %i",
            "Fetching replication rows for '%s' between %i and %i",
            stream_name,
            cmd.instance_name,
            current_token,
            cmd.new_token,
        )
@@ -684,13 +657,6 @@ class ReplicationCommandHandler:

        self._streams_by_connection.setdefault(conn, set()).add(stream_name)

    def is_stream_connected(
        self, conn: IReplicationConnection, stream_name: str
    ) -> bool:
        """Return if stream has been successfully connected and is ready to
        receive updates"""
        return stream_name in self._streams_by_connection.get(conn, ())

    def on_REMOTE_SERVER_UP(
        self, conn: IReplicationConnection, cmd: RemoteServerUpCommand
    ) -> None:
@@ -141,7 +141,7 @@ class RedisSubscriber(SubscriberProtocol):
        # We send out our positions when there is a new connection in case the
        # other side missed updates. We do this for Redis connections as the
        # other side won't know we've connected and so won't issue a REPLICATE.
        self.synapse_handler.send_positions_to_connection()
        self.synapse_handler.send_positions_to_connection(self)

    def messageReceived(self, pattern: str, channel: str, message: str) -> None:
        """Received a message from redis."""
@@ -123,7 +123,7 @@ class ReplicationStreamer:

        # We check up front to see if anything has actually changed, as we get
        # poked because of changes that happened on other instances.
        if not self.command_handler.should_announce_positions() and all(
        if all(
            stream.last_token == stream.current_token(self._instance_name)
            for stream in self.streams
        ):
@@ -158,21 +158,6 @@ class ReplicationStreamer:
                all_streams = list(all_streams)
                random.shuffle(all_streams)

            if self.command_handler.should_announce_positions():
                # We need to send out POSITIONs for all streams, usually
                # because a worker has reconnected.
                self.command_handler.will_announce_positions()

                for stream in all_streams:
                    self.command_handler.send_command(
                        PositionCommand(
                            stream.NAME,
                            self._instance_name,
                            stream.last_token,
                            stream.last_token,
                        )
                    )

            for stream in all_streams:
                if stream.last_token == stream.current_token(
                    self._instance_name
@@ -144,16 +144,6 @@ class Stream:
        """
        raise NotImplementedError()

    def can_discard_position(
        self, instance_name: str, prev_token: int, new_token: int
    ) -> bool:
        """Whether or not a position command for this stream can be discarded.

        Useful for streams that can never go backwards and where we already know
        the stream ID for the instance has advanced.
        """
        return False

    def discard_updates_and_advance(self) -> None:
        """Called when the stream should advance but the updates would be discarded,
        e.g. when there are no currently connected workers.
@@ -231,14 +221,6 @@ class _StreamFromIdGen(Stream):
    def minimal_local_current_token(self) -> Token:
        return self._stream_id_gen.get_minimal_local_current_token()

    def can_discard_position(
        self, instance_name: str, prev_token: int, new_token: int
    ) -> bool:
        # These streams can't go backwards, so we know we can ignore any
        # positions where the tokens are from before the current token.

        return new_token <= self.current_token(instance_name)


def current_token_without_instance(
    current_token: Callable[[], int]
@@ -305,14 +287,6 @@ class BackfillStream(Stream):
        # which means we need to negate it.
        return -self.store._backfill_id_gen.get_minimal_local_current_token()

    def can_discard_position(
        self, instance_name: str, prev_token: int, new_token: int
    ) -> bool:
        # Backfill stream can't go backwards, so we know we can ignore any
        # positions where the tokens are from before the current token.

        return new_token <= self.current_token(instance_name)


class PresenceStream(_StreamFromIdGen):
@attr.s(slots=True, frozen=True, auto_attribs=True)
@@ -527,14 +501,6 @@ class CachesStream(Stream):
            return self.store._cache_id_gen.get_minimal_local_current_token()
        return self.current_token(self.local_instance_name)

    def can_discard_position(
        self, instance_name: str, prev_token: int, new_token: int
    ) -> bool:
        # Caches streams can't go backwards, so we know we can ignore any
        # positions where the tokens are from before the current token.

        return new_token <= self.current_token(instance_name)


class DeviceListsStream(_StreamFromIdGen):
    """Either a user has updated their devices or a remote server needs to be
@@ -621,7 +587,7 @@ class ToDeviceStream(_StreamFromIdGen):
        super().__init__(
            hs.get_instance_name(),
            store.get_all_new_device_messages,
            store._to_device_msg_id_gen,
            store._device_inbox_id_gen,
        )
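Editor's note: several stream classes gain the same `can_discard_position` override; for a stream whose token only ever increases, a POSITION whose new token is not ahead of the current one carries no information. A toy illustration of the check with invented names, not Synapse's stream classes:

```python
class MonotonicStream:
    """A stream whose per-instance token only ever increases."""

    def __init__(self) -> None:
        self._current: dict = {}

    def current_token(self, instance_name: str) -> int:
        return self._current.get(instance_name, 0)

    def advance(self, instance_name: str, token: int) -> None:
        self._current[instance_name] = max(self.current_token(instance_name), token)

    def can_discard_position(self, instance_name: str, prev_token: int, new_token: int) -> bool:
        # Positions that don't move us forward are redundant for a
        # stream that can never go backwards.
        return new_token <= self.current_token(instance_name)


s = MonotonicStream()
s.advance("worker1", 10)
print(s.can_discard_position("worker1", 5, 8))    # True: already past token 8
print(s.can_discard_position("worker1", 10, 12))  # False: new information
```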
@@ -89,8 +89,8 @@ class ListDestinationsRestServlet(RestServlet):
            "destinations": [
                {
                    "destination": r[0],
                    "retry_last_ts": r[1] or 0,
                    "retry_interval": r[2] or 0,
                    "retry_last_ts": r[1],
                    "retry_interval": r[2],
                    "failure_ts": r[3],
                    "last_successful_stream_ordering": r[4],
                }
@@ -630,12 +630,6 @@ class UserRegisterServlet(RestServlet):
        if not hmac.compare_digest(want_mac.encode("ascii"), got_mac.encode("ascii")):
            raise SynapseError(HTTPStatus.FORBIDDEN, "HMAC incorrect")

        should_issue_refresh_token = body.get("refresh_token", False)
        if not isinstance(should_issue_refresh_token, bool):
            raise SynapseError(
                HTTPStatus.BAD_REQUEST, "refresh_token must be a boolean"
            )

        # Reuse the parts of RegisterRestServlet to reduce code duplication
        from synapse.rest.client.register import RegisterRestServlet

@@ -651,9 +645,7 @@ class UserRegisterServlet(RestServlet):
            approved=True,
        )

        result = await register._create_registration_details(
            user_id, body, should_issue_refresh_token=should_issue_refresh_token
        )
        result = await register._create_registration_details(user_id, body)
        return HTTPStatus.OK, result
@@ -115,7 +115,6 @@ class LoginRestServlet(RestServlet):
        self.registration_handler = hs.get_registration_handler()
        self._sso_handler = hs.get_sso_handler()
        self._spam_checker = hs.get_module_api_callbacks().spam_checker
        self._account_validity_handler = hs.get_account_validity_handler()

        self._well_known_builder = WellKnownBuilder(hs)
        self._address_ratelimiter = Ratelimiter(
@@ -471,13 +470,6 @@ class LoginRestServlet(RestServlet):
            device_id=device_id,
        )

        # execute the callback
        await self._account_validity_handler.on_user_login(
            user_id,
            auth_provider_type=login_submission.get("type"),
            auth_provider_id=auth_provider_id,
        )

        if valid_until_ms is not None:
            expires_in_ms = valid_until_ms - self.clock.time_msec()
            result["expires_in_ms"] = expires_in_ms
@@ -13,17 +13,12 @@
# limitations under the License.

""" This module contains REST servlets to do with profile: /profile/<paths> """

from http import HTTPStatus
from typing import TYPE_CHECKING, Tuple

from synapse.api.errors import Codes, SynapseError
from synapse.http.server import HttpServer
from synapse.http.servlet import (
    RestServlet,
    parse_boolean,
    parse_json_object_from_request,
)
from synapse.http.servlet import RestServlet, parse_json_object_from_request
from synapse.http.site import SynapseRequest
from synapse.rest.client._base import client_patterns
from synapse.types import JsonDict, UserID
@@ -32,20 +27,6 @@ if TYPE_CHECKING:
    from synapse.server import HomeServer


def _read_propagate(hs: "HomeServer", request: SynapseRequest) -> bool:
    # This will always be set by the time Twisted calls us.
    assert request.args is not None

    propagate = True
    if hs.config.experimental.msc4069_profile_inhibit_propagation:
        do_propagate = request.args.get(b"org.matrix.msc4069.propagate")
        if do_propagate is not None:
            propagate = parse_boolean(
                request, "org.matrix.msc4069.propagate", default=False
            )
    return propagate


class ProfileDisplaynameRestServlet(RestServlet):
    PATTERNS = client_patterns("/profile/(?P<user_id>[^/]*)/displayname", v1=True)
    CATEGORY = "Event sending requests"
@@ -99,11 +80,7 @@ class ProfileDisplaynameRestServlet(RestServlet):
                errcode=Codes.BAD_JSON,
            )

        propagate = _read_propagate(self.hs, request)

        await self.profile_handler.set_displayname(
            user, requester, new_name, is_admin, propagate=propagate
        )
        await self.profile_handler.set_displayname(user, requester, new_name, is_admin)

        return 200, {}

@@ -158,10 +135,8 @@ class ProfileAvatarURLRestServlet(RestServlet):
                400, "Missing key 'avatar_url'", errcode=Codes.MISSING_PARAM
            )

        propagate = _read_propagate(self.hs, request)

        await self.profile_handler.set_avatar_url(
            user, requester, new_avatar_url, is_admin, propagate=propagate
            user, requester, new_avatar_url, is_admin
        )

        return 200, {}
@@ -80,9 +80,6 @@ class VersionsRestServlet(RestServlet):
                    "v1.4",
                    "v1.5",
                    "v1.6",
                    "v1.7",
                    "v1.8",
                    "v1.9",
                ],
                # as per MSC1497:
                "unstable_features": {
@@ -129,8 +126,6 @@ class VersionsRestServlet(RestServlet):
                    "org.matrix.msc3981": self.config.experimental.msc3981_recurse_relations,
                    # Adds support for deleting account data.
                    "org.matrix.msc3391": self.config.experimental.msc3391_enabled,
                    # Allows clients to inhibit profile update propagation.
                    "org.matrix.msc4069": self.config.experimental.msc4069_profile_inhibit_propagation,
                },
            },
        )
@@ -178,8 +178,6 @@ class ServerNoticesManager:
            "avatar_url": self._config.servernotices.server_notices_mxid_avatar_url,
        }

        # `ignore_forced_encryption` is used to bypass `encryption_enabled_by_default_for_room_type`
        # setting if it is set, since the server notices will not be encrypted anyway.
        room_id, _, _ = await self._room_creation_handler.create_room(
            requester,
            config={
@@ -189,7 +187,6 @@ class ServerNoticesManager:
            },
            ratelimit=False,
            creator_join_profile=join_profile,
            ignore_forced_encryption=True,
        )

        self.maybe_get_notice_room_for_user.invalidate((user_id,))
@@ -224,27 +221,13 @@ class ServerNoticesManager:
            if room.room_id == room_id:
                return

        user_id_obj = UserID.from_string(user_id)
        await self._room_member_handler.update_membership(
            requester=requester,
            target=user_id_obj,
            target=UserID.from_string(user_id),
            room_id=room_id,
            action="invite",
            ratelimit=False,
        )

        if self._config.servernotices.server_notices_auto_join:
            user_requester = create_requester(
                user_id, authenticated_entity=self._server_name
            )
            await self._room_member_handler.update_membership(
                requester=user_requester,
                target=user_id_obj,
                room_id=room_id,
                action="join",
                ratelimit=False,
            )

    async def _update_notice_user_profile_if_changed(
        self,
        requester: Requester,
@@ -285,6 +268,5 @@ class ServerNoticesManager:
            target=UserID.from_string(self.server_notices_mxid),
            room_id=room_id,
            action="join",
            ratelimit=False,
            content={"displayname": display_name, "avatar_url": avatar_url},
        )
@@ -768,8 +768,9 @@ class BackgroundUpdater:

                # override the global statement timeout to avoid accidentally squashing
                # a long-running index creation process
                timeout_sql = "SET SESSION statement_timeout = 0"
                c.execute(timeout_sql)
                self.db_pool.engine.attempt_to_set_statement_timeout(
                    c, 0, for_transaction=True
                )

                sql = (
                    "CREATE %(unique)s INDEX CONCURRENTLY %(name)s"
@@ -791,12 +792,6 @@ class BackgroundUpdater:
                logger.debug("[SQL] %s", sql)
                c.execute(sql)
            finally:
                # mypy ignore - `statement_timeout` is defined on PostgresEngine
                # reset the global timeout to the default
                default_timeout = self.db_pool.engine.statement_timeout  # type: ignore[attr-defined]
                undo_timeout_sql = f"SET statement_timeout = {default_timeout}"
                conn.cursor().execute(undo_timeout_sql)

                conn.engine.attempt_to_set_autocommit(conn.conn, False)

        def create_index_sqlite(conn: "LoggingDatabaseConnection") -> None:

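The reason this timeout juggling exists: `CREATE INDEX CONCURRENTLY` can legitimately run longer than any sane statement timeout, and it must run outside a transaction. A sketch of the overall shape with raw psycopg2, assuming a reachable Postgres; connection details, table, and index names are placeholders:

# Sketch of the pattern above with raw psycopg2: disable the statement
# timeout, build the index outside a transaction, restore the default.
# Connection parameters and identifiers are placeholders.
import psycopg2

DEFAULT_TIMEOUT_MS = 60 * 60 * 1000  # mirrors the 1h default PostgresEngine uses below

conn = psycopg2.connect("dbname=synapse user=synapse")
try:
    conn.autocommit = True  # CONCURRENTLY cannot run inside a transaction
    with conn.cursor() as c:
        c.execute("SET SESSION statement_timeout = 0")
        try:
            c.execute("CREATE INDEX CONCURRENTLY my_idx ON my_table (my_col)")
        finally:
            c.execute("SET SESSION statement_timeout = %s", (DEFAULT_TIMEOUT_MS,))
finally:
    conn.autocommit = False
    conn.close()
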
@@ -465,15 +465,18 @@ class ClientIpWorkerStore(ClientIpBackgroundUpdateStore, MonthlyActiveUsersWorkerStore):
            #
            # This works by finding the max last_seen that is less than the given
            # time, but has no more than N rows before it, deleting all rows with
            # a lesser last_seen time. (We use an `IN` clause to force postgres to
            # use the index, otherwise it tends to do a seq scan).
            # a lesser last_seen time. (We COALESCE so that the sub-SELECT always
            # returns exactly one row).
            sql = """
                DELETE FROM user_ips
                WHERE last_seen IN (
                    SELECT last_seen FROM user_ips
                    WHERE last_seen <= ?
                    ORDER BY last_seen ASC
                    LIMIT 5000
                WHERE last_seen <= (
                    SELECT COALESCE(MAX(last_seen), -1)
                    FROM (
                        SELECT last_seen FROM user_ips
                        WHERE last_seen <= ?
                        ORDER BY last_seen ASC
                        LIMIT 5000
                    ) AS u
                )
            """

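The rewritten query deletes in bounded batches: the inner `SELECT ... LIMIT 5000` picks a cutoff, `MAX(...)` collapses it to a single value, and `COALESCE(..., -1)` keeps the outer `DELETE` well-formed when nothing matches. A self-contained demonstration of the same shape, using toy data in an in-memory SQLite database:

# Toy demonstration of the COALESCE(MAX(...), -1) batch-delete shape.
import sqlite3

db = sqlite3.connect(":memory:")
db.execute("CREATE TABLE user_ips (user_id TEXT, last_seen INTEGER)")
db.executemany(
    "INSERT INTO user_ips VALUES (?, ?)",
    [("@a:example.org", ts) for ts in range(10)],
)

cutoff, batch = 7, 3
db.execute(
    """
    DELETE FROM user_ips
    WHERE last_seen <= (
        SELECT COALESCE(MAX(last_seen), -1)
        FROM (
            SELECT last_seen FROM user_ips
            WHERE last_seen <= ?
            ORDER BY last_seen ASC
            LIMIT ?
        ) AS u
    )
    """,
    (cutoff, batch),
)
# Only the 3 oldest rows (last_seen 0..2) are gone; the -1 fallback makes
# the DELETE a no-op when the sub-select finds nothing.
assert db.execute("SELECT COUNT(*) FROM user_ips").fetchone()[0] == 7
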
@@ -586,27 +589,6 @@ class ClientIpWorkerStore(ClientIpBackgroundUpdateStore, MonthlyActiveUsersWorkerStore):
        device_id: Optional[str],
        now: Optional[int] = None,
    ) -> None:
        """Record that `user_id` used `access_token` from this `ip` address.

        This method does two things.

        1. It queues up a row to be upserted into the `client_ips` table. These happen
           periodically; see _update_client_ips_batch.
        2. It immediately records this user as having taken action for the purposes of
           MAU tracking.

        Any DB writes take place on the background tasks worker, falling back to the
        main process. If we're not that worker, this method emits a replication payload
        to run this logic on that worker.

        Two caveats to note:

         - We only take action once per LAST_SEEN_GRANULARITY, to avoid spamming the
           DB with writes.
         - Requests using the sliding-sync proxy's user agent are excluded, as its
           requests are not directly driven by end-users. This is a hack and we're not
           very proud of it.
        """
        # The sync proxy continuously triggers /sync even if the user is not
        # present so should be excluded from user_ips entries.
        if user_agent == "sync-v3-proxy-":

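The "once per LAST_SEEN_GRANULARITY" caveat in that docstring is a simple write throttle keyed on (user, token, ip). A minimal sketch of the idea; the granularity constant here is illustrative, not necessarily Synapse's value:

# Minimal sketch of per-key write throttling, as the docstring describes.
import time
from typing import Dict, Tuple

LAST_SEEN_GRANULARITY = 120_000  # ms; illustrative value

_last_seen: Dict[Tuple[str, str, str], int] = {}


def should_record(user_id: str, access_token: str, ip: str, now_ms: int) -> bool:
    key = (user_id, access_token, ip)
    last = _last_seen.get(key, 0)
    if now_ms - last < LAST_SEEN_GRANULARITY:
        return False  # seen recently: skip the DB write
    _last_seen[key] = now_ms
    return True


now = int(time.time() * 1000)
assert should_record("@a:hs", "tok", "1.2.3.4", now) is True
assert should_record("@a:hs", "tok", "1.2.3.4", now + 1) is False
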
@@ -87,32 +87,25 @@ class DeviceInboxWorkerStore(SQLBaseStore):
                self._instance_name in hs.config.worker.writers.to_device
            )

            self._to_device_msg_id_gen: AbstractStreamIdGenerator = (
            self._device_inbox_id_gen: AbstractStreamIdGenerator = (
                MultiWriterIdGenerator(
                    db_conn=db_conn,
                    db=database,
                    notifier=hs.get_replication_notifier(),
                    stream_name="to_device",
                    instance_name=self._instance_name,
                    tables=[
                        ("device_inbox", "instance_name", "stream_id"),
                        ("device_federation_outbox", "instance_name", "stream_id"),
                    ],
                    tables=[("device_inbox", "instance_name", "stream_id")],
                    sequence_name="device_inbox_sequence",
                    writers=hs.config.worker.writers.to_device,
                )
            )
        else:
            self._can_write_to_device = True
            self._to_device_msg_id_gen = StreamIdGenerator(
                db_conn,
                hs.get_replication_notifier(),
                "device_inbox",
                "stream_id",
                extra_tables=[("device_federation_outbox", "stream_id")],
            self._device_inbox_id_gen = StreamIdGenerator(
                db_conn, hs.get_replication_notifier(), "device_inbox", "stream_id"
            )

        max_device_inbox_id = self._to_device_msg_id_gen.get_current_token()
        max_device_inbox_id = self._device_inbox_id_gen.get_current_token()
        device_inbox_prefill, min_device_inbox_id = self.db_pool.get_cache_dict(
            db_conn,
            "device_inbox",

@@ -152,8 +145,8 @@ class DeviceInboxWorkerStore(SQLBaseStore):
    ) -> None:
        if stream_name == ToDeviceStream.NAME:
            # If replication is happening, then Postgres must be in use.
            assert isinstance(self._to_device_msg_id_gen, MultiWriterIdGenerator)
            self._to_device_msg_id_gen.advance(instance_name, token)
            assert isinstance(self._device_inbox_id_gen, MultiWriterIdGenerator)
            self._device_inbox_id_gen.advance(instance_name, token)
            for row in rows:
                if row.entity.startswith("@"):
                    self._device_inbox_stream_cache.entity_has_changed(
@@ -169,11 +162,11 @@ class DeviceInboxWorkerStore(SQLBaseStore):
        self, stream_name: str, instance_name: str, token: int
    ) -> None:
        if stream_name == ToDeviceStream.NAME:
            self._to_device_msg_id_gen.advance(instance_name, token)
            self._device_inbox_id_gen.advance(instance_name, token)
        super().process_replication_position(stream_name, instance_name, token)

    def get_to_device_stream_token(self) -> int:
        return self._to_device_msg_id_gen.get_current_token()
        return self._device_inbox_id_gen.get_current_token()

    async def get_messages_for_user_devices(
        self,

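Both generator classes share the small contract these hunks rely on: `advance()` folds in stream positions seen over replication, and `get_current_token()` reads back the high-water mark. A toy single-process illustration of that contract, not Synapse's actual implementation:

# Toy illustration of the stream-ID generator contract used above.
from typing import Dict


class ToyStreamIdGenerator:
    def __init__(self, start: int = 0) -> None:
        self._positions: Dict[str, int] = {"local": start}

    def advance(self, instance_name: str, token: int) -> None:
        # Positions must only ever move forwards.
        current = self._positions.get(instance_name, 0)
        self._positions[instance_name] = max(current, token)

    def get_current_token(self) -> int:
        return max(self._positions.values())


gen = ToyStreamIdGenerator()
gen.advance("worker1", 10)
gen.advance("worker2", 7)
assert gen.get_current_token() == 10
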
@@ -457,12 +450,14 @@ class DeviceInboxWorkerStore(SQLBaseStore):
        user_id: str,
        device_id: Optional[str],
        up_to_stream_id: int,
        limit: Optional[int] = None,
    ) -> int:
        """
        Args:
            user_id: The recipient user_id.
            device_id: The recipient device_id.
            up_to_stream_id: Where to delete messages up to.
            limit: maximum number of messages to delete

        Returns:
            The number of messages deleted.
@@ -483,22 +478,32 @@ class DeviceInboxWorkerStore(SQLBaseStore):
            log_kv({"message": "No changes in cache since last check"})
            return 0

        from_stream_id = None
        count = 0
        while True:
            from_stream_id, loop_count = await self.delete_messages_for_device_between(
                user_id,
                device_id,
                from_stream_id=from_stream_id,
                to_stream_id=up_to_stream_id,
                limit=1000,
            )
            count += loop_count
            if from_stream_id is None:
                break
        def delete_messages_for_device_txn(txn: LoggingTransaction) -> int:
            limit_statement = "" if limit is None else f"LIMIT {limit}"
            sql = f"""
                DELETE FROM device_inbox WHERE user_id = ? AND device_id = ? AND stream_id <= (
                    SELECT MAX(stream_id) FROM (
                        SELECT stream_id FROM device_inbox
                        WHERE user_id = ? AND device_id = ? AND stream_id <= ?
                        ORDER BY stream_id
                        {limit_statement}
                    ) AS q1
                )
            """
            txn.execute(sql, (user_id, device_id, user_id, device_id, up_to_stream_id))
            return txn.rowcount

        count = await self.db_pool.runInteraction(
            "delete_messages_for_device", delete_messages_for_device_txn
        )

        log_kv({"message": f"deleted {count} messages for device", "count": count})

        # In this case we don't know if we hit the limit or the delete is complete
        # so let's not update the cache.
        if count == limit:
            return count

        # Update the cache, ensuring that we only ever increase the value
        updated_last_deleted_stream_id = self._last_device_delete_cache.get(
            (user_id, device_id), 0

@@ -509,74 +514,6 @@ class DeviceInboxWorkerStore(SQLBaseStore):

        return count

    @trace
    async def delete_messages_for_device_between(
        self,
        user_id: str,
        device_id: Optional[str],
        from_stream_id: Optional[int],
        to_stream_id: int,
        limit: int,
    ) -> Tuple[Optional[int], int]:
        """Delete N device messages between the stream IDs, returning the
        highest stream ID deleted (or None if all messages in the range have
        been deleted) and the number of messages deleted.

        This is more efficient than `delete_messages_for_device` when calling in
        a loop to batch delete messages.
        """

        # Keeping track of a lower bound of stream ID where we've deleted
        # everything below makes the queries much faster. Otherwise, every time
        # we scan for rows to delete we'd re-scan across all the rows that have
        # previously been deleted (until the next table VACUUM).

        if from_stream_id is None:
            # Minimum device stream ID is 1.
            from_stream_id = 0

        def delete_messages_for_device_between_txn(
            txn: LoggingTransaction,
        ) -> Tuple[Optional[int], int]:
            txn.execute(
                """
                SELECT MAX(stream_id) FROM (
                    SELECT stream_id FROM device_inbox
                    WHERE user_id = ? AND device_id = ?
                        AND ? < stream_id AND stream_id <= ?
                    ORDER BY stream_id
                    LIMIT ?
                ) AS d
                """,
                (user_id, device_id, from_stream_id, to_stream_id, limit),
            )
            row = txn.fetchone()
            if row is None or row[0] is None:
                return None, 0

            (max_stream_id,) = row

            txn.execute(
                """
                DELETE FROM device_inbox
                WHERE user_id = ? AND device_id = ?
                    AND ? < stream_id AND stream_id <= ?
                """,
                (user_id, device_id, from_stream_id, max_stream_id),
            )

            num_deleted = txn.rowcount
            if num_deleted < limit:
                return None, num_deleted

            return max_stream_id, num_deleted

        return await self.db_pool.runInteraction(
            "delete_messages_for_device_between",
            delete_messages_for_device_between_txn,
            db_autocommit=True,  # We don't need to run in a transaction
        )

    @trace
    async def get_new_device_msgs_for_remote(
        self, destination: str, last_stream_id: int, current_stream_id: int, limit: int

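The `(max_stream_id, num_deleted)` return shape is what makes the caller's loop in the earlier hunk resumable: a non-None first element is the lower bound to feed into the next call, and None means the range is exhausted. A toy driver showing that protocol, with a stubbed batch function standing in for `delete_messages_for_device_between`:

# Toy driver for the resumable batch-delete protocol above.
from typing import Optional, Tuple

pending = list(range(1, 2501))  # fake stream IDs still in the inbox


def delete_batch(
    from_id: Optional[int], to_id: int, limit: int
) -> Tuple[Optional[int], int]:
    lower = from_id or 0
    batch = [s for s in pending if lower < s <= to_id][:limit]
    for s in batch:
        pending.remove(s)
    if len(batch) < limit:
        return None, len(batch)  # range exhausted: caller can stop
    return batch[-1], len(batch)  # resume point for the next call


from_id, total = None, 0
while True:
    from_id, n = delete_batch(from_id, to_id=2500, limit=1000)
    total += n
    if from_id is None:
        break
assert total == 2500 and not pending
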
@@ -808,7 +745,7 @@ class DeviceInboxWorkerStore(SQLBaseStore):
                    msg.get(EventContentFields.TO_DEVICE_MSGID),
                )

        async with self._to_device_msg_id_gen.get_next() as stream_id:
        async with self._device_inbox_id_gen.get_next() as stream_id:
            now_ms = self._clock.time_msec()
            await self.db_pool.runInteraction(
                "add_messages_to_device_inbox", add_messages_txn, now_ms, stream_id
@@ -820,7 +757,7 @@ class DeviceInboxWorkerStore(SQLBaseStore):
                destination, stream_id
            )

        return self._to_device_msg_id_gen.get_current_token()
        return self._device_inbox_id_gen.get_current_token()

    async def add_messages_from_remote_to_device_inbox(
        self,
@@ -864,7 +801,7 @@ class DeviceInboxWorkerStore(SQLBaseStore):
                    txn, stream_id, local_messages_by_user_then_device
                )

        async with self._to_device_msg_id_gen.get_next() as stream_id:
        async with self._device_inbox_id_gen.get_next() as stream_id:
            now_ms = self._clock.time_msec()
            await self.db_pool.runInteraction(
                "add_messages_from_remote_to_device_inbox",

@@ -301,11 +301,6 @@ class EventFederationWorkerStore(SignatureWorkerStore, EventsWorkerStore, SQLBaseStore):
        # Add the initial set of chains, excluding the sequence corresponding to
        # initial event.
        for chain_id, seq_no in event_chains.items():
            # Check if the initial event is the first item in the chain. If so, then
            # there is nothing new to add from this chain.
            if seq_no == 1:
                continue

            chains[chain_id] = max(seq_no - 1, chains.get(chain_id, 0))

        # Now for each chain we figure out the maximum sequence number reachable

@@ -311,14 +311,6 @@ class EventPushActionsWorkerStore(ReceiptsWorkerStore, StreamWorkerStore, SQLBaseStore):
            self._background_drop_null_thread_id_indexes,
        )

        # Add a room ID index to speed up room deletion
        self.db_pool.updates.register_background_index_update(
            "event_push_summary_index_room_id",
            index_name="event_push_summary_index_room_id",
            table="event_push_summary",
            columns=["room_id"],
        )

    async def _background_drop_null_thread_id_indexes(
        self, progress: JsonDict, batch_size: int
    ) -> int:

@@ -89,10 +89,11 @@ class PurgeEventsStore(StateGroupWorkerStore, CacheInvalidationWorkerStore):
        # furthermore, we might already have the table from a previous (failed)
        # purge attempt, so let's drop the table first.

        if isinstance(self.database_engine, PostgresEngine):
            # Disable statement timeouts for this transaction; purging rooms can
            # take a while!
            txn.execute("SET LOCAL statement_timeout = 0")
            # Disable statement timeouts for this transaction; purging rooms can
            # take a while!
            self.database_engine.attempt_to_set_statement_timeout(
                txn, 0, for_transaction=True
            )

        txn.execute("DROP TABLE IF EXISTS events_to_purge")

@@ -601,7 +601,7 @@ class PusherBackgroundUpdatesStore(SQLBaseStore):
                (last_pusher_id, batch_size),
            )

            rows = cast(List[Tuple[int, Optional[str], Optional[str]]], txn.fetchall())
            rows = txn.fetchall()
            if len(rows) == 0:
                return 0

@@ -617,7 +617,7 @@ class PusherBackgroundUpdatesStore(SQLBaseStore):
                txn=txn,
                table="pushers",
                key_names=("id",),
                key_values=[(row[0],) for row in rows],
                key_values=[row[0] for row in rows],
                value_names=("device_id", "access_token"),
                # If there was already a device_id on the pusher, we only want to clear
                # the access_token column, so we keep the existing device_id. Otherwise,

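The `key_values` change above matters because each entry is paired element-wise with `key_names` when the update helper builds its per-row WHERE clause, so even a single-column key needs one tuple per row. A small illustration of why the flat form breaks; this is a standalone demonstration, not Synapse's helper code:

# Why key_values should be a list of tuples: each entry is zipped against
# key_names to build a WHERE clause and its argument tuple.
rows = [(1, None, "tok1"), (2, "dev", "tok2")]

key_names = ("id",)
good = [(row[0],) for row in rows]   # [(1,), (2,)]
bad = [row[0] for row in rows]       # [1, 2]

for kv in good:
    where = " AND ".join(f"{name} = ?" for name in key_names)
    assert len(kv) == len(key_names)  # args line up with placeholders

# With the flat form, a bare int has no length to match key_names,
# so building the per-row argument tuple fails:
try:
    len(bad[0])  # type: ignore[arg-type]
except TypeError:
    print("flat key_values cannot be zipped against key_names")
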
@@ -36,6 +36,9 @@ CursorType = TypeVar("CursorType", bound=Cursor)


class BaseDatabaseEngine(Generic[ConnectionType, CursorType], metaclass=abc.ABCMeta):
    # The default statement timeout to use for transactions.
    statement_timeout: Optional[int] = None

    def __init__(self, module: DBAPI2Module, config: Mapping[str, Any]):
        self.module = module

@@ -132,6 +135,16 @@ class BaseDatabaseEngine(Generic[ConnectionType, CursorType], metaclass=abc.ABCMeta):
        """
        ...

    @abc.abstractmethod
    def attempt_to_set_statement_timeout(
        self, cursor: CursorType, statement_timeout: int, for_transaction: bool
    ) -> None:
        """Attempt to set the cursor's statement timeout.

        Note this has no effect on SQLite3.
        """
        ...

    @staticmethod
    @abc.abstractmethod
    def executescript(cursor: CursorType, script: str) -> None:

@@ -52,7 +52,7 @@ class PostgresEngine(
        # some degenerate query plan has been created and the client has probably
        # timed out/walked off anyway.
        # This is in milliseconds.
        self.statement_timeout: Optional[int] = database_config.get(
        self.statement_timeout = database_config.get(
            "statement_timeout", 60 * 60 * 1000
        )
        self._version: Optional[int] = None  # unknown as yet
@@ -169,7 +169,11 @@ class PostgresEngine(

        # Abort really long-running statements and turn them into errors.
        if self.statement_timeout is not None:
            cursor.execute("SET statement_timeout TO ?", (self.statement_timeout,))
            self.attempt_to_set_statement_timeout(
                cast(psycopg2.extensions.cursor, cursor.txn),
                self.statement_timeout,
                for_transaction=False,
            )

        cursor.close()
        db_conn.commit()
@@ -233,6 +237,18 @@ class PostgresEngine(
        isolation_level = self.isolation_level_map[isolation_level]
        return conn.set_isolation_level(isolation_level)

    def attempt_to_set_statement_timeout(
        self,
        cursor: psycopg2.extensions.cursor,
        statement_timeout: int,
        for_transaction: bool,
    ) -> None:
        if for_transaction:
            sql = "SET LOCAL statement_timeout TO ?"
        else:
            sql = "SET statement_timeout TO ?"
        cursor.execute(sql, (statement_timeout,))

    @staticmethod
    def executescript(cursor: psycopg2.extensions.cursor, script: str) -> None:
        """Execute a chunk of SQL containing multiple semicolon-delimited statements.

@@ -143,6 +143,12 @@ class Sqlite3Engine(BaseDatabaseEngine[sqlite3.Connection, sqlite3.Cursor]):
        # All transactions are SERIALIZABLE by default in sqlite
        pass

    def attempt_to_set_statement_timeout(
        self, cursor: sqlite3.Cursor, statement_timeout: int, for_transaction: bool
    ) -> None:
        # Not supported.
        pass

    @staticmethod
    def executescript(cursor: sqlite3.Cursor, script: str) -> None:
        """Execute a chunk of SQL containing multiple semicolon-delimited statements.

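Taken together, the last three hunks replace inline `SET ... statement_timeout` SQL with a single engine-level hook, so call sites need no `isinstance` checks. A stripped-down sketch of the same abstract-method pattern, with simplified names rather than the real class hierarchy:

# Stripped-down sketch of the engine hook pattern introduced above: the
# base class declares the operation, each engine decides what it means.
import abc


class Engine(abc.ABC):
    @abc.abstractmethod
    def attempt_to_set_statement_timeout(
        self, cursor: object, timeout_ms: int, for_transaction: bool
    ) -> None:
        ...


class Postgres(Engine):
    def attempt_to_set_statement_timeout(
        self, cursor: object, timeout_ms: int, for_transaction: bool
    ) -> None:
        scope = "LOCAL " if for_transaction else ""
        print(f"SET {scope}statement_timeout TO {timeout_ms}")


class Sqlite(Engine):
    def attempt_to_set_statement_timeout(
        self, cursor: object, timeout_ms: int, for_transaction: bool
    ) -> None:
        pass  # SQLite has no statement timeout: a deliberate no-op


for engine in (Postgres(), Sqlite()):
    engine.attempt_to_set_statement_timeout(None, 0, for_transaction=True)
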
@@ -129,8 +129,8 @@ Changes in SCHEMA_VERSION = 83


SCHEMA_COMPAT_VERSION = (
    # The event_txn_id table and tables from MSC2716 no longer exist.
    83
    # The `event_txn_id_device_id` must be written to for new events.
    80
)
"""Limit on how far the synapse codebase can be rolled back without breaking db compat"""

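This hunk moves SCHEMA_COMPAT_VERSION back from 83 to 80, widening the window of older releases that can still run against the database. The guard this constant feeds is, in essence, a one-line comparison on startup; a sketch of that check, with illustrative names and values:

# Sketch of the rollback-compatibility check SCHEMA_COMPAT_VERSION feeds:
# refuse to start if the database was written by a codebase whose
# compatibility floor is newer than this codebase. Names are illustrative.
SCHEMA_VERSION = 83         # what this codebase writes
SCHEMA_COMPAT_VERSION = 80  # oldest codebase that can still read our writes


def check_schema_compat(db_compat_version: int) -> None:
    if db_compat_version > SCHEMA_VERSION:
        raise RuntimeError(
            f"Database requires schema version {db_compat_version}, "
            f"but this codebase only supports up to {SCHEMA_VERSION}"
        )


check_schema_compat(81)  # fine: 81 <= 83
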
synapse/storage/schema/common/full_schemas/54/full.sql (new file, 8 lines)
@@ -0,0 +1,8 @@


CREATE TABLE background_updates (
    update_name text NOT NULL,
    progress_json text NOT NULL,
    depends_on text,
    CONSTRAINT background_updates_uniqueness UNIQUE (update_name)
);

@@ -0,0 +1,88 @@
/* Copyright 2023 The Matrix.org Foundation C.I.C
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

-- Annotate some tables in Postgres with a REPLICA IDENTITY.
-- Any table that doesn't have a primary key should be annotated explicitly with
-- a REPLICA IDENTITY so that logical replication can be used.
-- If this is not done, then UPDATE and DELETE statements on those tables
-- will fail if logical replication is in use.


-- Where possible, re-use unique indices already defined on tables as a replica
-- identity.
ALTER TABLE appservice_room_list REPLICA IDENTITY USING INDEX appservice_room_list_idx;
ALTER TABLE batch_events REPLICA IDENTITY USING INDEX chunk_events_event_id;
ALTER TABLE blocked_rooms REPLICA IDENTITY USING INDEX blocked_rooms_idx;
ALTER TABLE cache_invalidation_stream_by_instance REPLICA IDENTITY USING INDEX cache_invalidation_stream_by_instance_id;
ALTER TABLE device_lists_changes_in_room REPLICA IDENTITY USING INDEX device_lists_changes_in_stream_id;
ALTER TABLE device_lists_outbound_last_success REPLICA IDENTITY USING INDEX device_lists_outbound_last_success_unique_idx;
ALTER TABLE device_lists_remote_cache REPLICA IDENTITY USING INDEX device_lists_remote_cache_unique_id;
ALTER TABLE device_lists_remote_extremeties REPLICA IDENTITY USING INDEX device_lists_remote_extremeties_unique_idx;
ALTER TABLE device_lists_remote_resync REPLICA IDENTITY USING INDEX device_lists_remote_resync_idx;
ALTER TABLE e2e_cross_signing_keys REPLICA IDENTITY USING INDEX e2e_cross_signing_keys_stream_idx;
ALTER TABLE e2e_room_keys REPLICA IDENTITY USING INDEX e2e_room_keys_with_version_idx;
ALTER TABLE e2e_room_keys_versions REPLICA IDENTITY USING INDEX e2e_room_keys_versions_idx;
ALTER TABLE erased_users REPLICA IDENTITY USING INDEX erased_users_user;
ALTER TABLE event_relations REPLICA IDENTITY USING INDEX event_relations_id;
ALTER TABLE federation_inbound_events_staging REPLICA IDENTITY USING INDEX federation_inbound_events_staging_instance_event;
ALTER TABLE federation_stream_position REPLICA IDENTITY USING INDEX federation_stream_position_instance;
ALTER TABLE ignored_users REPLICA IDENTITY USING INDEX ignored_users_uniqueness;
ALTER TABLE insertion_events REPLICA IDENTITY USING INDEX insertion_events_event_id;
ALTER TABLE insertion_event_extremities REPLICA IDENTITY USING INDEX insertion_event_extremities_event_id;
ALTER TABLE monthly_active_users REPLICA IDENTITY USING INDEX monthly_active_users_users;
ALTER TABLE ratelimit_override REPLICA IDENTITY USING INDEX ratelimit_override_idx;
ALTER TABLE room_stats_earliest_token REPLICA IDENTITY USING INDEX room_stats_earliest_token_idx;
ALTER TABLE room_stats_state REPLICA IDENTITY USING INDEX room_stats_state_room;
ALTER TABLE stream_positions REPLICA IDENTITY USING INDEX stream_positions_idx;
ALTER TABLE user_directory REPLICA IDENTITY USING INDEX user_directory_user_idx;
ALTER TABLE user_directory_search REPLICA IDENTITY USING INDEX user_directory_search_user_idx;
ALTER TABLE user_ips REPLICA IDENTITY USING INDEX user_ips_user_token_ip_unique_index;
ALTER TABLE user_signature_stream REPLICA IDENTITY USING INDEX user_signature_stream_idx;
ALTER TABLE users_in_public_rooms REPLICA IDENTITY USING INDEX users_in_public_rooms_u_idx;
ALTER TABLE users_who_share_private_rooms REPLICA IDENTITY USING INDEX users_who_share_private_rooms_u_idx;
ALTER TABLE user_threepid_id_server REPLICA IDENTITY USING INDEX user_threepid_id_server_idx;
ALTER TABLE worker_locks REPLICA IDENTITY USING INDEX worker_locks_key;


-- Where there are no unique indices, use the entire rows as replica identities.
ALTER TABLE current_state_delta_stream REPLICA IDENTITY FULL;
ALTER TABLE deleted_pushers REPLICA IDENTITY FULL;
ALTER TABLE device_auth_providers REPLICA IDENTITY FULL;
ALTER TABLE device_federation_inbox REPLICA IDENTITY FULL;
ALTER TABLE device_federation_outbox REPLICA IDENTITY FULL;
ALTER TABLE device_inbox REPLICA IDENTITY FULL;
ALTER TABLE device_lists_outbound_pokes REPLICA IDENTITY FULL;
ALTER TABLE device_lists_stream REPLICA IDENTITY FULL;
ALTER TABLE e2e_cross_signing_signatures REPLICA IDENTITY FULL;
ALTER TABLE event_auth_chain_links REPLICA IDENTITY FULL;
ALTER TABLE event_auth REPLICA IDENTITY FULL;
ALTER TABLE event_push_actions_staging REPLICA IDENTITY FULL;
ALTER TABLE insertion_event_edges REPLICA IDENTITY FULL;
ALTER TABLE local_media_repository_url_cache REPLICA IDENTITY FULL;
ALTER TABLE presence_stream REPLICA IDENTITY FULL;
ALTER TABLE push_rules_stream REPLICA IDENTITY FULL;
ALTER TABLE room_alias_servers REPLICA IDENTITY FULL;
ALTER TABLE stream_ordering_to_exterm REPLICA IDENTITY FULL;
ALTER TABLE timeline_gaps REPLICA IDENTITY FULL;
ALTER TABLE user_daily_visits REPLICA IDENTITY FULL;
ALTER TABLE users_pending_deactivation REPLICA IDENTITY FULL;

-- special cases: unique indices on nullable columns can't be used
ALTER TABLE event_push_summary REPLICA IDENTITY FULL;
ALTER TABLE event_search REPLICA IDENTITY FULL;
ALTER TABLE local_media_repository_thumbnails REPLICA IDENTITY FULL;
ALTER TABLE remote_media_cache_thumbnails REPLICA IDENTITY FULL;
ALTER TABLE threepid_guest_access_tokens REPLICA IDENTITY FULL;
ALTER TABLE user_filters REPLICA IDENTITY FULL; -- sadly the `CHECK` constraint is not enough here

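A practical companion to this migration: Postgres records each table's replica identity in `pg_class.relreplident` ('d' default, 'n' nothing, 'f' full, 'i' index). A hedged audit sketch listing public tables that would still break under logical replication, i.e. default identity with no primary key; the connection string is a placeholder:

# Audit sketch: list tables whose replica identity is still the default
# and which lack a primary key, the cases this migration targets.
import psycopg2

QUERY = """
SELECT c.relname
FROM pg_class c
JOIN pg_namespace n ON n.oid = c.relnamespace
WHERE n.nspname = 'public'
  AND c.relkind = 'r'
  AND c.relreplident = 'd'        -- 'd' = default (primary key, if any)
  AND NOT EXISTS (
      SELECT 1 FROM pg_index i
      WHERE i.indrelid = c.oid AND i.indisprimary
  )
ORDER BY c.relname
"""

with psycopg2.connect("dbname=synapse") as conn:
    with conn.cursor() as cur:
        cur.execute(QUERY)
        for (table,) in cur.fetchall():
            print(f"{table}: UPDATE/DELETE will fail under logical replication")
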
@@ -0,0 +1,37 @@
/* Copyright 2015, 2016 OpenMarket Ltd
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/* We used to create tables called application_services and
 * application_services_regex, but these are no longer used and are removed in
 * delta 54.
 */


CREATE TABLE IF NOT EXISTS application_services_state(
    as_id TEXT PRIMARY KEY,
    state VARCHAR(5),
    last_txn INTEGER
);

CREATE TABLE IF NOT EXISTS application_services_txns(
    as_id TEXT NOT NULL,
    txn_id INTEGER NOT NULL,
    event_ids TEXT NOT NULL,
    UNIQUE(as_id, txn_id)
);

CREATE INDEX application_services_txns_id ON application_services_txns (
    as_id
);

synapse/storage/schema/main/full_schemas/16/event_edges.sql (new file, 70 lines)
@@ -0,0 +1,70 @@
/* Copyright 2014-2016 OpenMarket Ltd
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/* We used to create tables called event_destinations and
 * state_forward_extremities, but these are no longer used and are removed in
 * delta 54.
 */

CREATE TABLE IF NOT EXISTS event_forward_extremities(
    event_id TEXT NOT NULL,
    room_id TEXT NOT NULL,
    UNIQUE (event_id, room_id)
);

CREATE INDEX ev_extrem_room ON event_forward_extremities(room_id);
CREATE INDEX ev_extrem_id ON event_forward_extremities(event_id);


CREATE TABLE IF NOT EXISTS event_backward_extremities(
    event_id TEXT NOT NULL,
    room_id TEXT NOT NULL,
    UNIQUE (event_id, room_id)
);

CREATE INDEX ev_b_extrem_room ON event_backward_extremities(room_id);
CREATE INDEX ev_b_extrem_id ON event_backward_extremities(event_id);


CREATE TABLE IF NOT EXISTS event_edges(
    event_id TEXT NOT NULL,
    prev_event_id TEXT NOT NULL,
    room_id TEXT NOT NULL,
    is_state BOOL NOT NULL,  -- true if this is a prev_state edge rather than a regular
                             -- event dag edge.
    UNIQUE (event_id, prev_event_id, room_id, is_state)
);

CREATE INDEX ev_edges_id ON event_edges(event_id);
CREATE INDEX ev_edges_prev_id ON event_edges(prev_event_id);


CREATE TABLE IF NOT EXISTS room_depth(
    room_id TEXT NOT NULL,
    min_depth INTEGER NOT NULL,
    UNIQUE (room_id)
);

CREATE INDEX room_depth_room ON room_depth(room_id);

CREATE TABLE IF NOT EXISTS event_auth(
    event_id TEXT NOT NULL,
    auth_id TEXT NOT NULL,
    room_id TEXT NOT NULL,
    UNIQUE (event_id, auth_id, room_id)
);

CREATE INDEX evauth_edges_id ON event_auth(event_id);
CREATE INDEX evauth_edges_auth_id ON event_auth(auth_id);

@@ -0,0 +1,38 @@
/* Copyright 2014-2016 OpenMarket Ltd
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/* We used to create tables called event_content_hashes and event_edge_hashes,
 * but these are no longer used and are removed in delta 54.
 */

CREATE TABLE IF NOT EXISTS event_reference_hashes (
    event_id TEXT,
    algorithm TEXT,
    hash bytea,
    UNIQUE (event_id, algorithm)
);

CREATE INDEX event_reference_hashes_id ON event_reference_hashes(event_id);


CREATE TABLE IF NOT EXISTS event_signatures (
    event_id TEXT,
    signature_name TEXT,
    key_id TEXT,
    signature bytea,
    UNIQUE (event_id, signature_name, key_id)
);

CREATE INDEX event_signatures_id ON event_signatures(event_id);

synapse/storage/schema/main/full_schemas/16/im.sql (new file, 120 lines)
@@ -0,0 +1,120 @@
/* Copyright 2014-2016 OpenMarket Ltd
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/* We used to create tables called room_hosts and feedback,
 * but these are no longer used and are removed in delta 54.
 */

CREATE TABLE IF NOT EXISTS events(
    stream_ordering INTEGER PRIMARY KEY,
    topological_ordering BIGINT NOT NULL,
    event_id TEXT NOT NULL,
    type TEXT NOT NULL,
    room_id TEXT NOT NULL,

    -- 'content' used to be created NULLable, but as of delta 50 we drop that constraint.
    -- the hack we use to drop the constraint doesn't work for an in-memory sqlite
    -- database, which breaks the sytests. Hence, we no longer make it nullable.
    content TEXT,

    unrecognized_keys TEXT,
    processed BOOL NOT NULL,
    outlier BOOL NOT NULL,
    depth BIGINT DEFAULT 0 NOT NULL,
    UNIQUE (event_id)
);

CREATE INDEX events_stream_ordering ON events (stream_ordering);
CREATE INDEX events_topological_ordering ON events (topological_ordering);
CREATE INDEX events_order ON events (topological_ordering, stream_ordering);
CREATE INDEX events_room_id ON events (room_id);
CREATE INDEX events_order_room ON events (
    room_id, topological_ordering, stream_ordering
);


CREATE TABLE IF NOT EXISTS event_json(
    event_id TEXT NOT NULL,
    room_id TEXT NOT NULL,
    internal_metadata TEXT NOT NULL,
    json TEXT NOT NULL,
    UNIQUE (event_id)
);

CREATE INDEX event_json_room_id ON event_json(room_id);


CREATE TABLE IF NOT EXISTS state_events(
    event_id TEXT NOT NULL,
    room_id TEXT NOT NULL,
    type TEXT NOT NULL,
    state_key TEXT NOT NULL,
    prev_state TEXT,
    UNIQUE (event_id)
);

CREATE INDEX state_events_room_id ON state_events (room_id);
CREATE INDEX state_events_type ON state_events (type);
CREATE INDEX state_events_state_key ON state_events (state_key);


CREATE TABLE IF NOT EXISTS current_state_events(
    event_id TEXT NOT NULL,
    room_id TEXT NOT NULL,
    type TEXT NOT NULL,
    state_key TEXT NOT NULL,
    UNIQUE (event_id),
    UNIQUE (room_id, type, state_key)
);

CREATE INDEX current_state_events_room_id ON current_state_events (room_id);
CREATE INDEX current_state_events_type ON current_state_events (type);
CREATE INDEX current_state_events_state_key ON current_state_events (state_key);

CREATE TABLE IF NOT EXISTS room_memberships(
    event_id TEXT NOT NULL,
    user_id TEXT NOT NULL,
    sender TEXT NOT NULL,
    room_id TEXT NOT NULL,
    membership TEXT NOT NULL,
    UNIQUE (event_id)
);

CREATE INDEX room_memberships_room_id ON room_memberships (room_id);
CREATE INDEX room_memberships_user_id ON room_memberships (user_id);

CREATE TABLE IF NOT EXISTS topics(
    event_id TEXT NOT NULL,
    room_id TEXT NOT NULL,
    topic TEXT NOT NULL,
    UNIQUE (event_id)
);

CREATE INDEX topics_room_id ON topics(room_id);

CREATE TABLE IF NOT EXISTS room_names(
    event_id TEXT NOT NULL,
    room_id TEXT NOT NULL,
    name TEXT NOT NULL,
    UNIQUE (event_id)
);

CREATE INDEX room_names_room_id ON room_names(room_id);

CREATE TABLE IF NOT EXISTS rooms(
    room_id TEXT PRIMARY KEY NOT NULL,
    is_public BOOL,
    creator TEXT
);

synapse/storage/schema/main/full_schemas/16/keys.sql (new file, 26 lines)
@@ -0,0 +1,26 @@
/* Copyright 2014-2016 OpenMarket Ltd
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

-- we used to create a table called server_tls_certificates, but this is no
-- longer used, and is removed in delta 54.

CREATE TABLE IF NOT EXISTS server_signature_keys(
    server_name TEXT,  -- Server name.
    key_id TEXT,  -- Key version.
    from_server TEXT,  -- Which key server the key was fetched from.
    ts_added_ms BIGINT,  -- When the key was added.
    verify_key bytea,  -- NACL verification key.
    UNIQUE (server_name, key_id)
);

Some files were not shown because too many files have changed in this diff.