Merge branch 'develop' into matrix-org-hotfixes

anoa/redirect_instances
Olivier Wilkinson (reivilibre) 2023-04-13 11:55:21 +01:00
commit 7212fc4659
45 changed files with 688 additions and 179 deletions

View File

@@ -14,7 +14,7 @@ jobs:
       # There's a 'download artifact' action, but it hasn't been updated for the workflow_run action
       # (https://github.com/actions/download-artifact/issues/60) so instead we get this mess:
       - name: 📥 Download artifact
-        uses: dawidd6/action-download-artifact@5e780fc7bbd0cac69fc73271ed86edf5dcb72d67 # v2.26.0
+        uses: dawidd6/action-download-artifact@7132ab516fba5f602fafae6fdd4822afa10db76f # v2.26.1
         with:
           workflow: docs-pr.yaml
           run_id: ${{ github.event.workflow_run.id }}

View File

@@ -27,9 +27,7 @@ jobs:
     steps:
       - uses: actions/checkout@v3
       - name: Install Rust
-        uses: dtolnay/rust-toolchain@fc3253060d0c959bea12a59f10f8391454a0b02d
-        with:
-          toolchain: stable
+        uses: dtolnay/rust-toolchain@stable
       - uses: Swatinem/rust-cache@v2
       # The dev dependencies aren't exposed in the wheel metadata (at least with current
@@ -61,9 +59,7 @@ jobs:
       - uses: actions/checkout@v3
       - name: Install Rust
-        uses: dtolnay/rust-toolchain@fc3253060d0c959bea12a59f10f8391454a0b02d
-        with:
-          toolchain: stable
+        uses: dtolnay/rust-toolchain@stable
       - uses: Swatinem/rust-cache@v2
       - run: sudo apt-get -qq install xmlsec1
@@ -134,9 +130,7 @@ jobs:
       - uses: actions/checkout@v3
       - name: Install Rust
-        uses: dtolnay/rust-toolchain@fc3253060d0c959bea12a59f10f8391454a0b02d
-        with:
-          toolchain: stable
+        uses: dtolnay/rust-toolchain@stable
       - uses: Swatinem/rust-cache@v2
       - name: Ensure sytest runs `pip install`

View File

@@ -35,12 +35,7 @@ jobs:
     steps:
       - uses: actions/checkout@v3
       - name: Install Rust
-        # There don't seem to be versioned releases of this action per se: for each rust
-        # version there is a branch which gets constantly rebased on top of master.
-        # We pin to a specific commit for paranoia's sake.
-        uses: dtolnay/rust-toolchain@fc3253060d0c959bea12a59f10f8391454a0b02d
-        with:
-          toolchain: 1.58.1
+        uses: dtolnay/rust-toolchain@1.58.1
       - uses: Swatinem/rust-cache@v2
       - uses: matrix-org/setup-python-poetry@v1
         with:
@@ -70,9 +65,59 @@ jobs:
       - run: .ci/scripts/check_lockfile.py

   lint:
-    uses: "matrix-org/backend-meta/.github/workflows/python-poetry-ci.yml@v2"
-    with:
-      typechecking-extras: "all"
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout repository
+        uses: actions/checkout@v3
+      - name: Setup Poetry
+        uses: matrix-org/setup-python-poetry@v1
+        with:
+          install-project: "false"
+      - name: Import order (isort)
+        run: poetry run isort --check --diff .
+      - name: Code style (black)
+        run: poetry run black --check --diff .
+      - name: Semantic checks (ruff)
+        # --quiet suppresses the update check.
+        run: poetry run ruff --quiet .
+
+  lint-mypy:
+    runs-on: ubuntu-latest
+    name: Typechecking
+    steps:
+      - name: Checkout repository
+        uses: actions/checkout@v3
+      - name: Setup Poetry
+        uses: matrix-org/setup-python-poetry@v1
+        with:
+          # We want to make use of type hints in optional dependencies too.
+          extras: all
+          # We have seen odd mypy failures that were resolved when we started
+          # installing the project again:
+          # https://github.com/matrix-org/synapse/pull/15376#issuecomment-1498983775
+          # To make CI green, err towards caution and install the project.
+          install-project: "true"
+      - name: Install Rust
+        uses: dtolnay/rust-toolchain@1.58.1
+      - uses: Swatinem/rust-cache@v2
+      # NB: I have two concerns with this action:
+      # 1. We occasionally see odd mypy problems that aren't reproducible
+      #    locally with clean caches. I suspect some dodgy caching behaviour.
+      # 2. The action uses GHA machinery that's deprecated
+      #    (https://github.com/AustinScola/mypy-cache-github-action/issues/277)
+      #    It may be simpler to use actions/cache ourselves to restore .mypy_cache.
+      - name: Restore/persist mypy's cache
+        uses: AustinScola/mypy-cache-github-action@df56268388422ee282636ee2c7a9cc55ec644a41
+      - name: Run mypy
+        run: poetry run mypy

   lint-crlf:
     runs-on: ubuntu-latest
@@ -104,12 +149,7 @@ jobs:
         with:
           ref: ${{ github.event.pull_request.head.sha }}
       - name: Install Rust
-        # There don't seem to be versioned releases of this action per se: for each rust
-        # version there is a branch which gets constantly rebased on top of master.
-        # We pin to a specific commit for paranoia's sake.
-        uses: dtolnay/rust-toolchain@fc3253060d0c959bea12a59f10f8391454a0b02d
-        with:
-          toolchain: 1.58.1
+        uses: dtolnay/rust-toolchain@1.58.1
       - uses: Swatinem/rust-cache@v2
       - uses: matrix-org/setup-python-poetry@v1
         with:
@@ -126,12 +166,8 @@ jobs:
       - uses: actions/checkout@v3
       - name: Install Rust
-        # There don't seem to be versioned releases of this action per se: for each rust
-        # version there is a branch which gets constantly rebased on top of master.
-        # We pin to a specific commit for paranoia's sake.
-        uses: dtolnay/rust-toolchain@fc3253060d0c959bea12a59f10f8391454a0b02d
+        uses: dtolnay/rust-toolchain@1.58.1
         with:
-          toolchain: 1.58.1
           components: clippy
       - uses: Swatinem/rust-cache@v2
@@ -148,10 +184,7 @@ jobs:
       - uses: actions/checkout@v3
      - name: Install Rust
-        # There don't seem to be versioned releases of this action per se: for each rust
-        # version there is a branch which gets constantly rebased on top of master.
-        # We pin to a specific commit for paranoia's sake.
-        uses: dtolnay/rust-toolchain@fc3253060d0c959bea12a59f10f8391454a0b02d
+        uses: dtolnay/rust-toolchain@master
         with:
           toolchain: nightly-2022-12-01
           components: clippy
@@ -168,10 +201,7 @@ jobs:
       - uses: actions/checkout@v3
       - name: Install Rust
-        # There don't seem to be versioned releases of this action per se: for each rust
-        # version there is a branch which gets constantly rebased on top of master.
-        # We pin to a specific commit for paranoia's sake.
-        uses: dtolnay/rust-toolchain@fc3253060d0c959bea12a59f10f8391454a0b02d
+        uses: dtolnay/rust-toolchain@master
         with:
           # We use nightly so that it correctly groups together imports
           toolchain: nightly-2022-12-01
@@ -185,6 +215,7 @@ jobs:
     if: ${{ !cancelled() }} # Run this even if prior jobs were skipped
     needs:
       - lint
+      - lint-mypy
       - lint-crlf
       - lint-newsfile
       - lint-pydantic
@@ -236,12 +267,7 @@ jobs:
             postgres:${{ matrix.job.postgres-version }}
       - name: Install Rust
-        # There don't seem to be versioned releases of this action per se: for each rust
-        # version there is a branch which gets constantly rebased on top of master.
-        # We pin to a specific commit for paranoia's sake.
-        uses: dtolnay/rust-toolchain@fc3253060d0c959bea12a59f10f8391454a0b02d
-        with:
-          toolchain: 1.58.1
+        uses: dtolnay/rust-toolchain@1.58.1
       - uses: Swatinem/rust-cache@v2
       - uses: matrix-org/setup-python-poetry@v1
@@ -281,12 +307,7 @@ jobs:
       - uses: actions/checkout@v3
       - name: Install Rust
-        # There don't seem to be versioned releases of this action per se: for each rust
-        # version there is a branch which gets constantly rebased on top of master.
-        # We pin to a specific commit for paranoia's sake.
-        uses: dtolnay/rust-toolchain@fc3253060d0c959bea12a59f10f8391454a0b02d
-        with:
-          toolchain: 1.58.1
+        uses: dtolnay/rust-toolchain@1.58.1
       - uses: Swatinem/rust-cache@v2
       # There aren't wheels for some of the older deps, so we need to install
@@ -402,12 +423,7 @@ jobs:
         run: cat sytest-blacklist .ci/worker-blacklist > synapse-blacklist-with-workers
       - name: Install Rust
-        # There don't seem to be versioned releases of this action per se: for each rust
-        # version there is a branch which gets constantly rebased on top of master.
-        # We pin to a specific commit for paranoia's sake.
-        uses: dtolnay/rust-toolchain@fc3253060d0c959bea12a59f10f8391454a0b02d
-        with:
-          toolchain: 1.58.1
+        uses: dtolnay/rust-toolchain@1.58.1
       - uses: Swatinem/rust-cache@v2
       - name: Run SyTest
@@ -547,12 +563,7 @@ jobs:
           path: synapse
       - name: Install Rust
-        # There don't seem to be versioned releases of this action per se: for each rust
-        # version there is a branch which gets constantly rebased on top of master.
-        # We pin to a specific commit for paranoia's sake.
-        uses: dtolnay/rust-toolchain@fc3253060d0c959bea12a59f10f8391454a0b02d
-        with:
-          toolchain: 1.58.1
+        uses: dtolnay/rust-toolchain@1.58.1
       - uses: Swatinem/rust-cache@v2
       - uses: actions/setup-go@v4
@@ -580,12 +591,7 @@ jobs:
       - uses: actions/checkout@v3
       - name: Install Rust
-        # There don't seem to be versioned releases of this action per se: for each rust
-        # version there is a branch which gets constantly rebased on top of master.
-        # We pin to a specific commit for paranoia's sake.
-        uses: dtolnay/rust-toolchain@fc3253060d0c959bea12a59f10f8391454a0b02d
-        with:
-          toolchain: 1.58.1
+        uses: dtolnay/rust-toolchain@1.58.1
       - uses: Swatinem/rust-cache@v2
       - run: cargo test
@@ -603,10 +609,7 @@ jobs:
       - uses: actions/checkout@v3
       - name: Install Rust
-        # There don't seem to be versioned releases of this action per se: for each rust
-        # version there is a branch which gets constantly rebased on top of master.
-        # We pin to a specific commit for paranoia's sake.
-        uses: dtolnay/rust-toolchain@fc3253060d0c959bea12a59f10f8391454a0b02d
+        uses: dtolnay/rust-toolchain@master
         with:
           toolchain: nightly-2022-12-01
       - uses: Swatinem/rust-cache@v2

View File

@@ -25,9 +25,7 @@ jobs:
       - uses: actions/checkout@v3
       - name: Install Rust
-        uses: dtolnay/rust-toolchain@fc3253060d0c959bea12a59f10f8391454a0b02d
-        with:
-          toolchain: stable
+        uses: dtolnay/rust-toolchain@stable
       - uses: Swatinem/rust-cache@v2
       - uses: matrix-org/setup-python-poetry@v1
@@ -50,9 +48,7 @@ jobs:
       - run: sudo apt-get -qq install xmlsec1
       - name: Install Rust
-        uses: dtolnay/rust-toolchain@fc3253060d0c959bea12a59f10f8391454a0b02d
-        with:
-          toolchain: stable
+        uses: dtolnay/rust-toolchain@stable
       - uses: Swatinem/rust-cache@v2
       - uses: matrix-org/setup-python-poetry@v1
@@ -89,9 +85,7 @@ jobs:
       - uses: actions/checkout@v3
       - name: Install Rust
-        uses: dtolnay/rust-toolchain@fc3253060d0c959bea12a59f10f8391454a0b02d
-        with:
-          toolchain: stable
+        uses: dtolnay/rust-toolchain@stable
       - uses: Swatinem/rust-cache@v2
       - name: Patch dependencies

View File

@@ -1,5 +1,5 @@
-Synapse 1.81.0rc1 (2023-04-04)
-==============================
+Synapse 1.81.0 (2023-04-11)
+===========================

 Synapse now attempts the versioned appservice paths before falling back to the
 [legacy paths](https://spec.matrix.org/v1.6/application-service-api/#legacy-routes).
@@ -14,6 +14,27 @@ A future version of Synapse (v1.88.0 or later) will remove support for legacy
 application service routes and query parameter authorization.

+No significant changes since 1.81.0rc2.
+
+
+Synapse 1.81.0rc2 (2023-04-06)
+==============================
+
+Bugfixes
+--------
+
+- Fix the `set_device_id_for_pushers_txn` background update crash. ([\#15391](https://github.com/matrix-org/synapse/issues/15391))
+
+
+Internal Changes
+----------------
+
+- Update CI to run complement under the latest stable go version. ([\#15403](https://github.com/matrix-org/synapse/issues/15403))
+
+
+Synapse 1.81.0rc1 (2023-04-04)
+==============================
+
 Features
 --------

1
changelog.d/15181.bugfix Normal file
View File

@ -0,0 +1 @@
Delete server-side backup keys when deactivating an account.

1
changelog.d/15372.misc Normal file
View File

@ -0,0 +1 @@
Bump black from 23.1.0 to 23.3.0.

1
changelog.d/15373.misc Normal file
View File

@ -0,0 +1 @@
Bump pyopenssl from 23.1.0 to 23.1.1.

1
changelog.d/15374.misc Normal file
View File

@ -0,0 +1 @@
Bump types-psycopg2 from 2.9.21.8 to 2.9.21.9.

1
changelog.d/15375.misc Normal file
View File

@ -0,0 +1 @@
Bump types-netaddr from 0.8.0.6 to 0.8.0.7.

1
changelog.d/15376.misc Normal file
View File

@ -0,0 +1 @@
Bump types-opentracing from 2.4.10.3 to 2.4.10.4.

1
changelog.d/15382.misc Normal file
View File

@ -0,0 +1 @@
Improve DB performance of clearing out old data from `stream_ordering_to_exterm`.

View File

@ -1 +0,0 @@
Fix the `set_device_id_for_pushers_txn` background update crash.

1
changelog.d/15393.misc Normal file
View File

@ -0,0 +1 @@
Implement [MSC3989](https://github.com/matrix-org/matrix-spec-proposals/pull/3989) redaction algorithm.

1
changelog.d/15394.misc Normal file
View File

@ -0,0 +1 @@
Implement [MSC2175](https://github.com/matrix-org/matrix-doc/pull/2175) to stop adding `creator` to create events.

View File

@ -1 +0,0 @@
Update CI to run complement under the latest stable go version.

1
changelog.d/15404.misc Normal file
View File

@ -0,0 +1 @@
Bump dawidd6/action-download-artifact from 2.26.0 to 2.26.1.

1
changelog.d/15406.misc Normal file
View File

@ -0,0 +1 @@
Trust dtolnay/rust-toolchain in CI.

1
changelog.d/15409.misc Normal file
View File

@ -0,0 +1 @@
Explicitly install Synapse during typechecking in CI.

1
changelog.d/15412.misc Normal file
View File

@ -0,0 +1 @@
Bump parameterized from 0.8.1 to 0.9.0.

1
changelog.d/15413.misc Normal file
View File

@ -0,0 +1 @@
Bump types-pillow from 9.4.0.17 to 9.4.0.19.

1
changelog.d/15414.misc Normal file
View File

@ -0,0 +1 @@
Bump sentry-sdk from 1.17.0 to 1.19.1.

1
changelog.d/15415.misc Normal file
View File

@ -0,0 +1 @@
Bump immutabledict from 2.2.3 to 2.2.4.

1
changelog.d/15425.bugfix Normal file
View File

@ -0,0 +1 @@
Synapse now correctly fails to start if the config option `app_service_config_files` is not a list.

12
debian/changelog vendored
View File

@@ -1,3 +1,15 @@
+matrix-synapse-py3 (1.81.0) stable; urgency=medium
+
+  * New Synapse release 1.81.0.
+
+ -- Synapse Packaging team <packages@matrix.org>  Tue, 11 Apr 2023 14:18:35 +0100
+
+matrix-synapse-py3 (1.81.0~rc2) stable; urgency=medium
+
+  * New Synapse release 1.81.0rc2.
+
+ -- Synapse Packaging team <packages@matrix.org>  Thu, 06 Apr 2023 16:07:54 +0100
+
 matrix-synapse-py3 (1.81.0~rc1) stable; urgency=medium

   * New Synapse release 1.81.0rc1.

103
poetry.lock generated
View File

@ -156,37 +156,37 @@ lxml = ["lxml"]
[[package]] [[package]]
name = "black" name = "black"
version = "23.1.0" version = "23.3.0"
description = "The uncompromising code formatter." description = "The uncompromising code formatter."
category = "dev" category = "dev"
optional = false optional = false
python-versions = ">=3.7" python-versions = ">=3.7"
files = [ files = [
{file = "black-23.1.0-cp310-cp310-macosx_10_16_arm64.whl", hash = "sha256:b6a92a41ee34b883b359998f0c8e6eb8e99803aa8bf3123bf2b2e6fec505a221"}, {file = "black-23.3.0-cp310-cp310-macosx_10_16_arm64.whl", hash = "sha256:0945e13506be58bf7db93ee5853243eb368ace1c08a24c65ce108986eac65915"},
{file = "black-23.1.0-cp310-cp310-macosx_10_16_universal2.whl", hash = "sha256:57c18c5165c1dbe291d5306e53fb3988122890e57bd9b3dcb75f967f13411a26"}, {file = "black-23.3.0-cp310-cp310-macosx_10_16_universal2.whl", hash = "sha256:67de8d0c209eb5b330cce2469503de11bca4085880d62f1628bd9972cc3366b9"},
{file = "black-23.1.0-cp310-cp310-macosx_10_16_x86_64.whl", hash = "sha256:9880d7d419bb7e709b37e28deb5e68a49227713b623c72b2b931028ea65f619b"}, {file = "black-23.3.0-cp310-cp310-macosx_10_16_x86_64.whl", hash = "sha256:7c3eb7cea23904399866c55826b31c1f55bbcd3890ce22ff70466b907b6775c2"},
{file = "black-23.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e6663f91b6feca5d06f2ccd49a10f254f9298cc1f7f49c46e498a0771b507104"}, {file = "black-23.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:32daa9783106c28815d05b724238e30718f34155653d4d6e125dc7daec8e260c"},
{file = "black-23.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:9afd3f493666a0cd8f8df9a0200c6359ac53940cbde049dcb1a7eb6ee2dd7074"}, {file = "black-23.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:35d1381d7a22cc5b2be2f72c7dfdae4072a3336060635718cc7e1ede24221d6c"},
{file = "black-23.1.0-cp311-cp311-macosx_10_16_arm64.whl", hash = "sha256:bfffba28dc52a58f04492181392ee380e95262af14ee01d4bc7bb1b1c6ca8d27"}, {file = "black-23.3.0-cp311-cp311-macosx_10_16_arm64.whl", hash = "sha256:a8a968125d0a6a404842fa1bf0b349a568634f856aa08ffaff40ae0dfa52e7c6"},
{file = "black-23.1.0-cp311-cp311-macosx_10_16_universal2.whl", hash = "sha256:c1c476bc7b7d021321e7d93dc2cbd78ce103b84d5a4cf97ed535fbc0d6660648"}, {file = "black-23.3.0-cp311-cp311-macosx_10_16_universal2.whl", hash = "sha256:c7ab5790333c448903c4b721b59c0d80b11fe5e9803d8703e84dcb8da56fec1b"},
{file = "black-23.1.0-cp311-cp311-macosx_10_16_x86_64.whl", hash = "sha256:382998821f58e5c8238d3166c492139573325287820963d2f7de4d518bd76958"}, {file = "black-23.3.0-cp311-cp311-macosx_10_16_x86_64.whl", hash = "sha256:a6f6886c9869d4daae2d1715ce34a19bbc4b95006d20ed785ca00fa03cba312d"},
{file = "black-23.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bf649fda611c8550ca9d7592b69f0637218c2369b7744694c5e4902873b2f3a"}, {file = "black-23.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f3c333ea1dd6771b2d3777482429864f8e258899f6ff05826c3a4fcc5ce3f70"},
{file = "black-23.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:121ca7f10b4a01fd99951234abdbd97728e1240be89fde18480ffac16503d481"}, {file = "black-23.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:11c410f71b876f961d1de77b9699ad19f939094c3a677323f43d7a29855fe326"},
{file = "black-23.1.0-cp37-cp37m-macosx_10_16_x86_64.whl", hash = "sha256:a8471939da5e824b891b25751955be52ee7f8a30a916d570a5ba8e0f2eb2ecad"}, {file = "black-23.3.0-cp37-cp37m-macosx_10_16_x86_64.whl", hash = "sha256:1d06691f1eb8de91cd1b322f21e3bfc9efe0c7ca1f0e1eb1db44ea367dff656b"},
{file = "black-23.1.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8178318cb74f98bc571eef19068f6ab5613b3e59d4f47771582f04e175570ed8"}, {file = "black-23.3.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:50cb33cac881766a5cd9913e10ff75b1e8eb71babf4c7104f2e9c52da1fb7de2"},
{file = "black-23.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:a436e7881d33acaf2536c46a454bb964a50eff59b21b51c6ccf5a40601fbef24"}, {file = "black-23.3.0-cp37-cp37m-win_amd64.whl", hash = "sha256:e114420bf26b90d4b9daa597351337762b63039752bdf72bf361364c1aa05925"},
{file = "black-23.1.0-cp38-cp38-macosx_10_16_arm64.whl", hash = "sha256:a59db0a2094d2259c554676403fa2fac3473ccf1354c1c63eccf7ae65aac8ab6"}, {file = "black-23.3.0-cp38-cp38-macosx_10_16_arm64.whl", hash = "sha256:48f9d345675bb7fbc3dd85821b12487e1b9a75242028adad0333ce36ed2a6d27"},
{file = "black-23.1.0-cp38-cp38-macosx_10_16_universal2.whl", hash = "sha256:0052dba51dec07ed029ed61b18183942043e00008ec65d5028814afaab9a22fd"}, {file = "black-23.3.0-cp38-cp38-macosx_10_16_universal2.whl", hash = "sha256:714290490c18fb0126baa0fca0a54ee795f7502b44177e1ce7624ba1c00f2331"},
{file = "black-23.1.0-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:49f7b39e30f326a34b5c9a4213213a6b221d7ae9d58ec70df1c4a307cf2a1580"}, {file = "black-23.3.0-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:064101748afa12ad2291c2b91c960be28b817c0c7eaa35bec09cc63aa56493c5"},
{file = "black-23.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:162e37d49e93bd6eb6f1afc3e17a3d23a823042530c37c3c42eeeaf026f38468"}, {file = "black-23.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:562bd3a70495facf56814293149e51aa1be9931567474993c7942ff7d3533961"},
{file = "black-23.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:8b70eb40a78dfac24842458476135f9b99ab952dd3f2dab738c1881a9b38b753"}, {file = "black-23.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:e198cf27888ad6f4ff331ca1c48ffc038848ea9f031a3b40ba36aced7e22f2c8"},
{file = "black-23.1.0-cp39-cp39-macosx_10_16_arm64.whl", hash = "sha256:a29650759a6a0944e7cca036674655c2f0f63806ddecc45ed40b7b8aa314b651"}, {file = "black-23.3.0-cp39-cp39-macosx_10_16_arm64.whl", hash = "sha256:3238f2aacf827d18d26db07524e44741233ae09a584273aa059066d644ca7b30"},
{file = "black-23.1.0-cp39-cp39-macosx_10_16_universal2.whl", hash = "sha256:bb460c8561c8c1bec7824ecbc3ce085eb50005883a6203dcfb0122e95797ee06"}, {file = "black-23.3.0-cp39-cp39-macosx_10_16_universal2.whl", hash = "sha256:f0bd2f4a58d6666500542b26354978218a9babcdc972722f4bf90779524515f3"},
{file = "black-23.1.0-cp39-cp39-macosx_10_16_x86_64.whl", hash = "sha256:c91dfc2c2a4e50df0026f88d2215e166616e0c80e86004d0003ece0488db2739"}, {file = "black-23.3.0-cp39-cp39-macosx_10_16_x86_64.whl", hash = "sha256:92c543f6854c28a3c7f39f4d9b7694f9a6eb9d3c5e2ece488c327b6e7ea9b266"},
{file = "black-23.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2a951cc83ab535d248c89f300eccbd625e80ab880fbcfb5ac8afb5f01a258ac9"}, {file = "black-23.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a150542a204124ed00683f0db1f5cf1c2aaaa9cc3495b7a3b5976fb136090ab"},
{file = "black-23.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:0680d4380db3719ebcfb2613f34e86c8e6d15ffeabcf8ec59355c5e7b85bb555"}, {file = "black-23.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:6b39abdfb402002b8a7d030ccc85cf5afff64ee90fa4c5aebc531e3ad0175ddb"},
{file = "black-23.1.0-py3-none-any.whl", hash = "sha256:7a0f701d314cfa0896b9001df70a530eb2472babb76086344e688829efd97d32"}, {file = "black-23.3.0-py3-none-any.whl", hash = "sha256:ec751418022185b0c1bb7d7736e6933d40bbb14c14a0abcf9123d1b159f98dd4"},
{file = "black-23.1.0.tar.gz", hash = "sha256:b0bd97bea8903f5a2ba7219257a44e3f1f9d00073d6cc1add68f0beec69692ac"}, {file = "black-23.3.0.tar.gz", hash = "sha256:1c7b8d606e728a41ea1ccbd7264677e494e87cf630e399262ced92d4a8dac940"},
] ]
[package.dependencies] [package.dependencies]
@ -855,14 +855,14 @@ files = [
[[package]] [[package]]
name = "immutabledict" name = "immutabledict"
version = "2.2.3" version = "2.2.4"
description = "Immutable wrapper around dictionaries (a fork of frozendict)" description = "Immutable wrapper around dictionaries (a fork of frozendict)"
category = "main" category = "main"
optional = false optional = false
python-versions = ">=3.7,<4.0" python-versions = ">=3.7,<4.0"
files = [ files = [
{file = "immutabledict-2.2.3-py3-none-any.whl", hash = "sha256:a7b078ebcc4a58ddc73b55f808b26e7c8c2d5183fad325615112689e1a63e714"}, {file = "immutabledict-2.2.4-py3-none-any.whl", hash = "sha256:c827715c147d2364522f9a7709cc424c7001015274a3c705250e673605bde64b"},
{file = "immutabledict-2.2.3.tar.gz", hash = "sha256:0e1e8a3f2b3ff062daa19795f947e9ec7a58add269d44e34d3ab4319e1343853"}, {file = "immutabledict-2.2.4.tar.gz", hash = "sha256:3bedc0741faaa2846f6edf5c29183f993da3abaff6a5961bb70a5659bb9e68ab"},
] ]
[[package]] [[package]]
@ -1605,14 +1605,14 @@ files = [
[[package]] [[package]]
name = "parameterized" name = "parameterized"
version = "0.8.1" version = "0.9.0"
description = "Parameterized testing with any Python test framework" description = "Parameterized testing with any Python test framework"
category = "main" category = "main"
optional = false optional = false
python-versions = "*" python-versions = ">=3.7"
files = [ files = [
{file = "parameterized-0.8.1-py2.py3-none-any.whl", hash = "sha256:9cbb0b69a03e8695d68b3399a8a5825200976536fe1cb79db60ed6a4c8c9efe9"}, {file = "parameterized-0.9.0-py2.py3-none-any.whl", hash = "sha256:4e0758e3d41bea3bbd05ec14fc2c24736723f243b28d702081aef438c9372b1b"},
{file = "parameterized-0.8.1.tar.gz", hash = "sha256:41bbff37d6186430f77f900d777e5bb6a24928a1c46fb1de692f8b52b8833b5c"}, {file = "parameterized-0.9.0.tar.gz", hash = "sha256:7fc905272cefa4f364c1a3429cbbe9c0f98b793988efb5bf90aac80f08db09b1"},
] ]
[package.extras] [package.extras]
@ -2060,14 +2060,14 @@ tests = ["hypothesis (>=3.27.0)", "pytest (>=3.2.1,!=3.3.0)"]
[[package]] [[package]]
name = "pyopenssl" name = "pyopenssl"
version = "23.1.0" version = "23.1.1"
description = "Python wrapper module around the OpenSSL library" description = "Python wrapper module around the OpenSSL library"
category = "main" category = "main"
optional = false optional = false
python-versions = ">=3.6" python-versions = ">=3.6"
files = [ files = [
{file = "pyOpenSSL-23.1.0-py3-none-any.whl", hash = "sha256:fb96e936866ad65662c22d0de84ca0fba58397893cdfe0f01334fa93382af23c"}, {file = "pyOpenSSL-23.1.1-py3-none-any.whl", hash = "sha256:9e0c526404a210df9d2b18cd33364beadb0dc858a739b885677bc65e105d4a4c"},
{file = "pyOpenSSL-23.1.0.tar.gz", hash = "sha256:8cb78010a1eb2c8e24b851693b7b04dfe9b1dc0a5ab3843927b10a85b1dfbb2e"}, {file = "pyOpenSSL-23.1.1.tar.gz", hash = "sha256:841498b9bec61623b1b6c47ebbc02367c07d60e0e195f19790817f10cc8db0b7"},
] ]
[package.dependencies] [package.dependencies]
@ -2382,14 +2382,14 @@ doc = ["Sphinx", "sphinx-rtd-theme"]
[[package]] [[package]]
name = "sentry-sdk" name = "sentry-sdk"
version = "1.17.0" version = "1.19.1"
description = "Python client for Sentry (https://sentry.io)" description = "Python client for Sentry (https://sentry.io)"
category = "main" category = "main"
optional = true optional = true
python-versions = "*" python-versions = "*"
files = [ files = [
{file = "sentry-sdk-1.17.0.tar.gz", hash = "sha256:ad40860325c94d1a656da70fba5a7c4dbb2f6809d3cc2d00f74ca0b608330f14"}, {file = "sentry-sdk-1.19.1.tar.gz", hash = "sha256:7ae78bd921981a5010ab540d6bdf3b793659a4db8cccf7f16180702d48a80d84"},
{file = "sentry_sdk-1.17.0-py2.py3-none-any.whl", hash = "sha256:3c4e898f7a3edf5a2042cd0dcab6ee124e2112189228c272c08ad15d3850c201"}, {file = "sentry_sdk-1.19.1-py2.py3-none-any.whl", hash = "sha256:885a11c69df23e53eb281d003b9ff15a5bdfa43d8a2a53589be52104a1b4582f"},
] ]
[package.dependencies] [package.dependencies]
@ -2407,6 +2407,7 @@ django = ["django (>=1.8)"]
falcon = ["falcon (>=1.4)"] falcon = ["falcon (>=1.4)"]
fastapi = ["fastapi (>=0.79.0)"] fastapi = ["fastapi (>=0.79.0)"]
flask = ["blinker (>=1.1)", "flask (>=0.11)"] flask = ["blinker (>=1.1)", "flask (>=0.11)"]
grpcio = ["grpcio (>=1.21.1)"]
httpx = ["httpx (>=0.16.0)"] httpx = ["httpx (>=0.16.0)"]
huey = ["huey (>=2)"] huey = ["huey (>=2)"]
opentelemetry = ["opentelemetry-distro (>=0.35b0)"] opentelemetry = ["opentelemetry-distro (>=0.35b0)"]
@ -3033,50 +3034,50 @@ files = [
[[package]] [[package]]
name = "types-netaddr" name = "types-netaddr"
version = "0.8.0.6" version = "0.8.0.7"
description = "Typing stubs for netaddr" description = "Typing stubs for netaddr"
category = "dev" category = "dev"
optional = false optional = false
python-versions = "*" python-versions = "*"
files = [ files = [
{file = "types-netaddr-0.8.0.6.tar.gz", hash = "sha256:e5048640c2412e7ea2d3eb02c94ae1b50442b2c7a50a7c48e957676139cdf19b"}, {file = "types-netaddr-0.8.0.7.tar.gz", hash = "sha256:3362864fa0258782d449b91707f37e55f62290b4f438974a08758b498169e109"},
{file = "types_netaddr-0.8.0.6-py3-none-any.whl", hash = "sha256:d4d40d1ba35430a4e4c929596542cd37e6831f5d08676b33dc84e06e01a840f6"}, {file = "types_netaddr-0.8.0.7-py3-none-any.whl", hash = "sha256:a540cdfb2f858a0509ce5a4e4fcc80ef11b19f10a2473e48d32217af517818c0"},
] ]
[[package]] [[package]]
name = "types-opentracing" name = "types-opentracing"
version = "2.4.10.3" version = "2.4.10.4"
description = "Typing stubs for opentracing" description = "Typing stubs for opentracing"
category = "dev" category = "dev"
optional = false optional = false
python-versions = "*" python-versions = "*"
files = [ files = [
{file = "types-opentracing-2.4.10.3.tar.gz", hash = "sha256:b277f114265b41216714f9c77dffcab57038f1730fd141e2c55c5c9f6f2caa87"}, {file = "types-opentracing-2.4.10.4.tar.gz", hash = "sha256:347040c9da4ada7d3c795659912c95d98c5651e242e8eaa0344815fee5bb97e2"},
{file = "types_opentracing-2.4.10.3-py3-none-any.whl", hash = "sha256:60244d718fcd9de7043645ecaf597222d550432507098ab2e6268f7b589a7fa7"}, {file = "types_opentracing-2.4.10.4-py3-none-any.whl", hash = "sha256:73c9b958eea3df6c4906ebf3865608a562dd9981c1bbc75a373a583c613bed56"},
] ]
[[package]] [[package]]
name = "types-pillow" name = "types-pillow"
version = "9.4.0.17" version = "9.4.0.19"
description = "Typing stubs for Pillow" description = "Typing stubs for Pillow"
category = "dev" category = "dev"
optional = false optional = false
python-versions = "*" python-versions = "*"
files = [ files = [
{file = "types-Pillow-9.4.0.17.tar.gz", hash = "sha256:7f0e871d2d46fbb6bc7deca3e02dc552cf9c1e8b49deb9595509551be3954e49"}, {file = "types-Pillow-9.4.0.19.tar.gz", hash = "sha256:a04401181979049977e318dae4523ab5ae8246314fc68fcf50b043ac885a5468"},
{file = "types_Pillow-9.4.0.17-py3-none-any.whl", hash = "sha256:f8b848a05f17cb4d53d245c59bf560372b9778d4cfaf9705f6245009bf9f65f3"}, {file = "types_Pillow-9.4.0.19-py3-none-any.whl", hash = "sha256:b55f2508be21e68a39f0a41830f1f1725aba0888e727e2eccd253c78cd5357a5"},
] ]
[[package]] [[package]]
name = "types-psycopg2" name = "types-psycopg2"
version = "2.9.21.8" version = "2.9.21.9"
description = "Typing stubs for psycopg2" description = "Typing stubs for psycopg2"
category = "dev" category = "dev"
optional = false optional = false
python-versions = "*" python-versions = "*"
files = [ files = [
{file = "types-psycopg2-2.9.21.8.tar.gz", hash = "sha256:b629440ffcfdebd742fab07f777ff69aefdd19394a138c18e921a1964c3cf5f6"}, {file = "types-psycopg2-2.9.21.9.tar.gz", hash = "sha256:388dc36a04551632289c4aaf1fc5b91e147654b165db896d094844e216f22bf5"},
{file = "types_psycopg2-2.9.21.8-py3-none-any.whl", hash = "sha256:e747fbec6e0e2502b625bc7686d13cc62fc170e8ae920e5ba27fac946778eeb9"}, {file = "types_psycopg2-2.9.21.9-py3-none-any.whl", hash = "sha256:0332525fb9d3031d3da46f091e7d40b2c4d4958e9c00d2b4c1eaaa9f8ef9de4e"},
] ]
[[package]] [[package]]

View File

@@ -89,7 +89,7 @@ manifest-path = "rust/Cargo.toml"

 [tool.poetry]
 name = "matrix-synapse"
-version = "1.81.0rc1"
+version = "1.81.0"
 description = "Homeserver for the Matrix decentralised comms protocol"
 authors = ["Matrix.org Team and Contributors <packages@matrix.org>"]
 license = "Apache-2.0"

View File

@@ -59,6 +59,7 @@ from synapse.storage.databases.main.account_data import AccountDataWorkerStore
 from synapse.storage.databases.main.client_ips import ClientIpBackgroundUpdateStore
 from synapse.storage.databases.main.deviceinbox import DeviceInboxBackgroundUpdateStore
 from synapse.storage.databases.main.devices import DeviceBackgroundUpdateStore
+from synapse.storage.databases.main.e2e_room_keys import EndToEndRoomKeyBackgroundStore
 from synapse.storage.databases.main.end_to_end_keys import EndToEndKeyBackgroundStore
 from synapse.storage.databases.main.event_push_actions import EventPushActionsStore
 from synapse.storage.databases.main.events_bg_updates import (
@@ -225,6 +226,7 @@ class Store(
     MainStateBackgroundUpdateStore,
     UserDirectoryBackgroundUpdateStore,
     EndToEndKeyBackgroundStore,
+    EndToEndRoomKeyBackgroundStore,
     StatsStore,
     AccountDataWorkerStore,
     PushRuleStore,

View File

@@ -215,6 +215,8 @@ class EventContentFields:
     FEDERATE: Final = "m.federate"

     # The creator of the room, as used in `m.room.create` events.
+    #
+    # This is deprecated in MSC2175.
     ROOM_CREATOR: Final = "creator"

     # Used in m.room.guest_access events.

View File

@@ -78,6 +78,8 @@ class RoomVersion:
     # MSC2209: Check 'notifications' key while verifying
     # m.room.power_levels auth rules.
     limit_notifications_power_levels: bool
+    # MSC2175: No longer include the creator in m.room.create events.
+    msc2175_implicit_room_creator: bool
     # MSC2174/MSC2176: Apply updated redaction rules algorithm.
     msc2176_redaction_rules: bool
     # MSC3083: Support the 'restricted' join_rule.
@@ -104,6 +106,8 @@ class RoomVersion:
     # support the flag. Unknown flags are ignored by the evaluator, making conditions
     # fail if used.
     msc3931_push_features: Tuple[str, ...]  # values from PushRuleRoomFlag
+    # MSC3989: Redact the origin field.
+    msc3989_redaction_rules: bool


 class RoomVersions:
@ -116,6 +120,7 @@ class RoomVersions:
special_case_aliases_auth=True, special_case_aliases_auth=True,
strict_canonicaljson=False, strict_canonicaljson=False,
limit_notifications_power_levels=False, limit_notifications_power_levels=False,
msc2175_implicit_room_creator=False,
msc2176_redaction_rules=False, msc2176_redaction_rules=False,
msc3083_join_rules=False, msc3083_join_rules=False,
msc3375_redaction_rules=False, msc3375_redaction_rules=False,
@ -125,6 +130,7 @@ class RoomVersions:
msc3787_knock_restricted_join_rule=False, msc3787_knock_restricted_join_rule=False,
msc3667_int_only_power_levels=False, msc3667_int_only_power_levels=False,
msc3931_push_features=(), msc3931_push_features=(),
msc3989_redaction_rules=False,
) )
V2 = RoomVersion( V2 = RoomVersion(
"2", "2",
@ -135,6 +141,7 @@ class RoomVersions:
special_case_aliases_auth=True, special_case_aliases_auth=True,
strict_canonicaljson=False, strict_canonicaljson=False,
limit_notifications_power_levels=False, limit_notifications_power_levels=False,
msc2175_implicit_room_creator=False,
msc2176_redaction_rules=False, msc2176_redaction_rules=False,
msc3083_join_rules=False, msc3083_join_rules=False,
msc3375_redaction_rules=False, msc3375_redaction_rules=False,
@ -144,6 +151,7 @@ class RoomVersions:
msc3787_knock_restricted_join_rule=False, msc3787_knock_restricted_join_rule=False,
msc3667_int_only_power_levels=False, msc3667_int_only_power_levels=False,
msc3931_push_features=(), msc3931_push_features=(),
msc3989_redaction_rules=False,
) )
V3 = RoomVersion( V3 = RoomVersion(
"3", "3",
@ -154,6 +162,7 @@ class RoomVersions:
special_case_aliases_auth=True, special_case_aliases_auth=True,
strict_canonicaljson=False, strict_canonicaljson=False,
limit_notifications_power_levels=False, limit_notifications_power_levels=False,
msc2175_implicit_room_creator=False,
msc2176_redaction_rules=False, msc2176_redaction_rules=False,
msc3083_join_rules=False, msc3083_join_rules=False,
msc3375_redaction_rules=False, msc3375_redaction_rules=False,
@ -163,6 +172,7 @@ class RoomVersions:
msc3787_knock_restricted_join_rule=False, msc3787_knock_restricted_join_rule=False,
msc3667_int_only_power_levels=False, msc3667_int_only_power_levels=False,
msc3931_push_features=(), msc3931_push_features=(),
msc3989_redaction_rules=False,
) )
V4 = RoomVersion( V4 = RoomVersion(
"4", "4",
@ -173,6 +183,7 @@ class RoomVersions:
special_case_aliases_auth=True, special_case_aliases_auth=True,
strict_canonicaljson=False, strict_canonicaljson=False,
limit_notifications_power_levels=False, limit_notifications_power_levels=False,
msc2175_implicit_room_creator=False,
msc2176_redaction_rules=False, msc2176_redaction_rules=False,
msc3083_join_rules=False, msc3083_join_rules=False,
msc3375_redaction_rules=False, msc3375_redaction_rules=False,
@ -182,6 +193,7 @@ class RoomVersions:
msc3787_knock_restricted_join_rule=False, msc3787_knock_restricted_join_rule=False,
msc3667_int_only_power_levels=False, msc3667_int_only_power_levels=False,
msc3931_push_features=(), msc3931_push_features=(),
msc3989_redaction_rules=False,
) )
V5 = RoomVersion( V5 = RoomVersion(
"5", "5",
@ -192,6 +204,7 @@ class RoomVersions:
special_case_aliases_auth=True, special_case_aliases_auth=True,
strict_canonicaljson=False, strict_canonicaljson=False,
limit_notifications_power_levels=False, limit_notifications_power_levels=False,
msc2175_implicit_room_creator=False,
msc2176_redaction_rules=False, msc2176_redaction_rules=False,
msc3083_join_rules=False, msc3083_join_rules=False,
msc3375_redaction_rules=False, msc3375_redaction_rules=False,
@ -201,6 +214,7 @@ class RoomVersions:
msc3787_knock_restricted_join_rule=False, msc3787_knock_restricted_join_rule=False,
msc3667_int_only_power_levels=False, msc3667_int_only_power_levels=False,
msc3931_push_features=(), msc3931_push_features=(),
msc3989_redaction_rules=False,
) )
V6 = RoomVersion( V6 = RoomVersion(
"6", "6",
@ -211,6 +225,7 @@ class RoomVersions:
special_case_aliases_auth=False, special_case_aliases_auth=False,
strict_canonicaljson=True, strict_canonicaljson=True,
limit_notifications_power_levels=True, limit_notifications_power_levels=True,
msc2175_implicit_room_creator=False,
msc2176_redaction_rules=False, msc2176_redaction_rules=False,
msc3083_join_rules=False, msc3083_join_rules=False,
msc3375_redaction_rules=False, msc3375_redaction_rules=False,
@ -220,6 +235,7 @@ class RoomVersions:
msc3787_knock_restricted_join_rule=False, msc3787_knock_restricted_join_rule=False,
msc3667_int_only_power_levels=False, msc3667_int_only_power_levels=False,
msc3931_push_features=(), msc3931_push_features=(),
msc3989_redaction_rules=False,
) )
MSC2176 = RoomVersion( MSC2176 = RoomVersion(
"org.matrix.msc2176", "org.matrix.msc2176",
@ -230,6 +246,7 @@ class RoomVersions:
special_case_aliases_auth=False, special_case_aliases_auth=False,
strict_canonicaljson=True, strict_canonicaljson=True,
limit_notifications_power_levels=True, limit_notifications_power_levels=True,
msc2175_implicit_room_creator=False,
msc2176_redaction_rules=True, msc2176_redaction_rules=True,
msc3083_join_rules=False, msc3083_join_rules=False,
msc3375_redaction_rules=False, msc3375_redaction_rules=False,
@ -239,6 +256,7 @@ class RoomVersions:
msc3787_knock_restricted_join_rule=False, msc3787_knock_restricted_join_rule=False,
msc3667_int_only_power_levels=False, msc3667_int_only_power_levels=False,
msc3931_push_features=(), msc3931_push_features=(),
msc3989_redaction_rules=False,
) )
V7 = RoomVersion( V7 = RoomVersion(
"7", "7",
@ -249,6 +267,7 @@ class RoomVersions:
special_case_aliases_auth=False, special_case_aliases_auth=False,
strict_canonicaljson=True, strict_canonicaljson=True,
limit_notifications_power_levels=True, limit_notifications_power_levels=True,
msc2175_implicit_room_creator=False,
msc2176_redaction_rules=False, msc2176_redaction_rules=False,
msc3083_join_rules=False, msc3083_join_rules=False,
msc3375_redaction_rules=False, msc3375_redaction_rules=False,
@ -258,6 +277,7 @@ class RoomVersions:
msc3787_knock_restricted_join_rule=False, msc3787_knock_restricted_join_rule=False,
msc3667_int_only_power_levels=False, msc3667_int_only_power_levels=False,
msc3931_push_features=(), msc3931_push_features=(),
msc3989_redaction_rules=False,
) )
V8 = RoomVersion( V8 = RoomVersion(
"8", "8",
@ -268,6 +288,7 @@ class RoomVersions:
special_case_aliases_auth=False, special_case_aliases_auth=False,
strict_canonicaljson=True, strict_canonicaljson=True,
limit_notifications_power_levels=True, limit_notifications_power_levels=True,
msc2175_implicit_room_creator=False,
msc2176_redaction_rules=False, msc2176_redaction_rules=False,
msc3083_join_rules=True, msc3083_join_rules=True,
msc3375_redaction_rules=False, msc3375_redaction_rules=False,
@ -277,6 +298,7 @@ class RoomVersions:
msc3787_knock_restricted_join_rule=False, msc3787_knock_restricted_join_rule=False,
msc3667_int_only_power_levels=False, msc3667_int_only_power_levels=False,
msc3931_push_features=(), msc3931_push_features=(),
msc3989_redaction_rules=False,
) )
V9 = RoomVersion( V9 = RoomVersion(
"9", "9",
@ -287,6 +309,7 @@ class RoomVersions:
special_case_aliases_auth=False, special_case_aliases_auth=False,
strict_canonicaljson=True, strict_canonicaljson=True,
limit_notifications_power_levels=True, limit_notifications_power_levels=True,
msc2175_implicit_room_creator=False,
msc2176_redaction_rules=False, msc2176_redaction_rules=False,
msc3083_join_rules=True, msc3083_join_rules=True,
msc3375_redaction_rules=True, msc3375_redaction_rules=True,
@ -296,6 +319,7 @@ class RoomVersions:
msc3787_knock_restricted_join_rule=False, msc3787_knock_restricted_join_rule=False,
msc3667_int_only_power_levels=False, msc3667_int_only_power_levels=False,
msc3931_push_features=(), msc3931_push_features=(),
msc3989_redaction_rules=False,
) )
MSC3787 = RoomVersion( MSC3787 = RoomVersion(
"org.matrix.msc3787", "org.matrix.msc3787",
@ -306,6 +330,7 @@ class RoomVersions:
special_case_aliases_auth=False, special_case_aliases_auth=False,
strict_canonicaljson=True, strict_canonicaljson=True,
limit_notifications_power_levels=True, limit_notifications_power_levels=True,
msc2175_implicit_room_creator=False,
msc2176_redaction_rules=False, msc2176_redaction_rules=False,
msc3083_join_rules=True, msc3083_join_rules=True,
msc3375_redaction_rules=True, msc3375_redaction_rules=True,
@ -315,6 +340,7 @@ class RoomVersions:
msc3787_knock_restricted_join_rule=True, msc3787_knock_restricted_join_rule=True,
msc3667_int_only_power_levels=False, msc3667_int_only_power_levels=False,
msc3931_push_features=(), msc3931_push_features=(),
msc3989_redaction_rules=False,
) )
V10 = RoomVersion( V10 = RoomVersion(
"10", "10",
@ -325,6 +351,7 @@ class RoomVersions:
special_case_aliases_auth=False, special_case_aliases_auth=False,
strict_canonicaljson=True, strict_canonicaljson=True,
limit_notifications_power_levels=True, limit_notifications_power_levels=True,
msc2175_implicit_room_creator=False,
msc2176_redaction_rules=False, msc2176_redaction_rules=False,
msc3083_join_rules=True, msc3083_join_rules=True,
msc3375_redaction_rules=True, msc3375_redaction_rules=True,
@ -334,6 +361,7 @@ class RoomVersions:
msc3787_knock_restricted_join_rule=True, msc3787_knock_restricted_join_rule=True,
msc3667_int_only_power_levels=True, msc3667_int_only_power_levels=True,
msc3931_push_features=(), msc3931_push_features=(),
msc3989_redaction_rules=False,
) )
MSC2716v4 = RoomVersion( MSC2716v4 = RoomVersion(
"org.matrix.msc2716v4", "org.matrix.msc2716v4",
@ -344,6 +372,7 @@ class RoomVersions:
special_case_aliases_auth=False, special_case_aliases_auth=False,
strict_canonicaljson=True, strict_canonicaljson=True,
limit_notifications_power_levels=True, limit_notifications_power_levels=True,
msc2175_implicit_room_creator=False,
msc2176_redaction_rules=False, msc2176_redaction_rules=False,
msc3083_join_rules=False, msc3083_join_rules=False,
msc3375_redaction_rules=False, msc3375_redaction_rules=False,
@ -353,6 +382,7 @@ class RoomVersions:
msc3787_knock_restricted_join_rule=False, msc3787_knock_restricted_join_rule=False,
msc3667_int_only_power_levels=False, msc3667_int_only_power_levels=False,
msc3931_push_features=(), msc3931_push_features=(),
msc3989_redaction_rules=False,
) )
MSC1767v10 = RoomVersion( MSC1767v10 = RoomVersion(
# MSC1767 (Extensible Events) based on room version "10" # MSC1767 (Extensible Events) based on room version "10"
@ -364,6 +394,7 @@ class RoomVersions:
special_case_aliases_auth=False, special_case_aliases_auth=False,
strict_canonicaljson=True, strict_canonicaljson=True,
limit_notifications_power_levels=True, limit_notifications_power_levels=True,
msc2175_implicit_room_creator=False,
msc2176_redaction_rules=False, msc2176_redaction_rules=False,
msc3083_join_rules=True, msc3083_join_rules=True,
msc3375_redaction_rules=True, msc3375_redaction_rules=True,
@@ -373,6 +404,28 @@ class RoomVersions:
         msc3787_knock_restricted_join_rule=True,
         msc3667_int_only_power_levels=True,
         msc3931_push_features=(PushRuleRoomFlag.EXTENSIBLE_EVENTS,),
+        msc3989_redaction_rules=False,
+    )
+    MSC3989 = RoomVersion(
+        "org.matrix.msc3989",
+        RoomDisposition.UNSTABLE,
+        EventFormatVersions.ROOM_V4_PLUS,
+        StateResolutionVersions.V2,
+        enforce_key_validity=True,
+        special_case_aliases_auth=False,
+        strict_canonicaljson=True,
+        limit_notifications_power_levels=True,
+        msc2175_implicit_room_creator=False,
+        msc2176_redaction_rules=False,
+        msc3083_join_rules=True,
+        msc3375_redaction_rules=True,
+        msc2403_knocking=True,
+        msc2716_historical=False,
+        msc2716_redactions=False,
+        msc3787_knock_restricted_join_rule=True,
+        msc3667_int_only_power_levels=True,
+        msc3931_push_features=(),
+        msc3989_redaction_rules=True,
     )
@@ -392,6 +445,7 @@ KNOWN_ROOM_VERSIONS: Dict[str, RoomVersion] = {
         RoomVersions.MSC3787,
         RoomVersions.V10,
         RoomVersions.MSC2716v4,
+        RoomVersions.MSC3989,
     )
 }
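The two new flags follow the existing pattern of per-room-version capability booleans, so downstream code branches on the flag rather than on the version string. A minimal sketch of that usage, assuming Synapse as of this commit is importable; describe_redaction_rules is a hypothetical helper, not something this commit adds:

from synapse.api.room_versions import KNOWN_ROOM_VERSIONS, RoomVersion

def describe_redaction_rules(version_id: str) -> str:
    """Summarise the redaction-related flags a known room version enables."""
    room_version: RoomVersion = KNOWN_ROOM_VERSIONS[version_id]
    flags = []
    if room_version.msc2176_redaction_rules:
        flags.append("MSC2176 (updated redaction algorithm)")
    if room_version.msc3989_redaction_rules:
        flags.append("MSC3989 (origin is redacted)")
    return ", ".join(flags) or "no experimental redaction rules"

print(describe_redaction_rules("10"))                  # -> no experimental redaction rules
print(describe_redaction_rules("org.matrix.msc3989"))  # -> MSC3989 (origin is redacted)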

View File

@@ -33,6 +33,16 @@ class AppServiceConfig(Config):
     def read_config(self, config: JsonDict, **kwargs: Any) -> None:
         self.app_service_config_files = config.get("app_service_config_files", [])
+        if not isinstance(self.app_service_config_files, list) or not all(
+            type(x) is str for x in self.app_service_config_files
+        ):
+            # type-ignore: this function gets arbitrary json value; we do use this path.
+            raise ConfigError(
+                "Expected '%s' to be a list of AS config files:"
+                % (self.app_service_config_files),
+                "app_service_config_files",
+            )
+
         self.track_appservice_user_ips = config.get("track_appservice_user_ips", False)
@@ -40,10 +50,6 @@ def load_appservices(
     hostname: str, config_files: List[str]
 ) -> List[ApplicationService]:
     """Returns a list of Application Services from the config files."""
-    if not isinstance(config_files, list):
-        # type-ignore: this function gets arbitrary json value; we do use this path.
-        logger.warning("Expected %s to be a list of AS config files.", config_files)  # type: ignore[unreachable]
-        return []

     # Dicts of value -> filename
     seen_as_tokens: Dict[str, str] = {}
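The effect of this change is that a malformed `app_service_config_files` value now fails loudly at startup with a ConfigError, instead of being silently ignored by load_appservices. A standalone sketch of the same rule, using ValueError in place of Synapse's ConfigError; the helper name and file path below are made up for illustration:

from typing import Any, List

def check_as_config_files(value: Any) -> List[str]:
    """Mirror the new check in AppServiceConfig.read_config: accept only a list of str."""
    if not isinstance(value, list) or not all(type(x) is str for x in value):
        raise ValueError("Expected %r to be a list of AS config files" % (value,))
    return value

check_as_config_files(["/etc/matrix-synapse/appservice-irc.yaml"])  # accepted

try:
    # A bare string (a common misconfiguration) is now rejected rather than ignored.
    check_as_config_files("/etc/matrix-synapse/appservice-irc.yaml")
except ValueError as e:
    print(e)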

View File

@@ -455,8 +455,11 @@ def _check_create(event: "EventBase") -> None:
             "room appears to have unsupported version %s" % (room_version_prop,),
         )

-    # 1.4 If content has no creator field, reject.
-    if EventContentFields.ROOM_CREATOR not in event.content:
+    # 1.4 If content has no creator field, reject if the room version requires it.
+    if (
+        not event.room_version.msc2175_implicit_room_creator
+        and EventContentFields.ROOM_CREATOR not in event.content
+    ):
         raise AuthError(403, "Create event lacks a 'creator' property")
@@ -491,7 +494,11 @@ def _is_membership_change_allowed(
     key = (EventTypes.Create, "")
     create = auth_events.get(key)
     if create and event.prev_event_ids()[0] == create.event_id:
-        if create.content["creator"] == event.state_key:
+        if room_version.msc2175_implicit_room_creator:
+            creator = create.sender
+        else:
+            creator = create.content[EventContentFields.ROOM_CREATOR]
+        if creator == event.state_key:
             return

     target_user_id = event.state_key
@@ -1004,10 +1011,14 @@ def get_user_power_level(user_id: str, auth_events: StateMap["EventBase"]) -> int
     # that.
     key = (EventTypes.Create, "")
     create_event = auth_events.get(key)
-    if create_event is not None and create_event.content["creator"] == user_id:
-        return 100
-    else:
-        return 0
+    if create_event is not None:
+        if create_event.room_version.msc2175_implicit_room_creator:
+            creator = create_event.sender
+        else:
+            creator = create_event.content[EventContentFields.ROOM_CREATOR]
+        if creator == user_id:
+            return 100
+    return 0


 def get_named_level(auth_events: StateMap["EventBase"], name: str, default: int) -> int:
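The same two-way branch (explicit `creator` field versus implicit creator-is-sender under MSC2175) now appears in _check_create, _is_membership_change_allowed and get_user_power_level, and again in the handlers further down. A hypothetical helper that captures the rule, shown purely as a sketch; this commit does not add it:

from synapse.api.constants import EventContentFields
from synapse.api.room_versions import RoomVersion
from synapse.events import EventBase

def get_room_creator(create_event: EventBase, room_version: RoomVersion) -> str:
    """Return the room creator's user ID for either style of m.room.create event."""
    if room_version.msc2175_implicit_room_creator:
        # MSC2175: the create event no longer carries a "creator" field,
        # so the creator is implicitly the event's sender.
        return create_event.sender
    # Older room versions: the creator is an explicit content field.
    return create_event.content[EventContentFields.ROOM_CREATOR]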

View File

@@ -106,7 +106,6 @@ def prune_event_dict(room_version: RoomVersion, event_dict: JsonDict) -> JsonDict
         "depth",
         "prev_events",
         "auth_events",
-        "origin",
         "origin_server_ts",
     ]

@@ -114,6 +113,10 @@ def prune_event_dict(room_version: RoomVersion, event_dict: JsonDict) -> JsonDict
     if not room_version.msc2176_redaction_rules:
         allowed_keys.extend(["prev_state", "membership"])

+    # Room versions before MSC3989 kept the origin field.
+    if not room_version.msc3989_redaction_rules:
+        allowed_keys.append("origin")
+
     event_type = event_dict["type"]

     new_content = {}
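In other words, under an MSC3989 room version the top-level `origin` field no longer survives redaction. A quick illustration, assuming Synapse as of this commit is importable; the event dict is a made-up minimal example:

from synapse.api.room_versions import RoomVersions
from synapse.events.utils import prune_event_dict

event = {
    "type": "m.room.message",
    "room_id": "!room:example.org",
    "sender": "@alice:example.org",
    "origin": "example.org",
    "origin_server_ts": 1680000000000,
    "content": {"msgtype": "m.text", "body": "hello"},
}

print("origin" in prune_event_dict(RoomVersions.V10, dict(event)))      # True: origin is kept
print("origin" in prune_event_dict(RoomVersions.MSC3989, dict(event)))  # False: origin is redacted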

View File

@@ -176,6 +176,9 @@ class DeactivateAccountHandler:
         # Remove account data (including ignored users and push rules).
         await self.store.purge_account_data_for_user(user_id)

+        # Delete any server-side backup keys
+        await self.store.bulk_delete_backup_keys_and_versions_for_user(user_id)
+
         # Let modules know the user has been deactivated.
         await self._third_party_rules.on_user_deactivation_status_changed(
             user_id,

View File

@@ -215,6 +215,16 @@ class DeviceWorkerHandler:
         possibly_changed = set(changed)
         possibly_left = set()
         for room_id in rooms_changed:
+            # Check if the forward extremities have changed. If not then we know
+            # the current state won't have changed, and so we can skip this room.
+            try:
+                if not await self.store.have_room_forward_extremities_changed_since(
+                    room_id, stream_ordering
+                ):
+                    continue
+            except errors.StoreError:
+                pass
+
             current_state_ids = await self._state_storage.get_current_state_ids(
                 room_id, await_full_state=False
             )

View File

@@ -1515,7 +1515,10 @@ class FederationEventHandler:
         # support it or the event is not from the room creator.
         room_version = await self._store.get_room_version(marker_event.room_id)
         create_event = await self._store.get_create_event_for_room(marker_event.room_id)
-        room_creator = create_event.content.get(EventContentFields.ROOM_CREATOR)
+        if not room_version.msc2175_implicit_room_creator:
+            room_creator = create_event.content.get(EventContentFields.ROOM_CREATOR)
+        else:
+            room_creator = create_event.sender
         if not room_version.msc2716_historical and (
             not self._config.experimental.msc2716_enabled
             or marker_event.sender != room_creator

View File

@@ -1909,7 +1909,12 @@ class EventCreationHandler:
                 room_version_obj = KNOWN_ROOM_VERSIONS[room_version]
                 create_event = await self.store.get_create_event_for_room(event.room_id)
-                room_creator = create_event.content.get(EventContentFields.ROOM_CREATOR)
+                if not room_version_obj.msc2175_implicit_room_creator:
+                    room_creator = create_event.content.get(
+                        EventContentFields.ROOM_CREATOR
+                    )
+                else:
+                    room_creator = create_event.sender

                 # Only check an insertion event if the room version
                 # supports it or the event is from the room creator.

View File

@@ -567,6 +567,7 @@ class RoomCreationHandler:
        await self._send_events_for_new_room(
            requester,
            new_room_id,
+           new_room_version,
            # we expect to override all the presets with initial_state, so this is
            # somewhat arbitrary.
            room_config={"preset": RoomCreationPreset.PRIVATE_CHAT},
@@ -922,6 +923,7 @@ class RoomCreationHandler:
        ) = await self._send_events_for_new_room(
            requester,
            room_id,
+           room_version,
            room_config=config,
            invite_list=invite_list,
            initial_state=initial_state,
@@ -998,6 +1000,7 @@ class RoomCreationHandler:
        self,
        creator: Requester,
        room_id: str,
+       room_version: RoomVersion,
        room_config: JsonDict,
        invite_list: List[str],
        initial_state: MutableStateMap,
@@ -1020,6 +1023,8 @@ class RoomCreationHandler:
                the user requesting the room creation
            room_id:
                room id for the room being created
+           room_version:
+               The room version of the new room.
            room_config:
                A dict of configuration options. This will be the body of
                a /createRoom request; see
@@ -1053,14 +1058,6 @@ class RoomCreationHandler:
            # (as this info can't be pulled from the db)
        state_map: MutableStateMap[str] = {}

-       def create_event_dict(etype: str, content: JsonDict, **kwargs: Any) -> JsonDict:
-           e = {"type": etype, "content": content}
-           e.update(event_keys)
-           e.update(kwargs)
-           return e
-
        async def create_event(
            etype: str,
            content: JsonDict,
@@ -1083,7 +1080,10 @@ class RoomCreationHandler:
            nonlocal depth
            nonlocal prev_event

-           event_dict = create_event_dict(etype, content, **kwargs)
+           # Create the event dictionary.
+           event_dict = {"type": etype, "content": content}
+           event_dict.update(event_keys)
+           event_dict.update(kwargs)

            (
                new_event,
@@ -1120,7 +1120,9 @@ class RoomCreationHandler:
                    400, f"'{preset_config}' is not a valid preset", errcode=Codes.BAD_JSON
                )

-       creation_content.update({"creator": creator_id})
+       # MSC2175 removes the creator field from the create event.
+       if not room_version.msc2175_implicit_room_creator:
+           creation_content["creator"] = creator_id
        creation_event, unpersisted_creation_context = await create_event(
            EventTypes.Create, creation_content, False
        )
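The practical effect of the hunk above on the m.room.create content, sketched for an assumed creator of @alice:example.org (other keys supplied via the /createRoom request are omitted):

# Pre-MSC2175 room version: the creator is written out explicitly.
creation_content = {"room_version": room_version.identifier, "creator": "@alice:example.org"}

# MSC2175 room version: no "creator" key at all; servers and clients derive
# the creator from the sender of the m.room.create event itself.
creation_content = {"room_version": room_version.identifier}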


@@ -13,17 +13,24 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-from typing import Dict, Iterable, Mapping, Optional, Tuple, cast
+from typing import TYPE_CHECKING, Dict, Iterable, Mapping, Optional, Tuple, cast

from typing_extensions import Literal, TypedDict

from synapse.api.errors import StoreError
from synapse.logging.opentracing import log_kv, trace
from synapse.storage._base import SQLBaseStore, db_to_json
-from synapse.storage.database import LoggingTransaction
+from synapse.storage.database import (
+    DatabasePool,
+    LoggingDatabaseConnection,
+    LoggingTransaction,
+)
from synapse.types import JsonDict, JsonSerializable, StreamKeyType
from synapse.util import json_encoder

+if TYPE_CHECKING:
+    from synapse.server import HomeServer
+

class RoomKey(TypedDict):
    """`KeyBackupData` in the Matrix spec.

@@ -37,7 +44,82 @@ class RoomKey(TypedDict):
    session_data: JsonSerializable


-class EndToEndRoomKeyStore(SQLBaseStore):
+class EndToEndRoomKeyBackgroundStore(SQLBaseStore):
def __init__(
self,
database: DatabasePool,
db_conn: LoggingDatabaseConnection,
hs: "HomeServer",
):
super().__init__(database, db_conn, hs)
self.db_pool.updates.register_background_update_handler(
"delete_e2e_backup_keys_for_deactivated_users",
self._delete_e2e_backup_keys_for_deactivated_users,
)
def _delete_keys_txn(self, txn: LoggingTransaction, user_id: str) -> None:
self.db_pool.simple_delete_txn(
txn,
table="e2e_room_keys",
keyvalues={"user_id": user_id},
)
self.db_pool.simple_delete_txn(
txn,
table="e2e_room_keys_versions",
keyvalues={"user_id": user_id},
)
async def _delete_e2e_backup_keys_for_deactivated_users(
self, progress: JsonDict, batch_size: int
) -> int:
"""
Retroactively purges server-side e2e backup keys and backup versions for users that have already been deactivated.
Gets run as a background update caused by a schema delta.
"""
last_user: str = progress.get("last_user", "")
def _delete_backup_keys_for_deactivated_users_txn(
txn: LoggingTransaction,
) -> int:
sql = """
SELECT name FROM users
WHERE deactivated = ? and name > ?
ORDER BY name ASC
LIMIT ?
"""
txn.execute(sql, (1, last_user, batch_size))
users = [row[0] for row in txn]
for user in users:
self._delete_keys_txn(txn, user)
if users:
self.db_pool.updates._background_update_progress_txn(
txn,
"delete_e2e_backup_keys_for_deactivated_users",
{"last_user": users[-1]},
)
return len(users)
number_deleted = await self.db_pool.runInteraction(
"_delete_backup_keys_for_deactivated_users",
_delete_backup_keys_for_deactivated_users_txn,
)
if number_deleted < batch_size:
await self.db_pool.updates._end_background_update(
"delete_e2e_backup_keys_for_deactivated_users"
)
return number_deleted
class EndToEndRoomKeyStore(EndToEndRoomKeyBackgroundStore):
"""The store for end to end room key backups. """The store for end to end room key backups.
See https://spec.matrix.org/v1.1/client-server-api/#server-side-key-backups See https://spec.matrix.org/v1.1/client-server-api/#server-side-key-backups
@@ -550,3 +632,29 @@ class EndToEndRoomKeyStore(SQLBaseStore):
        await self.db_pool.runInteraction(
            "delete_e2e_room_keys_version", _delete_e2e_room_keys_version_txn
        )
async def bulk_delete_backup_keys_and_versions_for_user(self, user_id: str) -> None:
"""
Bulk deletes all backup room keys and versions for a given user.
Args:
user_id: the user whose backup keys and versions we're deleting
"""
def _delete_all_e2e_room_keys_and_versions_txn(txn: LoggingTransaction) -> None:
self.db_pool.simple_delete_txn(
txn,
table="e2e_room_keys",
keyvalues={"user_id": user_id},
)
self.db_pool.simple_delete_txn(
txn,
table="e2e_room_keys_versions",
keyvalues={"user_id": user_id},
)
await self.db_pool.runInteraction(
"delete_all_e2e_room_keys_and_versions",
_delete_all_e2e_room_keys_and_versions_txn,
)
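A hedged sketch of how the new bulk_delete_backup_keys_and_versions_for_user method is expected to be driven from the account-deactivation path (the wrapper function below is an assumption for illustration; only the store method comes from this diff):

from synapse.server import HomeServer


async def erase_backup_keys_on_deactivation(hs: HomeServer, user_id: str) -> None:
    # Assumed call site: after the user has been marked as deactivated,
    # drop all of their server-side backup keys and backup versions at once.
    store = hs.get_datastores().main
    await store.bulk_delete_backup_keys_and_versions_for_user(user_id)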


@@ -1171,6 +1171,38 @@ class EventFederationWorkerStore(SignatureWorkerStore, EventsWorkerStore, SQLBaseStore):
        return int(min_depth) if min_depth is not None else None
async def have_room_forward_extremities_changed_since(
self,
room_id: str,
stream_ordering: int,
) -> bool:
"""Check if the forward extremities in a room have changed since the
given stream ordering
Throws a StoreError if we have since purged the index for
stream_orderings from that point.
"""
if stream_ordering <= self.stream_ordering_month_ago: # type: ignore[attr-defined]
raise StoreError(400, f"stream_ordering too old {stream_ordering}")
sql = """
SELECT 1 FROM stream_ordering_to_exterm
WHERE stream_ordering > ? AND room_id = ?
LIMIT 1
"""
def have_room_forward_extremities_changed_since_txn(
txn: LoggingTransaction,
) -> bool:
txn.execute(sql, (stream_ordering, room_id))
return txn.fetchone() is not None
return await self.db_pool.runInteraction(
"have_room_forward_extremities_changed_since",
have_room_forward_extremities_changed_since_txn,
)
    @cancellable
    async def get_forward_extremities_for_room_at_stream_ordering(
        self, room_id: str, stream_ordering: int
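A hedged usage sketch for the new have_room_forward_extremities_changed_since method; the wrapper below is illustrative. Callers can skip recomputation when nothing has changed, but must handle the StoreError raised once stream_ordering_to_exterm has been pruned past the given ordering:

from synapse.api.errors import StoreError
from synapse.storage.databases.main.event_federation import EventFederationWorkerStore


async def can_reuse_cached_extremities(
    store: EventFederationWorkerStore, room_id: str, since_stream_ordering: int
) -> bool:
    try:
        changed = await store.have_room_forward_extremities_changed_since(
            room_id, since_stream_ordering
        )
        return not changed
    except StoreError:
        # The index only covers roughly the last month of stream orderings;
        # anything older is treated as "may have changed".
        return False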
@@ -1232,10 +1264,17 @@ class EventFederationWorkerStore(SignatureWorkerStore, EventsWorkerStore, SQLBaseStore):
            txn.execute(sql, (stream_ordering, room_id))
            return [event_id for event_id, in txn]

-       return await self.db_pool.runInteraction(
+       event_ids = await self.db_pool.runInteraction(
            "get_forward_extremeties_for_room", get_forward_extremeties_for_room_txn
        )

+       # If we didn't find any IDs, then we must have cleared out the
+       # associated `stream_ordering_to_exterm`.
+       if not event_ids:
+           raise StoreError(400, "stream_ordering too old %s" % (stream_ordering,))
+
+       return event_ids
+
    def _get_connected_batch_event_backfill_results_txn(
        self, txn: LoggingTransaction, insertion_event_id: str, limit: int
    ) -> List[BackfillQueueNavigationItem]:
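With the hunk above, get_forward_extremities_for_room_at_stream_ordering raises a StoreError instead of silently returning an empty list once the cached extremities have been purged. A hedged caller-side sketch; the fallback to the room's current extremities is an assumption, not something this diff adds:

from typing import Collection

from synapse.api.errors import StoreError
from synapse.storage.databases.main.event_federation import EventFederationWorkerStore


async def extremities_at_or_latest(
    store: EventFederationWorkerStore, room_id: str, stream_ordering: int
) -> Collection[str]:
    try:
        return await store.get_forward_extremities_for_room_at_stream_ordering(
            room_id, stream_ordering
        )
    except StoreError:
        # Assumed fallback: the historical extremities are gone, so use the
        # room's current forward extremities instead.
        return await store.get_latest_event_ids_in_room(room_id)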
@@ -1664,19 +1703,12 @@ class EventFederationWorkerStore(SignatureWorkerStore, EventsWorkerStore, SQLBaseStore):
    @wrap_as_background_process("delete_old_forward_extrem_cache")
    async def _delete_old_forward_extrem_cache(self) -> None:
        def _delete_old_forward_extrem_cache_txn(txn: LoggingTransaction) -> None:
-           # Delete entries older than a month, while making sure we don't delete
-           # the only entries for a room.
            sql = """
                DELETE FROM stream_ordering_to_exterm
-               WHERE
-               room_id IN (
-                   SELECT room_id
-                   FROM stream_ordering_to_exterm
-                   WHERE stream_ordering > ?
-               ) AND stream_ordering < ?
+               WHERE stream_ordering < ?
            """
            txn.execute(
-               sql, (self.stream_ordering_month_ago, self.stream_ordering_month_ago)  # type: ignore[attr-defined]
+               sql, (self.stream_ordering_month_ago,)  # type: ignore[attr-defined]
            )

        await self.db_pool.runInteraction(


@@ -1998,6 +1998,9 @@ class RoomBackgroundUpdateStore(SQLBaseStore):
            for room_id, event_json in room_id_to_create_event_results:
                event_dict = db_to_json(event_json)

+               # The creator property might not exist in newer room versions, but
+               # for those versions the creator column should be properly populated
+               # during room creation.
                creator = event_dict.get("content").get(EventContentFields.ROOM_CREATOR)

                self.db_pool.simple_update_txn(
@@ -2132,12 +2135,16 @@ class RoomStore(RoomBackgroundUpdateStore, RoomWorkerStore):
            # invalid, and it would fail auth checks anyway.
            raise StoreError(400, "No create event in state")

-       room_creator = create_event.content.get(EventContentFields.ROOM_CREATOR)
-
-       if not isinstance(room_creator, str):
-           # If the create event does not have a creator then the room is
-           # invalid, and it would fail auth checks anyway.
-           raise StoreError(400, "No creator defined on the create event")
+       # Before MSC2175, the room creator was a separate field.
+       if not room_version.msc2175_implicit_room_creator:
+           room_creator = create_event.content.get(EventContentFields.ROOM_CREATOR)
+
+           if not isinstance(room_creator, str):
+               # If the create event does not have a creator then the room is
+               # invalid, and it would fail auth checks anyway.
+               raise StoreError(400, "No creator defined on the create event")
+       else:
+           room_creator = create_event.sender

        await self.db_pool.simple_upsert(
            desc="upsert_room_on_join",

@@ -0,0 +1,17 @@
/* Copyright 2023 The Matrix.org Foundation C.I.C
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
INSERT INTO background_updates (ordering, update_name, progress_json) VALUES
(7404, 'delete_e2e_backup_keys_for_deactivated_users', '{}');


@@ -0,0 +1,40 @@
# Copyright 2023 Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from synapse.config.appservice import AppServiceConfig, ConfigError
from tests.unittest import TestCase
class AppServiceConfigTest(TestCase):
def test_invalid_app_service_config_files(self) -> None:
for invalid_value in [
"foobar",
1,
None,
True,
False,
{},
["foo", "bar", False],
]:
with self.assertRaises(ConfigError):
AppServiceConfig().read_config(
{"app_service_config_files": invalid_value}
)
def test_valid_app_service_config_files(self) -> None:
AppServiceConfig().read_config({"app_service_config_files": []})
AppServiceConfig().read_config(
{"app_service_config_files": ["/not/a/real/path", "/not/a/real/path/2"]}
)
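For reference, the accepted shape is simply a list of registration-file paths; anything else is rejected with a ConfigError, mirroring the test above. The path below is hypothetical:

from synapse.config.appservice import AppServiceConfig, ConfigError

config = AppServiceConfig()

# Accepted: a (possibly empty) list of file paths.
config.read_config({"app_service_config_files": ["/etc/synapse/appservice-bridge.yaml"]})

# Rejected: anything that is not a list of strings.
try:
    config.read_config({"app_service_config_files": "not-a-list"})
except ConfigError:
    pass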


@@ -143,6 +143,13 @@ class PruneEventTestCase(stdlib_unittest.TestCase):
            room_version=RoomVersions.MSC2176,
        )

+       # As of MSC3989 we now redact the origin key.
+       self.run_test(
+           {"type": "A", "origin": "example.com"},
+           {"type": "A", "content": {}, "signatures": {}, "unsigned": {}},
+           room_version=RoomVersions.MSC3989,
+       )
+
    def test_unsigned(self) -> None:
        """Ensure that unsigned properties get stripped (except age_ts and replaces_state)."""
        self.run_test(
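A self-contained sketch of what the new test case asserts: under MSC3989 the top-level origin key is no longer kept by redaction. The keep-list below is deliberately simplified for illustration and is not the full redaction algorithm:

from typing import Any, Dict

# Simplified top-level keep-lists, illustration only.
KEEP_PRE_MSC3989 = {"type", "room_id", "sender", "content", "origin", "signatures", "unsigned"}
KEEP_MSC3989 = KEEP_PRE_MSC3989 - {"origin"}


def prune_top_level(event: Dict[str, Any], msc3989: bool) -> Dict[str, Any]:
    keep = KEEP_MSC3989 if msc3989 else KEEP_PRE_MSC3989
    pruned = {k: v for k, v in event.items() if k in keep}
    # Mirror the test's expectation for an event of type "A": content is
    # emptied and the signatures/unsigned keys are always present.
    pruned["content"] = {}
    pruned.setdefault("signatures", {})
    pruned.setdefault("unsigned", {})
    return pruned


# prune_top_level({"type": "A", "origin": "example.com"}, msc3989=True)
# == {"type": "A", "content": {}, "signatures": {}, "unsigned": {}}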


@@ -474,6 +474,163 @@ class DeactivateTestCase(unittest.HomeserverTestCase):
        self.assertEqual(len(memberships), 1, memberships)
        self.assertEqual(memberships[0].room_id, room_id, memberships)
def test_deactivate_account_deletes_server_side_backup_keys(self) -> None:
key_handler = self.hs.get_e2e_room_keys_handler()
room_keys = {
"rooms": {
"!abc:matrix.org": {
"sessions": {
"c0ff33": {
"first_message_index": 1,
"forwarded_count": 1,
"is_verified": False,
"session_data": "SSBBTSBBIEZJU0gK",
}
}
}
}
}
user_id = self.register_user("missPiggy", "test")
tok = self.login("missPiggy", "test")
# add some backup keys/versions
version = self.get_success(
key_handler.create_version(
user_id,
{
"algorithm": "m.megolm_backup.v1",
"auth_data": "first_version_auth_data",
},
)
)
self.get_success(key_handler.upload_room_keys(user_id, version, room_keys))
version2 = self.get_success(
key_handler.create_version(
user_id,
{
"algorithm": "m.megolm_backup.v1",
"auth_data": "second_version_auth_data",
},
)
)
self.get_success(key_handler.upload_room_keys(user_id, version2, room_keys))
self.deactivate(user_id, tok)
store = self.hs.get_datastores().main
# Check that the user has been marked as deactivated.
self.assertTrue(self.get_success(store.get_user_deactivated_status(user_id)))
# Check that there are no entries in 'e2e_room_keys` and `e2e_room_keys_versions`
res = self.get_success(
self.hs.get_datastores().main.db_pool.simple_select_list(
"e2e_room_keys", {"user_id": user_id}, "*", "simple_select"
)
)
self.assertEqual(len(res), 0)
res2 = self.get_success(
self.hs.get_datastores().main.db_pool.simple_select_list(
"e2e_room_keys_versions", {"user_id": user_id}, "*", "simple_select"
)
)
self.assertEqual(len(res2), 0)
def test_background_update_deletes_deactivated_users_server_side_backup_keys(
self,
) -> None:
key_handler = self.hs.get_e2e_room_keys_handler()
room_keys = {
"rooms": {
"!abc:matrix.org": {
"sessions": {
"c0ff33": {
"first_message_index": 1,
"forwarded_count": 1,
"is_verified": False,
"session_data": "SSBBTSBBIEZJU0gK",
}
}
}
}
}
self.store = self.hs.get_datastores().main
# create a bunch of users and add keys for them
users = []
for i in range(0, 20):
user_id = self.register_user("missPiggy" + str(i), "test")
users.append((user_id,))
# add some backup keys/versions
version = self.get_success(
key_handler.create_version(
user_id,
{
"algorithm": "m.megolm_backup.v1",
"auth_data": str(i) + "_version_auth_data",
},
)
)
self.get_success(key_handler.upload_room_keys(user_id, version, room_keys))
version2 = self.get_success(
key_handler.create_version(
user_id,
{
"algorithm": "m.megolm_backup.v1",
"auth_data": str(i) + "_version_auth_data",
},
)
)
self.get_success(key_handler.upload_room_keys(user_id, version2, room_keys))
# deactivate most of the users by editing DB
self.get_success(
self.store.db_pool.simple_update_many(
table="users",
key_names=("name",),
key_values=users[0:18],
value_names=("deactivated",),
value_values=[(1,) for i in range(1, 19)],
desc="",
)
)
# run background update
self.get_success(
self.store.db_pool.simple_insert(
"background_updates",
{
"update_name": "delete_e2e_backup_keys_for_deactivated_users",
"progress_json": "{}",
},
)
)
self.store.db_pool.updates._all_done = False
self.wait_for_background_updates()
# check that keys are deleted for the deactivated users but not the others
res = self.get_success(
self.hs.get_datastores().main.db_pool.simple_select_list(
"e2e_room_keys", None, ("user_id",), "simple_select"
)
)
self.assertEqual(len(res), 4)
res2 = self.get_success(
self.hs.get_datastores().main.db_pool.simple_select_list(
"e2e_room_keys_versions", None, ("user_id",), "simple_select"
)
)
self.assertEqual(len(res2), 4)
    def deactivate(self, user_id: str, tok: str) -> None:
        request_data = {
            "auth": {