Merge branch 'develop' of github.com:matrix-org/synapse into matrix-org-hotfixes

commit 118e789e0c

@@ -1,16 +1,16 @@
 #!/usr/bin/env bash
 
-# this script is run by buildkite in a plain `xenial` container; it installs the
-# minimal requirements for tox and hands over to the py35-old tox environment.
+# this script is run by buildkite in a plain `bionic` container; it installs the
+# minimal requirements for tox and hands over to the py3-old tox environment.
 
 set -ex
 
 apt-get update
-apt-get install -y python3.5 python3.5-dev python3-pip libxml2-dev libxslt-dev xmlsec1 zlib1g-dev tox
+apt-get install -y python3 python3-dev python3-pip libxml2-dev libxslt-dev xmlsec1 zlib1g-dev tox
 
 export LANG="C.UTF-8"
 
 # Prevent virtualenv from auto-updating pip to an incompatible version
 export VIRTUALENV_NO_DOWNLOAD=1
 
-exec tox -e py35-old,combine
+exec tox -e py3-old,combine
@@ -0,0 +1,322 @@
+name: Tests
+
+on:
+  push:
+    branches: ["develop", "release-*"]
+  pull_request:
+
+jobs:
+  lint:
+    runs-on: ubuntu-latest
+    strategy:
+      matrix:
+        toxenv:
+          - "check-sampleconfig"
+          - "check_codestyle"
+          - "check_isort"
+          - "mypy"
+          - "packaging"
+
+    steps:
+      - uses: actions/checkout@v2
+      - uses: actions/setup-python@v2
+      - run: pip install tox
+      - run: tox -e ${{ matrix.toxenv }}
+
+  lint-crlf:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v2
+      - name: Check line endings
+        run: scripts-dev/check_line_terminators.sh
+
+  lint-newsfile:
+    if: ${{ github.base_ref == 'develop' || contains(github.base_ref, 'release-') }}
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v2
+      - uses: actions/setup-python@v2
+      - run: pip install tox
+      - name: Patch Buildkite-specific test script
+        run: |
+          sed -i -e 's/\$BUILDKITE_PULL_REQUEST/${{ github.event.number }}/' \
+            scripts-dev/check-newsfragment
+      - run: scripts-dev/check-newsfragment
+
+  lint-sdist:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v2
+      - uses: actions/setup-python@v2
+        with:
+          python-version: "3.x"
+      - run: pip install wheel
+      - run: python setup.py sdist bdist_wheel
+      - uses: actions/upload-artifact@v2
+        with:
+          name: Python Distributions
+          path: dist/*
+
+  # Dummy step to gate other tests on without repeating the whole list
+  linting-done:
+    if: ${{ always() }} # Run this even if prior jobs were skipped
+    needs: [lint, lint-crlf, lint-newsfile, lint-sdist]
+    runs-on: ubuntu-latest
+    steps:
+      - run: "true"
+
+  trial:
+    if: ${{ !failure() }} # Allow previous steps to be skipped, but not fail
+    needs: linting-done
+    runs-on: ubuntu-latest
+    strategy:
+      matrix:
+        python-version: ["3.6", "3.7", "3.8", "3.9"]
+        database: ["sqlite"]
+        include:
+          # Newest Python without optional deps
+          - python-version: "3.9"
+            toxenv: "py-noextras,combine"
+
+          # Oldest Python with PostgreSQL
+          - python-version: "3.6"
+            database: "postgres"
+            postgres-version: "9.6"
+
+          # Newest Python with PostgreSQL
+          - python-version: "3.9"
+            database: "postgres"
+            postgres-version: "13"
+
+    steps:
+      - uses: actions/checkout@v2
+      - run: sudo apt-get -qq install xmlsec1
+      - name: Set up PostgreSQL ${{ matrix.postgres-version }}
+        if: ${{ matrix.postgres-version }}
+        run: |
+          docker run -d -p 5432:5432 \
+            -e POSTGRES_PASSWORD=postgres \
+            -e POSTGRES_INITDB_ARGS="--lc-collate C --lc-ctype C --encoding UTF8" \
+            postgres:${{ matrix.postgres-version }}
+      - uses: actions/setup-python@v2
+        with:
+          python-version: ${{ matrix.python-version }}
+      - run: pip install tox
+      - name: Await PostgreSQL
+        if: ${{ matrix.postgres-version }}
+        timeout-minutes: 2
+        run: until pg_isready -h localhost; do sleep 1; done
+      - run: tox -e py,combine
+        env:
+          TRIAL_FLAGS: "--jobs=2"
+          SYNAPSE_POSTGRES: ${{ matrix.database == 'postgres' || '' }}
+          SYNAPSE_POSTGRES_HOST: localhost
+          SYNAPSE_POSTGRES_USER: postgres
+          SYNAPSE_POSTGRES_PASSWORD: postgres
+      - name: Dump logs
+        # Note: Dumps to workflow logs instead of using actions/upload-artifact
+        #       This keeps logs colocated with failing jobs
+        #       It also ignores find's exit code; this is a best effort affair
+        run: >-
+          find _trial_temp -name '*.log'
+          -exec echo "::group::{}" \;
+          -exec cat {} \;
+          -exec echo "::endgroup::" \;
+          || true
+
+  trial-olddeps:
+    if: ${{ !failure() }} # Allow previous steps to be skipped, but not fail
+    needs: linting-done
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v2
+      - name: Test with old deps
+        uses: docker://ubuntu:bionic # For old python and sqlite
+        with:
+          workdir: /github/workspace
+          entrypoint: .buildkite/scripts/test_old_deps.sh
+        env:
+          TRIAL_FLAGS: "--jobs=2"
+      - name: Dump logs
+        # Note: Dumps to workflow logs instead of using actions/upload-artifact
+        #       This keeps logs colocated with failing jobs
+        #       It also ignores find's exit code; this is a best effort affair
+        run: >-
+          find _trial_temp -name '*.log'
+          -exec echo "::group::{}" \;
+          -exec cat {} \;
+          -exec echo "::endgroup::" \;
+          || true
+
+  trial-pypy:
+    # Very slow; only run if the branch name includes 'pypy'
+    if: ${{ contains(github.ref, 'pypy') && !failure() }}
+    needs: linting-done
+    runs-on: ubuntu-latest
+    strategy:
+      matrix:
+        python-version: ["pypy-3.6"]
+
+    steps:
+      - uses: actions/checkout@v2
+      - run: sudo apt-get -qq install xmlsec1 libxml2-dev libxslt-dev
+      - uses: actions/setup-python@v2
+        with:
+          python-version: ${{ matrix.python-version }}
+      - run: pip install tox
+      - run: tox -e py,combine
+        env:
+          TRIAL_FLAGS: "--jobs=2"
+      - name: Dump logs
+        # Note: Dumps to workflow logs instead of using actions/upload-artifact
+        #       This keeps logs colocated with failing jobs
+        #       It also ignores find's exit code; this is a best effort affair
+        run: >-
+          find _trial_temp -name '*.log'
+          -exec echo "::group::{}" \;
+          -exec cat {} \;
+          -exec echo "::endgroup::" \;
+          || true
+
+  sytest:
+    if: ${{ !failure() }}
+    needs: linting-done
+    runs-on: ubuntu-latest
+    container:
+      image: matrixdotorg/sytest-synapse:${{ matrix.sytest-tag }}
+      volumes:
+        - ${{ github.workspace }}:/src
+      env:
+        BUILDKITE_BRANCH: ${{ github.head_ref }}
+        POSTGRES: ${{ matrix.postgres && 1 }}
+        MULTI_POSTGRES: ${{ (matrix.postgres == 'multi-postgres') && 1 }}
+        WORKERS: ${{ matrix.workers && 1 }}
+        REDIS: ${{ matrix.redis && 1 }}
+        BLACKLIST: ${{ matrix.workers && 'synapse-blacklist-with-workers' }}
+
+    strategy:
+      fail-fast: false
+      matrix:
+        include:
+          - sytest-tag: bionic
+
+          - sytest-tag: bionic
+            postgres: postgres
+
+          - sytest-tag: testing
+            postgres: postgres
+
+          - sytest-tag: bionic
+            postgres: multi-postgres
+            workers: workers
+
+          - sytest-tag: buster
+            postgres: multi-postgres
+            workers: workers
+
+          - sytest-tag: buster
+            postgres: postgres
+            workers: workers
+            redis: redis
+
+    steps:
+      - uses: actions/checkout@v2
+      - name: Prepare test blacklist
+        run: cat sytest-blacklist .buildkite/worker-blacklist > synapse-blacklist-with-workers
+      - name: Run SyTest
+        run: /bootstrap.sh synapse
+        working-directory: /src
+      - name: Dump results.tap
+        if: ${{ always() }}
+        run: cat /logs/results.tap
+      - name: Upload SyTest logs
+        uses: actions/upload-artifact@v2
+        if: ${{ always() }}
+        with:
+          name: Sytest Logs - ${{ job.status }} - (${{ join(matrix.*, ', ') }})
+          path: |
+            /logs/results.tap
+            /logs/**/*.log*
+
+  portdb:
+    if: ${{ !failure() }} # Allow previous steps to be skipped, but not fail
+    needs: linting-done
+    runs-on: ubuntu-latest
+    strategy:
+      matrix:
+        include:
+          - python-version: "3.6"
+            postgres-version: "9.6"
+
+          - python-version: "3.9"
+            postgres-version: "13"
+
+    services:
+      postgres:
+        image: postgres:${{ matrix.postgres-version }}
+        ports:
+          - 5432:5432
+        env:
+          POSTGRES_PASSWORD: "postgres"
+          POSTGRES_INITDB_ARGS: "--lc-collate C --lc-ctype C --encoding UTF8"
+        options: >-
+          --health-cmd pg_isready
+          --health-interval 10s
+          --health-timeout 5s
+          --health-retries 5
+
+    steps:
+      - uses: actions/checkout@v2
+      - run: sudo apt-get -qq install xmlsec1
+      - uses: actions/setup-python@v2
+        with:
+          python-version: ${{ matrix.python-version }}
+      - name: Patch Buildkite-specific test scripts
+        run: |
+          sed -i -e 's/host="postgres"/host="localhost"/' .buildkite/scripts/create_postgres_db.py
+          sed -i -e 's/host: postgres/host: localhost/' .buildkite/postgres-config.yaml
+          sed -i -e 's|/src/||' .buildkite/{sqlite,postgres}-config.yaml
+          sed -i -e 's/\$TOP/\$GITHUB_WORKSPACE/' .coveragerc
+      - run: .buildkite/scripts/test_synapse_port_db.sh
+
+  complement:
+    if: ${{ !failure() }}
+    needs: linting-done
+    runs-on: ubuntu-latest
+    container:
+      # https://github.com/matrix-org/complement/blob/master/dockerfiles/ComplementCIBuildkite.Dockerfile
+      image: matrixdotorg/complement:latest
+      env:
+        CI: true
+      ports:
+        - 8448:8448
+      volumes:
+        - /var/run/docker.sock:/var/run/docker.sock
+
+    steps:
+      - name: Run actions/checkout@v2 for synapse
+        uses: actions/checkout@v2
+        with:
+          path: synapse
+
+      - name: Run actions/checkout@v2 for complement
+        uses: actions/checkout@v2
+        with:
+          repository: "matrix-org/complement"
+          path: complement
+
+      # Build initial Synapse image
+      - run: docker build -t matrixdotorg/synapse:latest -f docker/Dockerfile .
+        working-directory: synapse
+
+      # Build a ready-to-run Synapse image based on the initial image above.
+      # This new image includes a config file, keys for signing and TLS, and
+      # other settings to make it suitable for testing under Complement.
+      - run: docker build -t complement-synapse -f Synapse.Dockerfile .
+        working-directory: complement/dockerfiles
+
+      # Run Complement
+      - run: go test -v -tags synapse_blacklist ./tests
+        env:
+          COMPLEMENT_BASE_IMAGE: complement-synapse:latest
+        working-directory: complement
CHANGES.md
@@ -1,3 +1,75 @@
+Synapse 1.32.0rc1 (2021-04-13)
+==============================
+
+**Note:** This release requires Python 3.6+ and Postgres 9.6+ or SQLite 3.22+.
+
+This release removes the deprecated `GET /_synapse/admin/v1/users/<user_id>` admin API. Please use the [v2 API](https://github.com/matrix-org/synapse/blob/develop/docs/admin_api/user_admin_api.rst#query-user-account) instead, which has improved capabilities.
+
+This release requires Application Services to use type `m.login.application_service` when registering users via the `/_matrix/client/r0/register` endpoint to comply with the spec. Please ensure your Application Services are up to date.
+
+Features
+--------
+
+- Add a Synapse module for routing presence updates between users. ([\#9491](https://github.com/matrix-org/synapse/issues/9491))
+- Add an admin API to manage ratelimit for a specific user. ([\#9648](https://github.com/matrix-org/synapse/issues/9648))
+- Include request information in structured logging output. ([\#9654](https://github.com/matrix-org/synapse/issues/9654))
+- Add `order_by` to the admin API `GET /_synapse/admin/v2/users`. Contributed by @dklimpel. ([\#9691](https://github.com/matrix-org/synapse/issues/9691))
+- Replace the `room_invite_state_types` configuration setting with `room_prejoin_state`. ([\#9700](https://github.com/matrix-org/synapse/issues/9700))
+- Add experimental support for [MSC3083](https://github.com/matrix-org/matrix-doc/pull/3083): restricting room access via group membership. ([\#9717](https://github.com/matrix-org/synapse/issues/9717), [\#9735](https://github.com/matrix-org/synapse/issues/9735))
+- Update experimental support for Spaces: include `m.room.create` in the room state sent with room-invites. ([\#9710](https://github.com/matrix-org/synapse/issues/9710))
+- Synapse now requires Python 3.6 or later. It also requires Postgres 9.6 or later or SQLite 3.22 or later. ([\#9766](https://github.com/matrix-org/synapse/issues/9766))
+
+
+Bugfixes
+--------
+
+- Prevent `synapse_forward_extremities` and `synapse_excess_extremity_events` Prometheus metrics from initially reporting zero-values after startup. ([\#8926](https://github.com/matrix-org/synapse/issues/8926))
+- Fix recently added ratelimits to correctly honour the application service `rate_limited` flag. ([\#9711](https://github.com/matrix-org/synapse/issues/9711))
+- Fix longstanding bug which caused `duplicate key value violates unique constraint "remote_media_cache_thumbnails_media_origin_media_id_thumbna_key"` errors. ([\#9725](https://github.com/matrix-org/synapse/issues/9725))
+- Fix bug where sharded federation senders could get stuck repeatedly querying the DB in a loop, using lots of CPU. ([\#9770](https://github.com/matrix-org/synapse/issues/9770))
+- Fix duplicate logging of exceptions thrown during federation transaction processing. ([\#9780](https://github.com/matrix-org/synapse/issues/9780))
+
+
+Updates to the Docker image
+---------------------------
+
+- Move opencontainers labels to the final Docker image such that users can inspect them. ([\#9765](https://github.com/matrix-org/synapse/issues/9765))
+
+
+Improved Documentation
+----------------------
+
+- Make the `allowed_local_3pids` regex example in the sample config stricter. ([\#9719](https://github.com/matrix-org/synapse/issues/9719))
+
+
+Deprecations and Removals
+-------------------------
+
+- Remove old admin API `GET /_synapse/admin/v1/users/<user_id>`. ([\#9401](https://github.com/matrix-org/synapse/issues/9401))
+- Make `/_matrix/client/r0/register` expect a type of `m.login.application_service` when an Application Service registers a user, to align with [the relevant spec](https://spec.matrix.org/unstable/application-service-api/#server-admin-style-permissions). ([\#9548](https://github.com/matrix-org/synapse/issues/9548))
+
+
+Internal Changes
+----------------
+
+- Replace deprecated `imp` module with successor `importlib`. Contributed by Cristina Muñoz. ([\#9718](https://github.com/matrix-org/synapse/issues/9718))
+- Experiment with GitHub Actions for CI. ([\#9661](https://github.com/matrix-org/synapse/issues/9661))
+- Introduce flake8-bugbear to the test suite and fix some of its lint violations. ([\#9682](https://github.com/matrix-org/synapse/issues/9682))
+- Update `scripts-dev/complement.sh` to use a local checkout of Complement, allow running a subset of tests and have it use Synapse's Complement test blacklist. ([\#9685](https://github.com/matrix-org/synapse/issues/9685))
+- Improve Jaeger tracing for `to_device` messages. ([\#9686](https://github.com/matrix-org/synapse/issues/9686))
+- Add release helper script for automating part of the Synapse release process. ([\#9713](https://github.com/matrix-org/synapse/issues/9713))
+- Add type hints to expiring cache. ([\#9730](https://github.com/matrix-org/synapse/issues/9730))
+- Convert various testcases to `HomeserverTestCase`. ([\#9736](https://github.com/matrix-org/synapse/issues/9736))
+- Start linting mypy with `no_implicit_optional`. ([\#9742](https://github.com/matrix-org/synapse/issues/9742))
+- Add missing type hints to federation handler and server. ([\#9743](https://github.com/matrix-org/synapse/issues/9743))
+- Check that a `ConfigError` is raised, rather than simply `Exception`, when appropriate in homeserver config file generation tests. ([\#9753](https://github.com/matrix-org/synapse/issues/9753))
+- Fix incompatibility with `tox` 2.5. ([\#9769](https://github.com/matrix-org/synapse/issues/9769))
+- Enable Complement tests for [MSC2946](https://github.com/matrix-org/matrix-doc/pull/2946): Spaces Summary API. ([\#9771](https://github.com/matrix-org/synapse/issues/9771))
+- Use mock from the standard library instead of a separate package. ([\#9772](https://github.com/matrix-org/synapse/issues/9772))
+- Update Black configuration to target Python 3.6. ([\#9781](https://github.com/matrix-org/synapse/issues/9781))
+- Add option to skip unit tests when building Debian packages. ([\#9793](https://github.com/matrix-org/synapse/issues/9793))
+
+
 Synapse 1.31.0 (2021-04-06)
 ===========================
 
UPGRADE.rst
@@ -85,6 +85,19 @@ for example:
    wget https://packages.matrix.org/debian/pool/main/m/matrix-synapse-py3/matrix-synapse-py3_1.3.0+stretch1_amd64.deb
    dpkg -i matrix-synapse-py3_1.3.0+stretch1_amd64.deb
 
+Upgrading to v1.32.0
+====================
+
+Removal of old List Accounts Admin API
+--------------------------------------
+
+The deprecated v1 "list accounts" admin API (``GET /_synapse/admin/v1/users/<user_id>``) has been removed in this version.
+
+The `v2 list accounts API <https://github.com/matrix-org/synapse/blob/master/docs/admin_api/user_admin_api.rst#list-accounts>`_
+has been available since Synapse 1.7.0 (2019-12-13), and is accessible under ``GET /_synapse/admin/v2/users``.
+
+The deprecation of the old endpoint was announced with Synapse 1.28.0 (released on 2021-02-25).
+
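As an illustration, here is a minimal sketch of listing accounts via the v2 endpoint from Python; the homeserver URL and admin access token below are placeholders:

.. code:: python

    import requests

    BASE_URL = "https://homeserver.example.com"  # placeholder homeserver
    HEADERS = {"Authorization": "Bearer <admin access token>"}

    # GET /_synapse/admin/v2/users replaces the removed v1 endpoint.
    resp = requests.get(
        f"{BASE_URL}/_synapse/admin/v2/users",
        headers=HEADERS,
        params={"from": 0, "limit": 10},
    )
    resp.raise_for_status()
    for user in resp.json()["users"]:
        print(user["name"])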
 Upgrading to v1.29.0
 ====================
 
@@ -1 +0,0 @@
-Prevent `synapse_forward_extremities` and `synapse_excess_extremity_events` Prometheus metrics from initially reporting zero-values after startup.
@@ -1 +0,0 @@
-Add a Synapse module for routing presence updates between users.
@@ -1 +0,0 @@
-Include request information in structured logging output.
@@ -1 +0,0 @@
-Update `scripts-dev/complement.sh` to use a local checkout of Complement, allow running a subset of tests and have it use Synapse's Complement test blacklist.
@@ -1 +0,0 @@
-Improve Jaeger tracing for `to_device` messages.
@@ -1 +0,0 @@
-Add `order_by` to the admin API `GET /_synapse/admin/v2/users`. Contributed by @dklimpel.
@@ -1 +0,0 @@
-Replace the `room_invite_state_types` configuration setting with `room_prejoin_state`.
@@ -1 +0,0 @@
-Experimental Spaces support: include `m.room.create` in the room state sent with room-invites.
@@ -1 +0,0 @@
-Fix recently added ratelimits to correctly honour the application service `rate_limited` flag.
@@ -1 +0,0 @@
-Add experimental support for [MSC3083](https://github.com/matrix-org/matrix-doc/pull/3083): restricting room access via group membership.
@@ -1 +0,0 @@
-Replace deprecated `imp` module with successor `importlib`. Contributed by Cristina Muñoz.
@@ -1 +0,0 @@
-Make the allowed_local_3pids regex example in the sample config stricter.
@@ -1 +0,0 @@
-Fix longstanding bug which caused `duplicate key value violates unique constraint "remote_media_cache_thumbnails_media_origin_media_id_thumbna_key"` errors.
@@ -1 +0,0 @@
-Add type hints to expiring cache.
@@ -1 +0,0 @@
-Convert various testcases to `HomeserverTestCase`.
@@ -1 +0,0 @@
-Start linting mypy with `no_implicit_optional`.
@@ -1 +0,0 @@
-Add missing type hints to federation handler and server.
@@ -1 +0,0 @@
-Check that a `ConfigError` is raised, rather than simply `Exception`, when appropriate in homeserver config file generation tests.
@@ -1 +0,0 @@
-Fix bug where sharded federation senders could get stuck repeatedly querying the DB in a loop, using lots of CPU.
@@ -24,6 +24,7 @@ import sys
 import time
 import urllib
 from http import TwistedHttpClient
+from typing import Optional
 
 import nacl.encoding
 import nacl.signing
@@ -718,7 +719,7 @@ class SynapseCmd(cmd.Cmd):
         method,
         path,
         data=None,
-        query_params={"access_token": None},
+        query_params: Optional[dict] = None,
         alt_text=None,
     ):
         """Runs an HTTP request and pretty prints the output.
@@ -729,6 +730,8 @@ class SynapseCmd(cmd.Cmd):
             data: Raw JSON data if any
             query_params: dict of query parameters to add to the url
         """
+        query_params = query_params or {"access_token": None}
+
         url = self._url() + path
         if "access_token" in query_params:
             query_params["access_token"] = self._tok()
@@ -16,6 +16,7 @@
 import json
 import urllib
 from pprint import pformat
+from typing import Optional
 
 from twisted.internet import defer, reactor
 from twisted.web.client import Agent, readBody
@@ -85,8 +86,9 @@ class TwistedHttpClient(HttpClient):
         body = yield readBody(response)
         defer.returnValue(json.loads(body))
 
-    def _create_put_request(self, url, json_data, headers_dict={}):
+    def _create_put_request(self, url, json_data, headers_dict: Optional[dict] = None):
         """Wrapper of _create_request to issue a PUT request"""
+        headers_dict = headers_dict or {}
 
         if "Content-Type" not in headers_dict:
             raise defer.error(RuntimeError("Must include Content-Type header for PUTs"))
@@ -95,14 +97,22 @@ class TwistedHttpClient(HttpClient):
             "PUT", url, producer=_JsonProducer(json_data), headers_dict=headers_dict
         )
 
-    def _create_get_request(self, url, headers_dict={}):
+    def _create_get_request(self, url, headers_dict: Optional[dict] = None):
         """Wrapper of _create_request to issue a GET request"""
-        return self._create_request("GET", url, headers_dict=headers_dict)
+        return self._create_request("GET", url, headers_dict=headers_dict or {})
 
     @defer.inlineCallbacks
     def do_request(
-        self, method, url, data=None, qparams=None, jsonreq=True, headers={}
+        self,
+        method,
+        url,
+        data=None,
+        qparams=None,
+        jsonreq=True,
+        headers: Optional[dict] = None,
     ):
+        headers = headers or {}
+
         if qparams:
             url = "%s?%s" % (url, urllib.urlencode(qparams, True))
 
@@ -123,8 +133,12 @@ class TwistedHttpClient(HttpClient):
         defer.returnValue(json.loads(body))
 
     @defer.inlineCallbacks
-    def _create_request(self, method, url, producer=None, headers_dict={}):
+    def _create_request(
+        self, method, url, producer=None, headers_dict: Optional[dict] = None
+    ):
        """Creates and sends a request to the given url"""
+        headers_dict = headers_dict or {}
 
        headers_dict["User-Agent"] = ["Synapse Cmd Client"]
 
        retries_left = 5
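(The `headers_dict={}` to `Optional[dict] = None` rewrites above all fix the same Python pitfall: a mutable default argument is created once, at function definition time, and then shared between calls. A self-contained sketch of the bug and the idiom used here; the function names are illustrative only:)

    def broken(item, bucket=[]):
        # One list object is created when the function is defined and then
        # reused by every call that omits `bucket`.
        bucket.append(item)
        return bucket

    def fixed(item, bucket=None):
        # A fresh list is created per call unless the caller supplies one.
        bucket = bucket or []
        bucket.append(item)
        return bucket

    assert broken(1) == [1]
    assert broken(2) == [1, 2]  # surprise: state leaked across calls
    assert fixed(1) == [1]
    assert fixed(2) == [2]      # independent calls stay independent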
@@ -50,15 +50,24 @@ PACKAGE_BUILD_DIR="debian/matrix-synapse-py3"
 VIRTUALENV_DIR="${PACKAGE_BUILD_DIR}${DH_VIRTUALENV_INSTALL_ROOT}/matrix-synapse"
 TARGET_PYTHON="${VIRTUALENV_DIR}/bin/python"
 
-# we copy the tests to a temporary directory so that we can put them on the
-# PYTHONPATH without putting the uninstalled synapse on the pythonpath.
-tmpdir=`mktemp -d`
-trap "rm -r $tmpdir" EXIT
+case "$DEB_BUILD_OPTIONS" in
+    *nocheck*)
+        # Skip running tests if "nocheck" present in $DEB_BUILD_OPTIONS
+        ;;
 
-cp -r tests "$tmpdir"
+    *)
+        # Copy tests to a temporary directory so that we can put them on the
+        # PYTHONPATH without putting the uninstalled synapse on the pythonpath.
+        tmpdir=`mktemp -d`
+        trap "rm -r $tmpdir" EXIT
 
-PYTHONPATH="$tmpdir" \
-    "${TARGET_PYTHON}" -m twisted.trial --reporter=text -j2 tests
+        cp -r tests "$tmpdir"
+
+        PYTHONPATH="$tmpdir" \
+            "${TARGET_PYTHON}" -m twisted.trial --reporter=text -j2 tests
+
+        ;;
+esac
 
 # build the config file
 "${TARGET_PYTHON}" "${VIRTUALENV_DIR}/bin/generate_config" \
@@ -1,3 +1,9 @@
+matrix-synapse-py3 (1.31.0+nmu1) UNRELEASED; urgency=medium
+
+  * Skip tests when DEB_BUILD_OPTIONS contains "nocheck".
+
+ -- Dan Callahan <danc@element.io>  Mon, 12 Apr 2021 13:07:36 +0000
+
 matrix-synapse-py3 (1.31.0) stable; urgency=medium
 
   * New synapse release 1.31.0.
@@ -18,11 +18,6 @@ ARG PYTHON_VERSION=3.8
 ###
 FROM docker.io/python:${PYTHON_VERSION}-slim as builder
 
-LABEL org.opencontainers.image.url='https://matrix.org/docs/projects/server/synapse'
-LABEL org.opencontainers.image.documentation='https://github.com/matrix-org/synapse/blob/master/docker/README.md'
-LABEL org.opencontainers.image.source='https://github.com/matrix-org/synapse.git'
-LABEL org.opencontainers.image.licenses='Apache-2.0'
-
 # install the OS build deps
 RUN apt-get update && apt-get install -y \
     build-essential \
@@ -66,6 +61,11 @@ RUN pip install --prefix="/install" --no-deps --no-warn-script-location /synapse
 
 FROM docker.io/python:${PYTHON_VERSION}-slim
 
+LABEL org.opencontainers.image.url='https://matrix.org/docs/projects/server/synapse'
+LABEL org.opencontainers.image.documentation='https://github.com/matrix-org/synapse/blob/master/docker/README.md'
+LABEL org.opencontainers.image.source='https://github.com/matrix-org/synapse.git'
+LABEL org.opencontainers.image.licenses='Apache-2.0'
+
 RUN apt-get update && apt-get install -y \
     curl \
     gosu \
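(Moving the LABELs to the final stage means they survive into the published image. A sketch of reading them back, assuming the Docker SDK for Python is installed and the image is present locally:)

    import docker

    client = docker.from_env()
    image = client.images.get("matrixdotorg/synapse:latest")

    # The opencontainers labels now live on the final image, so this prints
    # the url/documentation/source/licenses metadata declared above.
    for key, value in image.labels.items():
        print(f"{key}={value}")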
@@ -202,7 +202,7 @@ The following fields are returned in the JSON response body:
 - ``users`` - An array of objects, each containing information about a user.
   User objects contain the following fields:
 
-  - ``name`` - string - Fully-qualified user ID (ex. `@user:server.com`).
+  - ``name`` - string - Fully-qualified user ID (ex. ``@user:server.com``).
   - ``is_guest`` - bool - Status if that user is a guest account.
   - ``admin`` - bool - Status if that user is a server administrator.
   - ``user_type`` - string - Type of the user. Normal users are type ``None``.
@@ -864,3 +864,118 @@ The following parameters should be set in the URL:
 
 - ``user_id`` - The fully qualified MXID: for example, ``@user:server.com``. The user must
   be local.
+
+Override ratelimiting for users
+===============================
+
+This API allows you to override or disable ratelimiting for a specific user.
+There are specific APIs to set, get and delete a ratelimit.
+
+Get status of ratelimit
+-----------------------
+
+The API is::
+
+  GET /_synapse/admin/v1/users/<user_id>/override_ratelimit
+
+To use it, you will need to authenticate by providing an ``access_token`` for a
+server admin: see `README.rst <README.rst>`_.
+
+A response body like the following is returned:
+
+.. code:: json
+
+    {
+        "messages_per_second": 0,
+        "burst_count": 0
+    }
+
+**Parameters**
+
+The following parameters should be set in the URL:
+
+- ``user_id`` - The fully qualified MXID: for example, ``@user:server.com``. The user must
+  be local.
+
+**Response**
+
+The following fields are returned in the JSON response body:
+
+- ``messages_per_second`` - integer - The number of actions that can
+  be performed in a second. ``0`` means that ratelimiting is disabled for this user.
+- ``burst_count`` - integer - How many actions can be performed before
+  being limited.
+
+If **no** custom ratelimit is set, an empty JSON dict is returned.
+
+.. code:: json
+
+    {}
+
+Set ratelimit
+-------------
+
+The API is::
+
+  POST /_synapse/admin/v1/users/<user_id>/override_ratelimit
+
+To use it, you will need to authenticate by providing an ``access_token`` for a
+server admin: see `README.rst <README.rst>`_.
+
+A response body like the following is returned:
+
+.. code:: json
+
+    {
+        "messages_per_second": 0,
+        "burst_count": 0
+    }
+
+**Parameters**
+
+The following parameters should be set in the URL:
+
+- ``user_id`` - The fully qualified MXID: for example, ``@user:server.com``. The user must
+  be local.
+
+Body parameters:
+
+- ``messages_per_second`` - positive integer, optional. The number of actions that can
+  be performed in a second. Defaults to ``0``.
+- ``burst_count`` - positive integer, optional. How many actions can be performed
+  before being limited. Defaults to ``0``.
+
+To disable ratelimiting for a user, set both values to ``0``.
+
+**Response**
+
+The following fields are returned in the JSON response body:
+
+- ``messages_per_second`` - integer - The number of actions that can
+  be performed in a second.
+- ``burst_count`` - integer - How many actions can be performed before
+  being limited.
+
+Delete ratelimit
+----------------
+
+The API is::
+
+  DELETE /_synapse/admin/v1/users/<user_id>/override_ratelimit
+
+To use it, you will need to authenticate by providing an ``access_token`` for a
+server admin: see `README.rst <README.rst>`_.
+
+An empty JSON dict is returned.
+
+.. code:: json
+
+    {}
+
+**Parameters**
+
+The following parameters should be set in the URL:
+
+- ``user_id`` - The fully qualified MXID: for example, ``@user:server.com``. The user must
+  be local.
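A brief end-to-end sketch of these three endpoints from Python; the homeserver URL, admin token and user ID below are placeholders:

.. code:: python

    import requests

    BASE = "https://homeserver.example.com/_synapse/admin/v1"
    HEADERS = {"Authorization": "Bearer <admin access token>"}
    USER = "@user:homeserver.example.com"
    URL = f"{BASE}/users/{USER}/override_ratelimit"

    # Disable ratelimiting for the user: 0/0 means "no limits".
    requests.post(
        URL, headers=HEADERS, json={"messages_per_second": 0, "burst_count": 0}
    )

    # Read the override back; an empty dict would mean no override is set.
    print(requests.get(URL, headers=HEADERS).json())

    # Remove the override, restoring the server defaults.
    requests.delete(URL, headers=HEADERS)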
@@ -35,7 +35,7 @@
 showcontent = true
 
 [tool.black]
-target-version = ['py35']
+target-version = ['py36']
 exclude = '''
 
 (
@@ -18,11 +18,9 @@ import threading
 from concurrent.futures import ThreadPoolExecutor
 
 DISTS = (
-    "debian:stretch",
     "debian:buster",
     "debian:bullseye",
     "debian:sid",
-    "ubuntu:xenial",
     "ubuntu:bionic",
     "ubuntu:focal",
     "ubuntu:groovy",
@@ -43,7 +41,7 @@ class Builder(object):
         self._lock = threading.Lock()
         self._failed = False
 
-    def run_build(self, dist):
+    def run_build(self, dist, skip_tests=False):
         """Build deb for a single distribution"""
 
         if self._failed:
@@ -51,13 +49,13 @@ class Builder(object):
             raise Exception("failed")
 
         try:
-            self._inner_build(dist)
+            self._inner_build(dist, skip_tests)
         except Exception as e:
             print("build of %s failed: %s" % (dist, e), file=sys.stderr)
             self._failed = True
             raise
 
-    def _inner_build(self, dist):
+    def _inner_build(self, dist, skip_tests=False):
         projdir = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
         os.chdir(projdir)
 
@@ -101,6 +99,7 @@ class Builder(object):
             "--volume=" + debsdir + ":/debs",
             "-e", "TARGET_USERID=%i" % (os.getuid(), ),
             "-e", "TARGET_GROUPID=%i" % (os.getgid(), ),
+            "-e", "DEB_BUILD_OPTIONS=%s" % ("nocheck" if skip_tests else ""),
             "dh-venv-builder:" + tag,
         ], stdout=stdout, stderr=subprocess.STDOUT)
 
@@ -124,7 +123,7 @@ class Builder(object):
         self.active_containers.remove(c)
 
 
-def run_builds(dists, jobs=1):
+def run_builds(dists, jobs=1, skip_tests=False):
     builder = Builder(redirect_stdout=(jobs > 1))
 
     def sig(signum, _frame):
@@ -133,7 +132,7 @@ def run_builds(dists, jobs=1):
     signal.signal(signal.SIGINT, sig)
 
     with ThreadPoolExecutor(max_workers=jobs) as e:
-        res = e.map(builder.run_build, dists)
+        res = e.map(lambda dist: builder.run_build(dist, skip_tests), dists)
 
     # make sure we consume the iterable so that exceptions are raised.
     for r in res:
@@ -148,9 +147,13 @@ if __name__ == '__main__':
         '-j', '--jobs', type=int, default=1,
         help='specify the number of builds to run in parallel',
     )
+    parser.add_argument(
+        '--no-check', action='store_true',
+        help='skip running tests after building',
+    )
     parser.add_argument(
         'dist', nargs='*', default=DISTS,
         help='a list of distributions to build for. Default: %(default)s',
     )
     args = parser.parse_args()
-    run_builds(dists=args.dist, jobs=args.jobs)
+    run_builds(dists=args.dist, jobs=args.jobs, skip_tests=args.no_check)
@@ -46,4 +46,4 @@ if [[ -n "$1" ]]; then
 fi
 
 # Run the tests!
-COMPLEMENT_BASE_IMAGE=complement-synapse go test -v -tags synapse_blacklist -count=1 $EXTRA_COMPLEMENT_ARGS ./tests
+COMPLEMENT_BASE_IMAGE=complement-synapse go test -v -tags synapse_blacklist,msc2946,msc3083 -count=1 $EXTRA_COMPLEMENT_ARGS ./tests
@@ -0,0 +1,244 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright 2020 The Matrix.org Foundation C.I.C.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""An interactive script for doing a release. See `run()` below.
+"""
+
+import subprocess
+import sys
+from typing import Optional
+
+import click
+import git
+from packaging import version
+from redbaron import RedBaron
+
+
+@click.command()
+def run():
+    """An interactive script to walk through the initial stages of creating a
+    release, including creating a release branch, updating the changelog and
+    pushing to GitHub.
+
+    Requires the dev dependencies to be installed, which can be done via:
+
+        pip install -e .[dev]
+
+    """
+
+    # Make sure we're in a git repo.
+    try:
+        repo = git.Repo()
+    except git.InvalidGitRepositoryError:
+        raise click.ClickException("Not in Synapse repo.")
+
+    if repo.is_dirty():
+        raise click.ClickException("Uncommitted changes exist.")
+
+    click.secho("Updating git repo...")
+    repo.remote().fetch()
+
+    # Parse the AST and load the `__version__` node so that we can edit it
+    # later.
+    with open("synapse/__init__.py") as f:
+        red = RedBaron(f.read())
+
+    version_node = None
+    for node in red:
+        if node.type != "assignment":
+            continue
+
+        if node.target.type != "name":
+            continue
+
+        if node.target.value != "__version__":
+            continue
+
+        version_node = node
+        break
+
+    if not version_node:
+        print("Failed to find '__version__' definition in synapse/__init__.py")
+        sys.exit(1)
+
+    # Parse the current version.
+    current_version = version.parse(version_node.value.value.strip('"'))
+    assert isinstance(current_version, version.Version)
+
+    # Figure out what sort of release we're doing and calculate the new version.
+    rc = click.confirm("RC", default=True)
+    if current_version.pre:
+        # If the current version is an RC we don't need to bump any of the
+        # version numbers (other than the RC number).
+        base_version = "{}.{}.{}".format(
+            current_version.major,
+            current_version.minor,
+            current_version.micro,
+        )
+
+        if rc:
+            new_version = "{}.{}.{}rc{}".format(
+                current_version.major,
+                current_version.minor,
+                current_version.micro,
+                current_version.pre[1] + 1,
+            )
+        else:
+            new_version = base_version
+    else:
+        # If this is a new release cycle then we need to know if it's a major
+        # version bump or a hotfix.
+        release_type = click.prompt(
+            "Release type",
+            type=click.Choice(("major", "hotfix")),
+            show_choices=True,
+            default="major",
+        )
+
+        if release_type == "major":
+            base_version = new_version = "{}.{}.{}".format(
+                current_version.major,
+                current_version.minor + 1,
+                0,
+            )
+            if rc:
+                new_version = "{}.{}.{}rc1".format(
+                    current_version.major,
+                    current_version.minor + 1,
+                    0,
+                )
+
+        else:
+            base_version = new_version = "{}.{}.{}".format(
+                current_version.major,
+                current_version.minor,
+                current_version.micro + 1,
+            )
+            if rc:
+                new_version = "{}.{}.{}rc1".format(
+                    current_version.major,
+                    current_version.minor,
+                    current_version.micro + 1,
+                )
+
+    # Confirm the calculated version is OK.
+    if not click.confirm(f"Create new version: {new_version}?", default=True):
+        click.get_current_context().abort()
+
+    # Switch to the release branch.
+    release_branch_name = f"release-v{base_version}"
+    release_branch = find_ref(repo, release_branch_name)
+    if release_branch:
+        if release_branch.is_remote():
+            # If the release branch only exists on the remote we check it out
+            # locally.
+            repo.git.checkout(release_branch_name)
+            release_branch = repo.active_branch
+    else:
+        # If a branch doesn't exist we create one. We ask which branch it
+        # should be based off, defaulting to sensible values depending on the
+        # release type.
+        if current_version.is_prerelease:
+            default = release_branch_name
+        elif release_type == "major":
+            default = "develop"
+        else:
+            default = "master"
+
+        branch_name = click.prompt(
+            "Which branch should the release be based on?", default=default
+        )
+
+        base_branch = find_ref(repo, branch_name)
+        if not base_branch:
+            print(f"Could not find base branch {branch_name}!")
+            click.get_current_context().abort()
+
+        # Check out the base branch and ensure it's up to date
+        repo.head.reference = base_branch
+        repo.head.reset(index=True, working_tree=True)
+        if not base_branch.is_remote():
+            update_branch(repo)
+
+        # Create the new release branch
+        release_branch = repo.create_head(release_branch_name, commit=base_branch)
+
+    # Switch to the release branch and ensure it's up to date.
+    repo.git.checkout(release_branch_name)
+    update_branch(repo)
+
+    # Update the `__version__` variable and write it back to the file.
+    version_node.value = '"' + new_version + '"'
+    with open("synapse/__init__.py", "w") as f:
+        f.write(red.dumps())
+
+    # Generate changelogs
+    subprocess.run("python3 -m towncrier", shell=True)
+
+    # Generate debian changelogs if it's not an RC.
+    if not rc:
+        subprocess.run(
+            f'dch -M -v {new_version} "New synapse release {new_version}."', shell=True
+        )
+        subprocess.run('dch -M -r -D stable ""', shell=True)
+
+    # Show the user the changes and ask if they want to edit the change log.
+    repo.git.add("-u")
+    subprocess.run("git diff --cached", shell=True)
+
+    if click.confirm("Edit changelog?", default=False):
+        click.edit(filename="CHANGES.md")
+
+    # Commit the changes.
+    repo.git.add("-u")
+    repo.git.commit(f"-m {new_version}")
+
+    # We give the option to bail here in case the user wants to make sure things
+    # are OK before pushing.
+    if not click.confirm("Push branch to github?", default=True):
+        print("")
+        print("Run when ready to push:")
+        print("")
+        print(f"\tgit push -u {repo.remote().name} {repo.active_branch.name}")
+        print("")
+        sys.exit(0)
+
+    # Otherwise, push and open the changelog in the browser.
+    repo.git.push("-u", repo.remote().name, repo.active_branch.name)
+
+    click.launch(
+        f"https://github.com/matrix-org/synapse/blob/{repo.active_branch.name}/CHANGES.md"
+    )
+
+
+def find_ref(repo: git.Repo, ref_name: str) -> Optional[git.HEAD]:
+    """Find the branch/ref, looking first locally then in the remote."""
+    if ref_name in repo.refs:
+        return repo.refs[ref_name]
+    elif ref_name in repo.remote().refs:
+        return repo.remote().refs[ref_name]
+    else:
+        return None
+
+
+def update_branch(repo: git.Repo):
+    """Ensure branch is up to date if it has a remote"""
+    if repo.active_branch.tracking_branch():
+        repo.git.merge(repo.active_branch.tracking_branch().name)
+
+
+if __name__ == "__main__":
+    run()
@@ -18,16 +18,15 @@ ignore =
 # E203: whitespace before ':' (which is contrary to pep8?)
 # E731: do not assign a lambda expression, use a def
 # E501: Line too long (black enforces this for us)
-# B00*: Subsection of the bugbear suite (TODO: add in remaining fixes)
-ignore=W503,W504,E203,E731,E501,B006,B007,B008
+# B007: Subsection of the bugbear suite (TODO: add in remaining fixes)
+ignore=W503,W504,E203,E731,E501,B007
 
 [isort]
 line_length = 88
-sections=FUTURE,STDLIB,COMPAT,THIRDPARTY,TWISTED,FIRSTPARTY,TESTS,LOCALFOLDER
+sections=FUTURE,STDLIB,THIRDPARTY,TWISTED,FIRSTPARTY,TESTS,LOCALFOLDER
 default_section=THIRDPARTY
 known_first_party = synapse
 known_tests=tests
-known_compat = mock
 known_twisted=twisted,OpenSSL
 multi_line_output=3
 include_trailing_comma=true
setup.py
@@ -103,6 +103,13 @@ CONDITIONAL_REQUIREMENTS["lint"] = [
     "flake8",
 ]
 
+CONDITIONAL_REQUIREMENTS["dev"] = CONDITIONAL_REQUIREMENTS["lint"] + [
+    # The following are used by the release script
+    "click==7.1.2",
+    "redbaron==0.9.2",
+    "GitPython==3.1.14",
+]
+
 CONDITIONAL_REQUIREMENTS["mypy"] = ["mypy==0.812", "mypy-zope==0.2.13"]
 
 # Dependencies which are exclusively required by unit test code. This is
@@ -110,7 +117,7 @@ CONDITIONAL_REQUIREMENTS["mypy"] = ["mypy==0.812", "mypy-zope==0.2.13"]
 # Tests assume that all optional dependencies are installed.
 #
 # parameterized_class decorator was introduced in parameterized 0.7.0
-CONDITIONAL_REQUIREMENTS["test"] = ["mock>=2.0", "parameterized>=0.7.0"]
+CONDITIONAL_REQUIREMENTS["test"] = ["parameterized>=0.7.0"]
 
 setup(
     name="matrix-synapse",
@@ -123,13 +130,12 @@ setup(
     zip_safe=False,
     long_description=long_description,
     long_description_content_type="text/x-rst",
-    python_requires="~=3.5",
+    python_requires="~=3.6",
     classifiers=[
         "Development Status :: 5 - Production/Stable",
         "Topic :: Communications :: Chat",
         "License :: OSI Approved :: Apache Software License",
         "Programming Language :: Python :: 3 :: Only",
-        "Programming Language :: Python :: 3.5",
         "Programming Language :: Python :: 3.6",
         "Programming Language :: Python :: 3.7",
         "Programming Language :: Python :: 3.8",
@@ -48,7 +48,7 @@ try:
 except ImportError:
     pass
 
-__version__ = "1.31.0"
+__version__ = "1.32.0rc1"
 
 if bool(os.environ.get("SYNAPSE_TEST_PATCH_LOG_CONTEXTS", False)):
     # We import here so that we don't have to install a bunch of deps when
@@ -73,6 +73,11 @@ class LoginType:
     DUMMY = "m.login.dummy"
 
 
+# This is used in the `type` parameter for /register when called by
+# an appservice to register a new user.
+APP_SERVICE_REGISTRATION_TYPE = "m.login.application_service"
+
+
 class EventTypes:
     Member = "m.room.member"
     Create = "m.room.create"
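(A sketch of the register call an appservice now has to make, per the constant above; the homeserver URL, as_token and username are placeholder assumptions:)

    import requests

    # Appservices authenticate /register with their as_token and must declare
    # the m.login.application_service type in the request body.
    resp = requests.post(
        "https://homeserver.example.com/_matrix/client/r0/register",
        params={"access_token": "<as_token>"},
        json={
            "type": "m.login.application_service",
            "username": "some_bridged_user",
        },
    )
    resp.raise_for_status()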
@@ -49,7 +49,7 @@ This is all tied together by the AppServiceScheduler which DIs the required
 components.
 """
 import logging
-from typing import List
+from typing import List, Optional
 
 from synapse.appservice import ApplicationService, ApplicationServiceState
 from synapse.events import EventBase
@@ -191,11 +191,11 @@ class _TransactionController:
         self,
         service: ApplicationService,
         events: List[EventBase],
-        ephemeral: List[JsonDict] = [],
+        ephemeral: Optional[List[JsonDict]] = None,
     ):
         try:
             txn = await self.store.create_appservice_txn(
-                service=service, events=events, ephemeral=ephemeral
+                service=service, events=events, ephemeral=ephemeral or []
             )
             service_is_up = await self._is_service_up(service)
             if service_is_up:
@@ -12,7 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from typing import Dict
+from typing import Dict, Optional
 
 from ._base import Config
 
@@ -21,8 +21,10 @@ class RateLimitConfig:
     def __init__(
         self,
         config: Dict[str, float],
-        defaults={"per_second": 0.17, "burst_count": 3.0},
+        defaults: Optional[Dict[str, float]] = None,
     ):
+        defaults = defaults or {"per_second": 0.17, "burst_count": 3.0}
+
         self.per_second = config.get("per_second", defaults["per_second"])
         self.burst_count = int(config.get("burst_count", defaults["burst_count"]))
@@ -270,7 +270,7 @@ class TlsConfig(Config):
         tls_certificate_path,
         tls_private_key_path,
         acme_domain,
-        **kwargs
+        **kwargs,
     ):
         """If the acme_domain is specified acme will be enabled.
         If the TLS paths are not specified the default will be certs in the
@@ -330,9 +330,11 @@ class FrozenEvent(EventBase):
         self,
         event_dict: JsonDict,
         room_version: RoomVersion,
-        internal_metadata_dict: JsonDict = {},
+        internal_metadata_dict: Optional[JsonDict] = None,
         rejected_reason: Optional[str] = None,
     ):
+        internal_metadata_dict = internal_metadata_dict or {}
+
         event_dict = dict(event_dict)
 
         # Signatures is a dict of dicts, and this is faster than doing a
@@ -386,9 +388,11 @@ class FrozenEventV2(EventBase):
         self,
         event_dict: JsonDict,
         room_version: RoomVersion,
-        internal_metadata_dict: JsonDict = {},
+        internal_metadata_dict: Optional[JsonDict] = None,
         rejected_reason: Optional[str] = None,
     ):
+        internal_metadata_dict = internal_metadata_dict or {}
+
         event_dict = dict(event_dict)
 
         # Signatures is a dict of dicts, and this is faster than doing a
@@ -507,9 +511,11 @@ def _event_type_from_format_version(format_version: int) -> Type[EventBase]:
 def make_event_from_dict(
     event_dict: JsonDict,
     room_version: RoomVersion = RoomVersions.V1,
-    internal_metadata_dict: JsonDict = {},
+    internal_metadata_dict: Optional[JsonDict] = None,
     rejected_reason: Optional[str] = None,
 ) -> EventBase:
     """Construct an EventBase from the given event dict"""
     event_type = _event_type_from_format_version(room_version.event_format)
-    return event_type(event_dict, room_version, internal_metadata_dict, rejected_reason)
+    return event_type(
+        event_dict, room_version, internal_metadata_dict or {}, rejected_reason
+    )
@@ -425,13 +425,9 @@ class FederationSendServlet(BaseFederationServlet):
             logger.exception(e)
             return 400, {"error": "Invalid transaction"}
 
-        try:
-            code, response = await self.handler.on_incoming_transaction(
-                origin, transaction_data
-            )
-        except Exception:
-            logger.exception("on_incoming_transaction failed")
-            raise
+        code, response = await self.handler.on_incoming_transaction(
+            origin, transaction_data
+        )
 
         return code, response
 
@@ -18,6 +18,7 @@ server protocol.
 """
 
 import logging
+from typing import Optional
 
 import attr
 
@@ -98,7 +99,7 @@ class Transaction(JsonEncodedObject):
         "pdus",
     ]
 
-    def __init__(self, transaction_id=None, pdus=[], **kwargs):
+    def __init__(self, transaction_id=None, pdus: Optional[list] = None, **kwargs):
         """If we include a list of pdus then we decode them as PDUs
         automatically.
         """
@@ -107,7 +108,7 @@ class Transaction(JsonEncodedObject):
         if "edus" in kwargs and not kwargs["edus"]:
             del kwargs["edus"]
 
-        super().__init__(transaction_id=transaction_id, pdus=pdus, **kwargs)
+        super().__init__(transaction_id=transaction_id, pdus=pdus or [], **kwargs)
 
     @staticmethod
     def create_new(pdus, **kwargs):
@@ -182,7 +182,7 @@ class ApplicationServicesHandler:
         self,
         stream_key: str,
         new_token: Optional[int],
-        users: Collection[Union[str, UserID]] = [],
+        users: Optional[Collection[Union[str, UserID]]] = None,
     ):
         """This is called by the notifier in the background
         when an ephemeral event is handled by the homeserver.
@@ -215,7 +215,7 @@ class ApplicationServicesHandler:
             # We only start a new background process if necessary rather than
             # optimistically (to cut down on overhead).
             self._notify_interested_services_ephemeral(
-                services, stream_key, new_token, users
+                services, stream_key, new_token, users or []
             )
 
     @wrap_as_background_process("notify_interested_services_ephemeral")
@@ -1790,7 +1790,7 @@ class FederationHandler(BaseHandler):
         room_id: str,
         user_id: str,
         membership: str,
-        content: JsonDict = {},
+        content: JsonDict,
         params: Optional[Dict[str, Union[str, Iterable[str]]]] = None,
     ) -> Tuple[str, EventBase, RoomVersion]:
         (
@@ -137,7 +137,7 @@ class MessageHandler:
         self,
         user_id: str,
         room_id: str,
-        state_filter: StateFilter = StateFilter.all(),
+        state_filter: Optional[StateFilter] = None,
         at_token: Optional[StreamToken] = None,
         is_guest: bool = False,
     ) -> List[dict]:
@@ -164,6 +164,8 @@ class MessageHandler:
             AuthError (403) if the user doesn't have permission to view
             members of this room.
         """
+        state_filter = state_filter or StateFilter.all()
+
         if at_token:
             # FIXME this claims to get the state at a stream position, but
             # get_recent_events_for_room operates by topo ordering. This therefore
@@ -874,7 +876,7 @@ class EventCreationHandler:
         event: EventBase,
         context: EventContext,
         ratelimit: bool = True,
-        extra_users: List[UserID] = [],
+        extra_users: Optional[List[UserID]] = None,
         ignore_shadow_ban: bool = False,
     ) -> EventBase:
         """Processes a new event.
@@ -902,6 +904,7 @@ class EventCreationHandler:
         Raises:
             ShadowBanError if the requester has been shadow-banned.
         """
+        extra_users = extra_users or []
 
         # we don't apply shadow-banning to membership events here. Invites are blocked
         # higher up the stack, and we allow shadow-banned users to send join and leave
@@ -1071,7 +1074,7 @@ class EventCreationHandler:
         event: EventBase,
         context: EventContext,
         ratelimit: bool = True,
-        extra_users: List[UserID] = [],
+        extra_users: Optional[List[UserID]] = None,
     ) -> EventBase:
         """Called when we have fully built the event, have already
         calculated the push actions for the event, and checked auth.
@@ -1083,6 +1086,8 @@ class EventCreationHandler:
             it was de-duplicated (e.g. because we had already persisted an
             event with the same transaction ID.)
         """
+        extra_users = extra_users or []
+
         assert self.storage.persistence is not None
         assert self._events_shard_config.should_handle(
             self._instance_name, event.room_id
@@ -1071,7 +1071,7 @@ class PresenceEventSource:
         room_ids=None,
         include_offline=True,
         explicit_room_id=None,
-        **kwargs
+        **kwargs,
     ) -> Tuple[List[UserPresenceState], int]:
         # The process for getting presence events are:
         # 1. Get the rooms the user is in.
@@ -169,7 +169,7 @@ class RegistrationHandler(BaseHandler):
         user_type: Optional[str] = None,
         default_display_name: Optional[str] = None,
         address: Optional[str] = None,
-        bind_emails: Iterable[str] = [],
+        bind_emails: Optional[Iterable[str]] = None,
         by_admin: bool = False,
         user_agent_ips: Optional[List[Tuple[str, str]]] = None,
         auth_provider_id: Optional[str] = None,
@@ -204,6 +204,8 @@ class RegistrationHandler(BaseHandler):
         Raises:
             SynapseError if there was a problem registering.
         """
+        bind_emails = bind_emails or []
+
         await self.check_registration_ratelimit(address)
 
         result = await self.spam_checker.check_registration_for_spam(
@@ -20,7 +20,7 @@ from http import HTTPStatus
 from typing import TYPE_CHECKING, Iterable, List, Optional, Tuple
 
 from synapse import types
-from synapse.api.constants import AccountDataTypes, EventTypes, Membership
+from synapse.api.constants import AccountDataTypes, EventTypes, JoinRules, Membership
 from synapse.api.errors import (
     AuthError,
     Codes,
@@ -29,6 +29,7 @@ from synapse.api.errors import (
     SynapseError,
 )
 from synapse.api.ratelimiting import Ratelimiter
+from synapse.api.room_versions import RoomVersion
 from synapse.events import EventBase
 from synapse.events.snapshot import EventContext
 from synapse.types import JsonDict, Requester, RoomAlias, RoomID, StateMap, UserID
@@ -179,6 +180,62 @@ class RoomMemberHandler(metaclass=abc.ABCMeta):
 
         await self._invites_per_user_limiter.ratelimit(requester, invitee_user_id)
 
+    async def _can_join_without_invite(
+        self, state_ids: StateMap[str], room_version: RoomVersion, user_id: str
+    ) -> bool:
+        """
+        Check whether a user can join a room without an invite.
+
+        When joining a room with restricted join rules (as defined in MSC3083),
+        the membership of spaces must be checked during join.
+
+        Args:
+            state_ids: The state of the room as it currently is.
+            room_version: The room version of the room being joined.
+            user_id: The user joining the room.
+
+        Returns:
+            True if the user can join the room, false otherwise.
+        """
+        # This only applies to room versions which support the new join rule.
+        if not room_version.msc3083_join_rules:
+            return True
+
+        # If there's no join rule, then it defaults to public (so this doesn't apply).
+        join_rules_event_id = state_ids.get((EventTypes.JoinRules, ""), None)
+        if not join_rules_event_id:
+            return True
+
+        # If the join rule is not restricted, this doesn't apply.
+        join_rules_event = await self.store.get_event(join_rules_event_id)
+        if join_rules_event.content.get("join_rule") != JoinRules.MSC3083_RESTRICTED:
+            return True
+
+        # If allowed is of the wrong form, then only allow invited users.
+        allowed_spaces = join_rules_event.content.get("allow", [])
+        if not isinstance(allowed_spaces, list):
+            return False
+
+        # Get the list of joined rooms and see if there's an overlap.
+        joined_rooms = await self.store.get_rooms_for_user(user_id)
+
+        # Pull out the other room IDs, invalid data gets filtered.
+        for space in allowed_spaces:
+            if not isinstance(space, dict):
+                continue
+
+            space_id = space.get("space")
+            if not isinstance(space_id, str):
+                continue
+
+            # The user was joined to one of the spaces specified, they can join
+            # this room!
+            if space_id in joined_rooms:
+                return True
+
+        # The user was not in any of the required spaces.
+        return False
+
     async def _local_membership_update(
         self,
         requester: Requester,
@ -236,9 +293,25 @@ class RoomMemberHandler(metaclass=abc.ABCMeta):
|
|||
|
||||
if event.membership == Membership.JOIN:
|
||||
newly_joined = True
|
||||
user_is_invited = False
|
||||
if prev_member_event_id:
|
||||
prev_member_event = await self.store.get_event(prev_member_event_id)
|
||||
newly_joined = prev_member_event.membership != Membership.JOIN
|
||||
user_is_invited = prev_member_event.membership == Membership.INVITE
|
||||
|
||||
# If the member is not already in the room and is not accepting an invite,
|
||||
# check if they should be allowed access via membership in a space.
|
||||
if (
|
||||
newly_joined
|
||||
and not user_is_invited
|
||||
and not await self._can_join_without_invite(
|
||||
prev_state_ids, event.room_version, user_id
|
||||
)
|
||||
):
|
||||
raise AuthError(
|
||||
403,
|
||||
"You do not belong to any of the required spaces to join this room.",
|
||||
)
|
||||
|
||||
# Only rate-limit if the user actually joined the room, otherwise we'll end
|
||||
# up blocking profile updates.
|
||||
|
|
|
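For orientation, the `m.room.join_rules` content that `_can_join_without_invite` inspects has roughly this shape (room IDs are made up; the `"restricted"` value is my reading of what `JoinRules.MSC3083_RESTRICTED` resolved to in the MSC3083 draft, so treat it as an assumption):

    # What a restricted join rule looks like to the code above:
    join_rules_content = {
        "join_rule": "restricted",  # assumed value of JoinRules.MSC3083_RESTRICTED
        "allow": [
            {"space": "!mods:example.org"},   # joinable if the user is in this space
            {"space": "!users:example.org"},
        ],
    }

Anything malformed fails closed: a non-list `allow` rejects the join, while malformed entries inside the list are simply skipped.
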
@@ -549,7 +549,7 @@ class SyncHandler:
         )

     async def get_state_after_event(
-        self, event: EventBase, state_filter: StateFilter = StateFilter.all()
+        self, event: EventBase, state_filter: Optional[StateFilter] = None
     ) -> StateMap[str]:
         """
         Get the room state after the given event

@@ -559,7 +559,7 @@ class SyncHandler:
             state_filter: The state filter used to fetch state from the database.
         """
         state_ids = await self.state_store.get_state_ids_for_event(
-            event.event_id, state_filter=state_filter
+            event.event_id, state_filter=state_filter or StateFilter.all()
         )
         if event.is_state():
             state_ids = dict(state_ids)

@@ -570,7 +570,7 @@ class SyncHandler:
         self,
         room_id: str,
         stream_position: StreamToken,
-        state_filter: StateFilter = StateFilter.all(),
+        state_filter: Optional[StateFilter] = None,
     ) -> StateMap[str]:
         """Get the room state at a particular stream position

@@ -590,7 +590,7 @@ class SyncHandler:
         if last_events:
             last_event = last_events[-1]
             state = await self.get_state_after_event(
-                last_event, state_filter=state_filter
+                last_event, state_filter=state_filter or StateFilter.all()
             )

         else:

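`StateFilter.all()` as a default argument has the same definition-time-evaluation problem as the `[]` defaults: a single `StateFilter` instance would be built at import time and shared by every caller. Moving to `Optional[StateFilter] = None` plus `state_filter or StateFilter.all()` defers construction to each call. A toy illustration of why defaults must not run code at definition time (not Synapse code):

    import time
    from typing import Optional


    def stamp(ts: float = time.time()) -> float:
        return ts  # time.time() ran once, when the module was imported


    def stamp_fixed(ts: Optional[float] = None) -> float:
        return ts if ts is not None else time.time()  # runs per call


    assert stamp() == stamp()  # frozen forever at the import-time value
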
@@ -297,7 +297,7 @@ class SimpleHttpClient:
     def __init__(
         self,
         hs: "HomeServer",
-        treq_args: Dict[str, Any] = {},
+        treq_args: Optional[Dict[str, Any]] = None,
         ip_whitelist: Optional[IPSet] = None,
         ip_blacklist: Optional[IPSet] = None,
         use_proxy: bool = False,

@@ -317,7 +317,7 @@ class SimpleHttpClient:

         self._ip_whitelist = ip_whitelist
         self._ip_blacklist = ip_blacklist
-        self._extra_treq_args = treq_args
+        self._extra_treq_args = treq_args or {}

         self.user_agent = hs.version_string
         self.clock = hs.get_clock()

@@ -272,7 +272,7 @@ class MatrixFederationHttpClient:
         self,
         request: MatrixFederationRequest,
         try_trailing_slash_on_400: bool = False,
-        **send_request_args
+        **send_request_args,
     ) -> IResponse:
         """Wrapper for _send_request which can optionally retry the request
         upon receiving a combination of a 400 HTTP response code and a

@@ -27,7 +27,7 @@ from twisted.python.failure import Failure
 from twisted.web.client import URI, BrowserLikePolicyForHTTPS, _AgentBase
 from twisted.web.error import SchemeNotSupported
 from twisted.web.http_headers import Headers
-from twisted.web.iweb import IAgent
+from twisted.web.iweb import IAgent, IPolicyForHTTPS

 from synapse.http.connectproxyclient import HTTPConnectProxyEndpoint

@@ -88,12 +88,14 @@ class ProxyAgent(_AgentBase):
         self,
         reactor,
         proxy_reactor=None,
-        contextFactory=BrowserLikePolicyForHTTPS(),
+        contextFactory: Optional[IPolicyForHTTPS] = None,
         connectTimeout=None,
         bindAddress=None,
         pool=None,
         use_proxy=False,
     ):
+        contextFactory = contextFactory or BrowserLikePolicyForHTTPS()
+
         _AgentBase.__init__(self, reactor, pool)

         if proxy_reactor is None:

@@ -497,7 +497,7 @@ class SynapseSite(Site):
         resource,
         server_version_string,
         *args,
-        **kwargs
+        **kwargs,
     ):
         Site.__init__(self, resource, *args, **kwargs)

@@ -486,7 +486,7 @@ def start_active_span_from_request(
 def start_active_span_from_edu(
     edu_content,
     operation_name,
-    references=[],
+    references: Optional[list] = None,
     tags=None,
     start_time=None,
     ignore_active_span=False,

@@ -501,6 +501,7 @@ def start_active_span_from_edu(

     For the other args see opentracing.tracer
     """
+    references = references or []

     if opentracing is None:
         return noop_context_manager()

@@ -14,7 +14,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 import logging
-from typing import TYPE_CHECKING, Any, Generator, Iterable, Optional, Tuple
+from typing import TYPE_CHECKING, Any, Generator, Iterable, List, Optional, Tuple

 from twisted.internet import defer

@@ -127,7 +127,7 @@ class ModuleApi:
         return defer.ensureDeferred(self._auth_handler.check_user_exists(user_id))

     @defer.inlineCallbacks
-    def register(self, localpart, displayname=None, emails=[]):
+    def register(self, localpart, displayname=None, emails: Optional[List[str]] = None):
         """Registers a new user with given localpart and optional displayname, emails.

         Also returns an access token for the new user.

@@ -147,11 +147,13 @@ class ModuleApi:
         logger.warning(
             "Using deprecated ModuleApi.register which creates a dummy user device."
         )
-        user_id = yield self.register_user(localpart, displayname, emails)
+        user_id = yield self.register_user(localpart, displayname, emails or [])
         _, access_token = yield self.register_device(user_id)
         return user_id, access_token

-    def register_user(self, localpart, displayname=None, emails=[]):
+    def register_user(
+        self, localpart, displayname=None, emails: Optional[List[str]] = None
+    ):
         """Registers a new user with given localpart and optional displayname, emails.

         Args:

@@ -170,7 +172,7 @@ class ModuleApi:
             self._hs.get_registration_handler().register_user(
                 localpart=localpart,
                 default_display_name=displayname,
-                bind_emails=emails,
+                bind_emails=emails or [],
             )
         )

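Given the deprecation warning above, a module targeting this version would call the two-step replacement itself. A rough sketch, assuming an async context in which the returned awaitables can be awaited and that `register_device`'s first return element is the device ID (the function here is hypothetical):

    async def provision_user(api, localpart: str):
        # ModuleApi.register() used to bundle these two calls together.
        user_id = await api.register_user(localpart, displayname="Demo User")
        device_id, access_token = await api.register_device(user_id)
        return user_id, access_token
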
@@ -276,7 +276,7 @@ class Notifier:
         event: EventBase,
         event_pos: PersistedEventPosition,
         max_room_stream_token: RoomStreamToken,
-        extra_users: Collection[UserID] = [],
+        extra_users: Optional[Collection[UserID]] = None,
     ):
         """Unwraps event and calls `on_new_room_event_args`."""
         self.on_new_room_event_args(

@@ -286,7 +286,7 @@ class Notifier:
             state_key=event.get("state_key"),
             membership=event.content.get("membership"),
             max_room_stream_token=max_room_stream_token,
-            extra_users=extra_users,
+            extra_users=extra_users or [],
         )

     def on_new_room_event_args(

@@ -297,7 +297,7 @@ class Notifier:
         membership: Optional[str],
         event_pos: PersistedEventPosition,
         max_room_stream_token: RoomStreamToken,
-        extra_users: Collection[UserID] = [],
+        extra_users: Optional[Collection[UserID]] = None,
     ):
         """Used by handlers to inform the notifier something has happened
         in the room, room event wise.

@@ -313,7 +313,7 @@ class Notifier:
         self.pending_new_room_events.append(
             _PendingRoomEventEntry(
                 event_pos=event_pos,
-                extra_users=extra_users,
+                extra_users=extra_users or [],
                 room_id=room_id,
                 type=event_type,
                 state_key=state_key,

@@ -382,14 +382,14 @@ class Notifier:
         self,
         stream_key: str,
         new_token: Union[int, RoomStreamToken],
-        users: Collection[Union[str, UserID]] = [],
+        users: Optional[Collection[Union[str, UserID]]] = None,
     ):
         try:
             stream_token = None
             if isinstance(new_token, int):
                 stream_token = new_token
             self.appservice_handler.notify_interested_services_ephemeral(
-                stream_key, stream_token, users
+                stream_key, stream_token, users or []
             )
         except Exception:
             logger.exception("Error notifying application services of event")

@@ -404,13 +404,16 @@ class Notifier:
         self,
         stream_key: str,
         new_token: Union[int, RoomStreamToken],
-        users: Collection[Union[str, UserID]] = [],
-        rooms: Collection[str] = [],
+        users: Optional[Collection[Union[str, UserID]]] = None,
+        rooms: Optional[Collection[str]] = None,
     ):
         """Used to inform listeners that something has happened event wise.

         Will wake up all listeners for the given users and rooms.
         """
+        users = users or []
+        rooms = rooms or []
+
         with Measure(self.clock, "on_new_event"):
             user_streams = set()

@@ -54,6 +54,7 @@ from synapse.rest.admin.users import (
     AccountValidityRenewServlet,
     DeactivateAccountRestServlet,
     PushersRestServlet,
+    RateLimitRestServlet,
     ResetPasswordRestServlet,
     SearchUsersRestServlet,
     ShadowBanRestServlet,

@@ -62,7 +63,6 @@ from synapse.rest.admin.users import (
     UserMembershipRestServlet,
     UserRegisterServlet,
     UserRestServletV2,
-    UsersRestServlet,
     UsersRestServletV2,
     UserTokenRestServlet,
     WhoisRestServlet,

@@ -240,6 +240,7 @@ def register_servlets(hs, http_server):
     ShadowBanRestServlet(hs).register(http_server)
     ForwardExtremitiesRestServlet(hs).register(http_server)
     RoomEventContextServlet(hs).register(http_server)
+    RateLimitRestServlet(hs).register(http_server)


 def register_servlets_for_client_rest_resource(hs, http_server):

@@ -248,7 +249,6 @@ def register_servlets_for_client_rest_resource(hs, http_server):
     PurgeHistoryStatusRestServlet(hs).register(http_server)
     DeactivateAccountRestServlet(hs).register(http_server)
     PurgeHistoryRestServlet(hs).register(http_server)
-    UsersRestServlet(hs).register(http_server)
     ResetPasswordRestServlet(hs).register(http_server)
     SearchUsersRestServlet(hs).register(http_server)
     ShutdownRoomRestServlet(hs).register(http_server)

@@ -45,29 +45,6 @@ if TYPE_CHECKING:
 logger = logging.getLogger(__name__)


-class UsersRestServlet(RestServlet):
-    PATTERNS = admin_patterns("/users/(?P<user_id>[^/]*)$")
-
-    def __init__(self, hs: "HomeServer"):
-        self.hs = hs
-        self.store = hs.get_datastore()
-        self.auth = hs.get_auth()
-        self.admin_handler = hs.get_admin_handler()
-
-    async def on_GET(
-        self, request: SynapseRequest, user_id: str
-    ) -> Tuple[int, List[JsonDict]]:
-        target_user = UserID.from_string(user_id)
-        await assert_requester_is_admin(self.auth, request)
-
-        if not self.hs.is_mine(target_user):
-            raise SynapseError(400, "Can only users a local user")
-
-        ret = await self.store.get_users()
-
-        return 200, ret
-
-
 class UsersRestServletV2(RestServlet):
     PATTERNS = admin_patterns("/users$", "v2")

@@ -1004,3 +981,114 @@ class ShadowBanRestServlet(RestServlet):
         await self.store.set_shadow_banned(UserID.from_string(user_id), True)

         return 200, {}
+
+
+class RateLimitRestServlet(RestServlet):
+    """An admin API to override ratelimiting for a user.
+
+    Example:
+        POST /_synapse/admin/v1/users/@test:example.com/override_ratelimit
+        {
+          "messages_per_second": 0,
+          "burst_count": 0
+        }
+        200 OK
+        {
+          "messages_per_second": 0,
+          "burst_count": 0
+        }
+    """
+
+    PATTERNS = admin_patterns("/users/(?P<user_id>[^/]*)/override_ratelimit")
+
+    def __init__(self, hs: "HomeServer"):
+        self.hs = hs
+        self.store = hs.get_datastore()
+        self.auth = hs.get_auth()
+
+    async def on_GET(
+        self, request: SynapseRequest, user_id: str
+    ) -> Tuple[int, JsonDict]:
+        await assert_requester_is_admin(self.auth, request)
+
+        if not self.hs.is_mine_id(user_id):
+            raise SynapseError(400, "Can only look up local users")
+
+        if not await self.store.get_user_by_id(user_id):
+            raise NotFoundError("User not found")
+
+        ratelimit = await self.store.get_ratelimit_for_user(user_id)
+
+        if ratelimit:
+            # convert `null` to `0` for consistency;
+            # both values do the same in the ratelimit handler
+            ret = {
+                "messages_per_second": 0
+                if ratelimit.messages_per_second is None
+                else ratelimit.messages_per_second,
+                "burst_count": 0
+                if ratelimit.burst_count is None
+                else ratelimit.burst_count,
+            }
+        else:
+            ret = {}
+
+        return 200, ret
+
+    async def on_POST(
+        self, request: SynapseRequest, user_id: str
+    ) -> Tuple[int, JsonDict]:
+        await assert_requester_is_admin(self.auth, request)
+
+        if not self.hs.is_mine_id(user_id):
+            raise SynapseError(400, "Only local users can be ratelimited")
+
+        if not await self.store.get_user_by_id(user_id):
+            raise NotFoundError("User not found")
+
+        body = parse_json_object_from_request(request, allow_empty_body=True)
+
+        messages_per_second = body.get("messages_per_second", 0)
+        burst_count = body.get("burst_count", 0)
+
+        if not isinstance(messages_per_second, int) or messages_per_second < 0:
+            raise SynapseError(
+                400,
+                "%r parameter must be a positive int" % (messages_per_second,),
+                errcode=Codes.INVALID_PARAM,
+            )
+
+        if not isinstance(burst_count, int) or burst_count < 0:
+            raise SynapseError(
+                400,
+                "%r parameter must be a positive int" % (burst_count,),
+                errcode=Codes.INVALID_PARAM,
+            )
+
+        await self.store.set_ratelimit_for_user(
+            user_id, messages_per_second, burst_count
+        )
+        ratelimit = await self.store.get_ratelimit_for_user(user_id)
+        assert ratelimit is not None
+
+        ret = {
+            "messages_per_second": ratelimit.messages_per_second,
+            "burst_count": ratelimit.burst_count,
+        }
+
+        return 200, ret
+
+    async def on_DELETE(
+        self, request: SynapseRequest, user_id: str
+    ) -> Tuple[int, JsonDict]:
+        await assert_requester_is_admin(self.auth, request)
+
+        if not self.hs.is_mine_id(user_id):
+            raise SynapseError(400, "Only local users can be ratelimited")
+
+        if not await self.store.get_user_by_id(user_id):
+            raise NotFoundError("User not found")
+
+        await self.store.delete_ratelimit_for_user(user_id)
+
+        return 200, {}

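A quick way to exercise the new endpoint, sketched with the third-party `requests` library (server URL and token are placeholders; per the docstring above, an override of 0/0 disables ratelimiting for the user entirely):

    import requests

    base = "https://homeserver.example.com"
    url = f"{base}/_synapse/admin/v1/users/@test:example.com/override_ratelimit"
    headers = {"Authorization": "Bearer <admin_access_token>"}

    # Set an override (0/0 disables ratelimiting for this user).
    r = requests.post(url, headers=headers,
                      json={"messages_per_second": 0, "burst_count": 0})
    print(r.json())  # {'messages_per_second': 0, 'burst_count': 0}

    # Inspect the current override; an empty object means none is set.
    print(requests.get(url, headers=headers).json())

    # Remove the override, restoring the configured defaults.
    requests.delete(url, headers=headers)
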
@@ -13,7 +13,6 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-
 import hmac
 import logging
 import random

@@ -22,7 +21,7 @@ from typing import List, Union
 import synapse
 import synapse.api.auth
 import synapse.types
-from synapse.api.constants import LoginType
+from synapse.api.constants import APP_SERVICE_REGISTRATION_TYPE, LoginType
 from synapse.api.errors import (
     Codes,
     InteractiveAuthIncompleteError,

@@ -430,15 +429,20 @@ class RegisterRestServlet(RestServlet):
             raise SynapseError(400, "Invalid username")
         desired_username = body["username"]

-        appservice = None
-        if self.auth.has_access_token(request):
-            appservice = self.auth.get_appservice_by_req(request)
-
         # fork off as soon as possible for ASes which have completely
         # different registration flows to normal users

         # == Application Service Registration ==
-        if appservice:
+        if body.get("type") == APP_SERVICE_REGISTRATION_TYPE:
+            if not self.auth.has_access_token(request):
+                raise SynapseError(
+                    400,
+                    "Appservice token must be provided when using a type of m.login.application_service",
+                )
+
+            # Verify the AS
+            self.auth.get_appservice_by_req(request)
+
             # Set the desired user according to the AS API (which uses the
             # 'user' key not 'username'). Since this is a new addition, we'll
             # fallback to 'username' if they gave one.

@@ -459,6 +463,11 @@ class RegisterRestServlet(RestServlet):
             )

             return 200, result
+        elif self.auth.has_access_token(request):
+            raise SynapseError(
+                400,
+                "An access token should not be provided on requests to /register (except if type is m.login.application_service)",
+            )

         # == Normal User Registration == (everyone else)
         if not self._registration_enabled:

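The upshot of these hunks is that appservice registration must now be explicit rather than inferred from the presence of a token. A request that satisfies the new checks would look roughly like this (token and localpart are placeholders):

    # POST to the client-server /register endpoint with the AS token:
    headers = {"Authorization": "Bearer <as_token>"}
    body = {
        "type": "m.login.application_service",
        "username": "_bot_alice",  # an AS-namespaced localpart
    }
    # Supplying any other access token, or this type without a token,
    # is now rejected with a 400.
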
@@ -488,7 +488,7 @@ class DatabasePool:
         exception_callbacks: List[_CallbackListEntry],
         func: "Callable[..., R]",
         *args: Any,
-        **kwargs: Any
+        **kwargs: Any,
     ) -> R:
         """Start a new database transaction with the given connection.

@@ -622,7 +622,7 @@ class DatabasePool:
         func: "Callable[..., R]",
         *args: Any,
         db_autocommit: bool = False,
-        **kwargs: Any
+        **kwargs: Any,
     ) -> R:
         """Starts a transaction on the database and runs a given function

@@ -682,7 +682,7 @@ class DatabasePool:
         func: "Callable[..., R]",
         *args: Any,
         db_autocommit: bool = False,
-        **kwargs: Any
+        **kwargs: Any,
     ) -> R:
         """Wraps the .runWithConnection() method on the underlying db_pool.

@@ -775,7 +775,7 @@ class DatabasePool:
         desc: str,
         decoder: Optional[Callable[[Cursor], R]],
         query: str,
-        *args: Any
+        *args: Any,
     ) -> R:
         """Runs a single query for a result set.

@@ -900,7 +900,7 @@ class DatabasePool:
         table: str,
         keyvalues: Dict[str, Any],
         values: Dict[str, Any],
-        insertion_values: Dict[str, Any] = {},
+        insertion_values: Optional[Dict[str, Any]] = None,
         desc: str = "simple_upsert",
         lock: bool = True,
     ) -> Optional[bool]:

@@ -927,6 +927,8 @@ class DatabasePool:
         Native upserts always return None. Emulated upserts return True if a
         new entry was created, False if an existing one was updated.
         """
+        insertion_values = insertion_values or {}
+
         attempts = 0
         while True:
             try:

@@ -964,7 +966,7 @@ class DatabasePool:
         table: str,
         keyvalues: Dict[str, Any],
         values: Dict[str, Any],
-        insertion_values: Dict[str, Any] = {},
+        insertion_values: Optional[Dict[str, Any]] = None,
         lock: bool = True,
     ) -> Optional[bool]:
         """

@@ -982,6 +984,8 @@ class DatabasePool:
         Native upserts always return None. Emulated upserts return True if a
         new entry was created, False if an existing one was updated.
         """
+        insertion_values = insertion_values or {}
+
         if self.engine.can_native_upsert and table not in self._unsafe_to_upsert_tables:
             self.simple_upsert_txn_native_upsert(
                 txn, table, keyvalues, values, insertion_values=insertion_values

@@ -1003,7 +1007,7 @@ class DatabasePool:
         table: str,
         keyvalues: Dict[str, Any],
         values: Dict[str, Any],
-        insertion_values: Dict[str, Any] = {},
+        insertion_values: Optional[Dict[str, Any]] = None,
         lock: bool = True,
     ) -> bool:
         """

@@ -1017,6 +1021,8 @@ class DatabasePool:
         Returns True if a new entry was created, False if an existing
         one was updated.
         """
+        insertion_values = insertion_values or {}
+
         # We need to lock the table :(, unless we're *really* careful
         if lock:
             self.engine.lock_table(txn, table)

@@ -1077,7 +1083,7 @@ class DatabasePool:
         table: str,
         keyvalues: Dict[str, Any],
         values: Dict[str, Any],
-        insertion_values: Dict[str, Any] = {},
+        insertion_values: Optional[Dict[str, Any]] = None,
     ) -> None:
         """
         Use the native UPSERT functionality in recent PostgreSQL versions.

@@ -1090,7 +1096,7 @@ class DatabasePool:
         """
         allvalues = {}  # type: Dict[str, Any]
         allvalues.update(keyvalues)
-        allvalues.update(insertion_values)
+        allvalues.update(insertion_values or {})

         if not values:
             latter = "NOTHING"

@@ -1513,7 +1519,7 @@ class DatabasePool:
         column: str,
         iterable: Iterable[Any],
         retcols: Iterable[str],
-        keyvalues: Dict[str, Any] = {},
+        keyvalues: Optional[Dict[str, Any]] = None,
         desc: str = "simple_select_many_batch",
         batch_size: int = 100,
     ) -> List[Any]:

@@ -1531,6 +1537,8 @@ class DatabasePool:
         desc: description of the transaction, for logging and metrics
         batch_size: the number of rows for each select query
         """
+        keyvalues = keyvalues or {}
+
         results = []  # type: List[Dict[str, Any]]

         if not iterable:

@@ -2059,69 +2067,18 @@ def make_in_list_sql_clause(
 KV = TypeVar("KV")


-def make_tuple_comparison_clause(
-    database_engine: BaseDatabaseEngine, keys: List[Tuple[str, KV]]
-) -> Tuple[str, List[KV]]:
+def make_tuple_comparison_clause(keys: List[Tuple[str, KV]]) -> Tuple[str, List[KV]]:
     """Returns a tuple comparison SQL clause

-    Depending what the SQL engine supports, builds a SQL clause that looks like either
-    "(a, b) > (?, ?)", or "(a > ?) OR (a == ? AND b > ?)".
+    Builds a SQL clause that looks like "(a, b) > (?, ?)"

     Args:
-        database_engine
         keys: A set of (column, value) pairs to be compared.

     Returns:
         A tuple of SQL query and the args
     """
-    if database_engine.supports_tuple_comparison:
-        return (
-            "(%s) > (%s)" % (",".join(k[0] for k in keys), ",".join("?" for _ in keys)),
-            [k[1] for k in keys],
-        )
-
-    # we want to build a clause
-    # (a > ?) OR
-    # (a == ? AND b > ?) OR
-    # (a == ? AND b == ? AND c > ?)
-    # ...
-    # (a == ? AND b == ? AND ... AND z > ?)
-    #
-    # or, equivalently:
-    #
-    # (a > ? OR (a == ? AND
-    #    (b > ? OR (b == ? AND
-    #      ...
-    #        (y > ? OR (y == ? AND
-    #          z > ?
-    #        ))
-    #      ...
-    #    ))
-    # ))
-    #
-    # which itself is equivalent to (and apparently easier for the query optimiser):
-    #
-    # (a >= ? AND (a > ? OR
-    #    (b >= ? AND (b > ? OR
-    #      ...
-    #        (y >= ? AND (y > ? OR
-    #          z > ?
-    #        ))
-    #      ...
-    #    ))
-    # ))
-    #
-    clause = ""
-    args = []  # type: List[KV]
-    for k, v in keys[:-1]:
-        clause = clause + "(%s >= ? AND (%s > ? OR " % (k, k)
-        args.extend([v, v])
-
-    (k, v) = keys[-1]
-    clause += "%s > ?" % (k,)
-    args.append(v)
-
-    clause += "))" * (len(keys) - 1)
-    return clause, args
+    return (
+        "(%s) > (%s)" % (",".join(k[0] for k in keys), ",".join("?" for _ in keys)),
+        [k[1] for k in keys],
+    )

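With every supported engine now understanding SQL tuple comparison (see the engine changes later in this diff), the `database_engine` parameter and the nested `(a >= ? AND (a > ? OR ...))` fallback can go. The surviving branch behaves like this:

    >>> make_tuple_comparison_clause(
    ...     [("user_id", "@alice:example.com"), ("device_id", "DEV1")]
    ... )
    ('(user_id,device_id) > (?,?)', ['@alice:example.com', 'DEV1'])
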
@@ -298,7 +298,6 @@ class ClientIpBackgroundUpdateStore(SQLBaseStore):
             # times, which is fine.

             where_clause, where_args = make_tuple_comparison_clause(
-                self.database_engine,
                 [("user_id", last_user_id), ("device_id", last_device_id)],
             )

@@ -985,7 +985,7 @@ class DeviceBackgroundUpdateStore(SQLBaseStore):

         def _txn(txn):
             clause, args = make_tuple_comparison_clause(
-                self.db_pool.engine, [(x, last_row[x]) for x in KEY_COLS]
+                [(x, last_row[x]) for x in KEY_COLS]
             )
             sql = """
                 SELECT stream_id, destination, user_id, device_id, MAX(ts) AS ts

@@ -320,8 +320,8 @@ class PersistEventsStore:
         txn: LoggingTransaction,
         events_and_contexts: List[Tuple[EventBase, EventContext]],
         backfilled: bool,
-        state_delta_for_room: Dict[str, DeltaState] = {},
-        new_forward_extremeties: Dict[str, List[str]] = {},
+        state_delta_for_room: Optional[Dict[str, DeltaState]] = None,
+        new_forward_extremeties: Optional[Dict[str, List[str]]] = None,
     ):
         """Insert some number of room events into the necessary database tables.

@@ -342,6 +342,9 @@ class PersistEventsStore:
             extremities.

         """
+        state_delta_for_room = state_delta_for_room or {}
+        new_forward_extremeties = new_forward_extremeties or {}
+
         all_events_and_contexts = events_and_contexts

         min_stream_order = events_and_contexts[0][0].internal_metadata.stream_ordering

@@ -838,7 +838,6 @@ class EventsBackgroundUpdatesStore(SQLBaseStore):
             # We want to do a `(topological_ordering, stream_ordering) > (?,?)`
             # comparison, but that is not supported on older SQLite versions
             tuple_clause, tuple_args = make_tuple_comparison_clause(
-                self.database_engine,
                 [
                     ("events.room_id", last_room_id),
                     ("topological_ordering", last_depth),

@@ -1171,7 +1171,7 @@ class GroupServerStore(GroupServerWorkerStore):
         user_id: str,
         membership: str,
         is_admin: bool = False,
-        content: JsonDict = {},
+        content: Optional[JsonDict] = None,
         local_attestation: Optional[dict] = None,
         remote_attestation: Optional[dict] = None,
         is_publicised: bool = False,

@@ -1192,6 +1192,8 @@ class GroupServerStore(GroupServerWorkerStore):
             is_publicised: Whether this should be publicised.
         """
+        content = content or {}
+
         def _register_user_group_membership_txn(txn, next_id):
             # TODO: Upsert?
             self.db_pool.simple_delete_txn(

@@ -521,13 +521,11 @@ class RoomWorkerStore(SQLBaseStore):
         )

     @cached(max_entries=10000)
-    async def get_ratelimit_for_user(self, user_id):
-        """Check if there are any overrides for ratelimiting for the given
-        user
+    async def get_ratelimit_for_user(self, user_id: str) -> Optional[RatelimitOverride]:
+        """Check if there are any overrides for ratelimiting for the given user

         Args:
-            user_id (str)
+            user_id: user ID of the user
         Returns:
             RatelimitOverride if there is an override, else None. If the contents
             of RatelimitOverride are None or 0 then ratelimiting has been

@@ -549,6 +547,62 @@ class RoomWorkerStore(SQLBaseStore):
         else:
             return None

+    async def set_ratelimit_for_user(
+        self, user_id: str, messages_per_second: int, burst_count: int
+    ) -> None:
+        """Sets an overridden ratelimit for a user.
+
+        Args:
+            user_id: user ID of the user
+            messages_per_second: The number of actions that can be performed in a second.
+            burst_count: How many actions can be performed before being limited.
+        """
+
+        def set_ratelimit_txn(txn):
+            self.db_pool.simple_upsert_txn(
+                txn,
+                table="ratelimit_override",
+                keyvalues={"user_id": user_id},
+                values={
+                    "messages_per_second": messages_per_second,
+                    "burst_count": burst_count,
+                },
+            )
+
+            self._invalidate_cache_and_stream(
+                txn, self.get_ratelimit_for_user, (user_id,)
+            )
+
+        await self.db_pool.runInteraction("set_ratelimit", set_ratelimit_txn)
+
+    async def delete_ratelimit_for_user(self, user_id: str) -> None:
+        """Delete an overridden ratelimit for a user.
+
+        Args:
+            user_id: user ID of the user
+        """
+
+        def delete_ratelimit_txn(txn):
+            row = self.db_pool.simple_select_one_txn(
+                txn,
+                table="ratelimit_override",
+                keyvalues={"user_id": user_id},
+                retcols=["user_id"],
+                allow_none=True,
+            )
+
+            if not row:
+                return
+
+            # They are there, delete them.
+            self.db_pool.simple_delete_one_txn(
+                txn, "ratelimit_override", keyvalues={"user_id": user_id}
+            )
+
+            self._invalidate_cache_and_stream(
+                txn, self.get_ratelimit_for_user, (user_id,)
+            )
+
+        await self.db_pool.runInteraction("delete_ratelimit", delete_ratelimit_txn)
+
     @cached()
     async def get_retention_policy_for_room(self, room_id):
         """Get the retention policy for a given room.

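Note the pairing in the code above: `get_ratelimit_for_user` is memoised with `@cached()`, so both write paths must invalidate that cache from inside the transaction via `_invalidate_cache_and_stream` (which also streams the invalidation to other workers). The bare-bones shape of the pattern, using only the standard library rather than Synapse's cache machinery:

    import functools

    _table = {}  # stand-in for the ratelimit_override table


    @functools.lru_cache(maxsize=None)
    def get_override(user_id: str):
        return _table.get(user_id)


    def set_override(user_id: str, value) -> None:
        _table[user_id] = value
        # Without invalidation, readers would keep seeing the stale entry.
        # (lru_cache can only clear wholesale; Synapse evicts per-key.)
        get_override.cache_clear()
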
@@ -190,7 +190,7 @@ class StateGroupWorkerStore(EventsWorkerStore, SQLBaseStore):

     # FIXME: how should this be cached?
     async def get_filtered_current_state_ids(
-        self, room_id: str, state_filter: StateFilter = StateFilter.all()
+        self, room_id: str, state_filter: Optional[StateFilter] = None
     ) -> StateMap[str]:
         """Get the current state event of a given type for a room based on the
         current_state_events table. This may not be as up-to-date as the result

@@ -205,7 +205,9 @@ class StateGroupWorkerStore(EventsWorkerStore, SQLBaseStore):
             Map from type/state_key to event ID.
         """

-        where_clause, where_args = state_filter.make_sql_filter_clause()
+        where_clause, where_args = (
+            state_filter or StateFilter.all()
+        ).make_sql_filter_clause()

         if not where_clause:
             # We delegate to the cached version

@@ -14,6 +14,7 @@
 # limitations under the License.

 import logging
+from typing import Optional

 from synapse.storage._base import SQLBaseStore
 from synapse.storage.database import DatabasePool

@@ -73,8 +74,10 @@ class StateGroupBackgroundUpdateStore(SQLBaseStore):
         return count

     def _get_state_groups_from_groups_txn(
-        self, txn, groups, state_filter=StateFilter.all()
+        self, txn, groups, state_filter: Optional[StateFilter] = None
     ):
+        state_filter = state_filter or StateFilter.all()
+
         results = {group: {} for group in groups}

         where_clause, where_args = state_filter.make_sql_filter_clause()

|
@ -15,7 +15,7 @@
|
|||
|
||||
import logging
|
||||
from collections import namedtuple
|
||||
from typing import Dict, Iterable, List, Set, Tuple
|
||||
from typing import Dict, Iterable, List, Optional, Set, Tuple
|
||||
|
||||
from synapse.api.constants import EventTypes
|
||||
from synapse.storage._base import SQLBaseStore
|
||||
|
@ -210,7 +210,7 @@ class StateGroupDataStore(StateBackgroundUpdateStore, SQLBaseStore):
|
|||
return state_filter.filter_state(state_dict_ids), not missing_types
|
||||
|
||||
async def _get_state_for_groups(
|
||||
self, groups: Iterable[int], state_filter: StateFilter = StateFilter.all()
|
||||
self, groups: Iterable[int], state_filter: Optional[StateFilter] = None
|
||||
) -> Dict[int, MutableStateMap[str]]:
|
||||
"""Gets the state at each of a list of state groups, optionally
|
||||
filtering by type/state_key
|
||||
|
@ -223,6 +223,7 @@ class StateGroupDataStore(StateBackgroundUpdateStore, SQLBaseStore):
|
|||
Returns:
|
||||
Dict of state group to state map.
|
||||
"""
|
||||
state_filter = state_filter or StateFilter.all()
|
||||
|
||||
member_filter, non_member_filter = state_filter.get_member_split()
|
||||
|
||||
|
|
|
@ -42,14 +42,6 @@ class BaseDatabaseEngine(Generic[ConnectionType], metaclass=abc.ABCMeta):
|
|||
"""
|
||||
...
|
||||
|
||||
@property
|
||||
@abc.abstractmethod
|
||||
def supports_tuple_comparison(self) -> bool:
|
||||
"""
|
||||
Do we support comparing tuples, i.e. `(a, b) > (c, d)`?
|
||||
"""
|
||||
...
|
||||
|
||||
@property
|
||||
@abc.abstractmethod
|
||||
def supports_using_any_list(self) -> bool:
|
||||
|
|
|
@@ -47,8 +47,8 @@ class PostgresEngine(BaseDatabaseEngine):
         self._version = db_conn.server_version

         # Are we on a supported PostgreSQL version?
-        if not allow_outdated_version and self._version < 90500:
-            raise RuntimeError("Synapse requires PostgreSQL 9.5+ or above.")
+        if not allow_outdated_version and self._version < 90600:
+            raise RuntimeError("Synapse requires PostgreSQL 9.6 or above.")

         with db_conn.cursor() as txn:
             txn.execute("SHOW SERVER_ENCODING")

@@ -129,13 +129,6 @@ class PostgresEngine(BaseDatabaseEngine):
         """
         return True

-    @property
-    def supports_tuple_comparison(self):
-        """
-        Do we support comparing tuples, i.e. `(a, b) > (c, d)`?
-        """
-        return True
-
     @property
     def supports_using_any_list(self):
         """Do we support using `a = ANY(?)` and passing a list"""

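The integer check works because psycopg2 reports the server version in PostgreSQL's numeric encoding, where 9.6.21 becomes 90621 and 13.2 becomes 130002, so `< 90600` catches every release before 9.6. To see the value for a live connection:

    import psycopg2

    conn = psycopg2.connect("dbname=synapse")  # connection string is illustrative
    print(conn.server_version)  # e.g. 90621 for 9.6.21, 130002 for 13.2
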
@@ -56,14 +56,6 @@ class Sqlite3Engine(BaseDatabaseEngine["sqlite3.Connection"]):
         """
         return self.module.sqlite_version_info >= (3, 24, 0)

-    @property
-    def supports_tuple_comparison(self):
-        """
-        Do we support comparing tuples, i.e. `(a, b) > (c, d)`? This requires
-        SQLite 3.15+.
-        """
-        return self.module.sqlite_version_info >= (3, 15, 0)
-
     @property
     def supports_using_any_list(self):
         """Do we support using `a = ANY(?)` and passing a list"""

@@ -72,8 +64,11 @@ class Sqlite3Engine(BaseDatabaseEngine["sqlite3.Connection"]):
     def check_database(self, db_conn, allow_outdated_version: bool = False):
         if not allow_outdated_version:
             version = self.module.sqlite_version_info
-            if version < (3, 11, 0):
-                raise RuntimeError("Synapse requires sqlite 3.11 or above.")
+            # Synapse is untested against older SQLite versions, and we don't want
+            # to let users upgrade to a version of Synapse with broken support for their
+            # sqlite version, because it risks leaving them with a half-upgraded db.
+            if version < (3, 22, 0):
+                raise RuntimeError("Synapse requires sqlite 3.22 or above.")

     def check_new_database(self, txn):
         """Gets called when setting up a brand new database. This allows us to

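`sqlite_version_info` describes the SQLite library that Python's `sqlite3` module is linked against, which is exactly what the raised floor tests; an admin can check theirs with:

    import sqlite3

    print(sqlite3.sqlite_version_info)  # e.g. (3, 22, 0); must now be >= (3, 22, 0)
    print(sqlite3.version)  # version of the Python module itself, *not* the library
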
@@ -449,7 +449,7 @@ class StateGroupStorage:
         return self.stores.state._get_state_groups_from_groups(groups, state_filter)

     async def get_state_for_events(
-        self, event_ids: Iterable[str], state_filter: StateFilter = StateFilter.all()
+        self, event_ids: Iterable[str], state_filter: Optional[StateFilter] = None
     ) -> Dict[str, StateMap[EventBase]]:
         """Given a list of event_ids and type tuples, return a list of state
         dicts for each event.

@@ -465,7 +465,7 @@ class StateGroupStorage:

         groups = set(event_to_groups.values())
         group_to_state = await self.stores.state._get_state_for_groups(
-            groups, state_filter
+            groups, state_filter or StateFilter.all()
         )

         state_event_map = await self.stores.main.get_events(

@@ -485,7 +485,7 @@ class StateGroupStorage:
         return {event: event_to_state[event] for event in event_ids}

     async def get_state_ids_for_events(
-        self, event_ids: Iterable[str], state_filter: StateFilter = StateFilter.all()
+        self, event_ids: Iterable[str], state_filter: Optional[StateFilter] = None
     ) -> Dict[str, StateMap[str]]:
         """
         Get the state dicts corresponding to a list of events, containing the event_ids

@@ -502,7 +502,7 @@ class StateGroupStorage:

         groups = set(event_to_groups.values())
         group_to_state = await self.stores.state._get_state_for_groups(
-            groups, state_filter
+            groups, state_filter or StateFilter.all()
         )

         event_to_state = {

@@ -513,7 +513,7 @@ class StateGroupStorage:
         return {event: event_to_state[event] for event in event_ids}

     async def get_state_for_event(
-        self, event_id: str, state_filter: StateFilter = StateFilter.all()
+        self, event_id: str, state_filter: Optional[StateFilter] = None
     ) -> StateMap[EventBase]:
         """
         Get the state dict corresponding to a particular event

@@ -525,11 +525,13 @@ class StateGroupStorage:
         Returns:
             A dict from (type, state_key) -> state_event
         """
-        state_map = await self.get_state_for_events([event_id], state_filter)
+        state_map = await self.get_state_for_events(
+            [event_id], state_filter or StateFilter.all()
+        )
         return state_map[event_id]

     async def get_state_ids_for_event(
-        self, event_id: str, state_filter: StateFilter = StateFilter.all()
+        self, event_id: str, state_filter: Optional[StateFilter] = None
     ) -> StateMap[str]:
         """
         Get the state dict corresponding to a particular event

@@ -541,11 +543,13 @@ class StateGroupStorage:
         Returns:
             A dict from (type, state_key) -> state_event
         """
-        state_map = await self.get_state_ids_for_events([event_id], state_filter)
+        state_map = await self.get_state_ids_for_events(
+            [event_id], state_filter or StateFilter.all()
+        )
         return state_map[event_id]

     def _get_state_for_groups(
-        self, groups: Iterable[int], state_filter: StateFilter = StateFilter.all()
+        self, groups: Iterable[int], state_filter: Optional[StateFilter] = None
     ) -> Awaitable[Dict[int, MutableStateMap[str]]]:
         """Gets the state at each of a list of state groups, optionally
         filtering by type/state_key

@@ -558,7 +562,9 @@ class StateGroupStorage:
         Returns:
             Dict of state group to state map.
         """
-        return self.stores.state._get_state_for_groups(groups, state_filter)
+        return self.stores.state._get_state_for_groups(
+            groups, state_filter or StateFilter.all()
+        )

     async def store_state_group(
         self,

@@ -17,7 +17,7 @@ import logging
 import threading
 from collections import OrderedDict
 from contextlib import contextmanager
-from typing import Dict, List, Optional, Set, Tuple, Union
+from typing import Dict, Iterable, List, Optional, Set, Tuple, Union

 import attr

@@ -91,7 +91,14 @@ class StreamIdGenerator:
         # ... persist event ...
     """

-    def __init__(self, db_conn, table, column, extra_tables=[], step=1):
+    def __init__(
+        self,
+        db_conn,
+        table,
+        column,
+        extra_tables: Iterable[Tuple[str, str]] = (),
+        step=1,
+    ):
         assert step != 0
         self._lock = threading.Lock()
         self._step = step

@@ -57,12 +57,14 @@ def enumerate_leaves(node, depth):
 class _Node:
     __slots__ = ["prev_node", "next_node", "key", "value", "callbacks"]

-    def __init__(self, prev_node, next_node, key, value, callbacks=set()):
+    def __init__(
+        self, prev_node, next_node, key, value, callbacks: Optional[set] = None
+    ):
         self.prev_node = prev_node
         self.next_node = next_node
         self.key = key
         self.value = value
-        self.callbacks = callbacks
+        self.callbacks = callbacks or set()


 class LruCache(Generic[KT, VT]):

@@ -176,10 +178,10 @@ class LruCache(Generic[KT, VT]):

         self.len = synchronized(cache_len)

-        def add_node(key, value, callbacks=set()):
+        def add_node(key, value, callbacks: Optional[set] = None):
             prev_node = list_root
             next_node = prev_node.next_node
-            node = _Node(prev_node, next_node, key, value, callbacks)
+            node = _Node(prev_node, next_node, key, value, callbacks or set())
             prev_node.next_node = node
             next_node.prev_node = node
             cache[key] = node

@@ -237,7 +239,7 @@ class LruCache(Generic[KT, VT]):
         def cache_get(
             key: KT,
             default: Optional[T] = None,
-            callbacks: Iterable[Callable[[], None]] = [],
+            callbacks: Iterable[Callable[[], None]] = (),
             update_metrics: bool = True,
         ):
             node = cache.get(key, None)

@@ -253,7 +255,7 @@ class LruCache(Generic[KT, VT]):
             return default

         @synchronized
-        def cache_set(key: KT, value: VT, callbacks: Iterable[Callable[[], None]] = []):
+        def cache_set(key: KT, value: VT, callbacks: Iterable[Callable[[], None]] = ()):
             node = cache.get(key, None)
             if node is not None:
                 # We sometimes store large objects, e.g. dicts, which cause

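The `callbacks=set()` default fixed above is the same definition-time pitfall as the earlier `[]` and `{}` defaults, but with sharper teeth in a cache: every node created without explicit callbacks shared one set, so a callback registered on one entry would fire for all of them. Condensed (illustrative class, not the real `_Node`):

    class Node:
        def __init__(self, key, callbacks=set()):  # one set shared by all nodes!
            self.key = key
            self.callbacks = callbacks


    a, b = Node("a"), Node("b")
    a.callbacks.add(lambda: print("evicted"))
    assert b.callbacks  # b silently inherited a's callback
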
@@ -16,8 +16,7 @@
 import logging
 import warnings
 from io import StringIO
-
-from mock import Mock
+from unittest.mock import Mock

 from pyperf import perf_counter

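The long run of test-file changes that follows is mechanical: `unittest.mock` has been in the standard library since Python 3.3 with the same API as the third-party `mock` backport, so each file needs only an import swap:

    # Before (third-party backport):
    #     from mock import Mock, patch
    # After (standard library, Python 3.3+):
    from unittest.mock import Mock, patch

    m = Mock(return_value=42)
    assert m() == 42
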
@@ -13,7 +13,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

-from mock import Mock
+from unittest.mock import Mock

 import pymacaroons

@@ -12,7 +12,7 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-from mock import Mock, patch
+from unittest.mock import Mock, patch

 from parameterized import parameterized

@@ -13,8 +13,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 import re
-
-from mock import Mock
+from unittest.mock import Mock

 from twisted.internet import defer

@@ -12,7 +12,7 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-from mock import Mock
+from unittest.mock import Mock

 from twisted.internet import defer

@@ -13,8 +13,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 import time
-
-from mock import Mock
+from unittest.mock import Mock

 import attr
 import canonicaljson

@@ -13,8 +13,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 from typing import Dict, Iterable, List, Optional, Set, Tuple, Union
-
-from mock import Mock
+from unittest.mock import Mock

 import attr

@@ -314,7 +313,8 @@ class PresenceRouterTestCase(FederatingHomeserverTestCase):
             self.hs.get_federation_transport_client().send_transaction.call_args_list
         )
         for call in calls:
-            federation_transaction = call.args[0]  # type: Transaction
+            call_args = call[0]
+            federation_transaction = call_args[0]  # type: Transaction

             # Get the sent EDUs in this transaction
             edus = federation_transaction.get_dict()["edus"]

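The `call.args` accessor on the removed line only exists on `unittest.mock`'s `call` objects from Python 3.8 onwards; indexing with `call[0]` (positional args) and `call[1]` (keyword args) works on every version the test matrix covers, hence the rewrite:

    from unittest.mock import Mock

    m = Mock()
    m("txn", limit=5)
    args, kwargs = m.call_args[0], m.call_args[1]  # portable to Python 3.6
    assert args == ("txn",) and kwargs == {"limit": 5}
    # m.call_args.args / .kwargs would require Python 3.8+
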
@@ -13,7 +13,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

-from mock import Mock
+from unittest.mock import Mock

 from synapse.api.errors import Codes, SynapseError
 from synapse.rest import admin

@@ -1,6 +1,5 @@
 from typing import List, Tuple
-
-from mock import Mock
+from unittest.mock import Mock

 from synapse.api.constants import EventTypes
 from synapse.events import EventBase

@@ -13,8 +13,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 from typing import Optional
-
-from mock import Mock
+from unittest.mock import Mock

 from signedjson import key, sign
 from signedjson.types import BaseKey, SigningKey

@@ -14,8 +14,7 @@
 # limitations under the License.

 from collections import Counter
-
-from mock import Mock
+from unittest.mock import Mock

 import synapse.api.errors
 import synapse.handlers.admin

@@ -13,7 +13,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

-from mock import Mock
+from unittest.mock import Mock

 from twisted.internet import defer

@@ -12,7 +12,7 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-from mock import Mock
+from unittest.mock import Mock

 import pymacaroons

@@ -11,7 +11,7 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-from mock import Mock
+from unittest.mock import Mock

 from synapse.handlers.cas_handler import CasResponse

@@ -14,7 +14,7 @@
 # limitations under the License.


-from mock import Mock
+from unittest.mock import Mock

 import synapse
 import synapse.api.errors

@@ -14,7 +14,7 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-import mock
+from unittest import mock

 from signedjson import key as key, sign as sign

@@ -16,8 +16,7 @@
 # limitations under the License.

 import copy
-
-import mock
+from unittest import mock

 from synapse.api.errors import SynapseError

@@ -14,10 +14,9 @@
 # limitations under the License.
 import json
 import os
+from unittest.mock import ANY, Mock, patch
 from urllib.parse import parse_qs, urlparse

-from mock import ANY, Mock, patch
-
 import pymacaroons

 from synapse.handlers.sso import MappingException

@@ -16,8 +16,7 @@
 """Tests for the password_auth_provider interface"""

 from typing import Any, Type, Union
-
-from mock import Mock
+from unittest.mock import Mock

 from twisted.internet import defer

@@ -14,7 +14,7 @@
 # limitations under the License.


-from mock import Mock, call
+from unittest.mock import Mock, call

 from signedjson.key import generate_signing_key

@@ -13,7 +13,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

-from mock import Mock
+from unittest.mock import Mock

 import synapse.types
 from synapse.api.errors import AuthError, SynapseError

@@ -13,7 +13,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

-from mock import Mock
+from unittest.mock import Mock

 from synapse.api.auth import Auth
 from synapse.api.constants import UserTypes

@@ -13,8 +13,7 @@
 # limitations under the License.

 from typing import Optional
-
-from mock import Mock
+from unittest.mock import Mock

 import attr

@@ -16,8 +16,7 @@

 import json
 from typing import Dict
-
-from mock import ANY, Mock, call
+from unittest.mock import ANY, Mock, call

 from twisted.internet import defer
 from twisted.web.resource import Resource

Some files were not shown because too many files have changed in this diff.