Merge remote-tracking branch 'origin/develop' into matrix-org-hotfixes

commit 9d2d3bdee6
@@ -27,10 +27,10 @@ which is under the Unlicense licence.
 {{- . -}}{{- "\n" -}}
 {{- end -}}
 {{- with .TestCases -}}
-{{- /* Failing tests are first */ -}}
+{{- /* Passing tests are first */ -}}
 {{- range . -}}
-{{- if and (ne .Result "PASS") (ne .Result "SKIP") -}}
+{{- if eq .Result "PASS" -}}
-::group::{{ "\033" }}[0;31m❌{{ " " }}{{- .Name -}}
+::group::{{ "\033" }}[0;32m✅{{ " " }}{{- .Name -}}
 {{- "\033" -}}[0;37m ({{if $settings.ShowTestStatus}}{{.Result}}; {{end}}{{ .Duration -}}
 {{- with .Coverage -}}
 , coverage: {{ . }}%
@@ -47,7 +47,6 @@ which is under the Unlicense licence.
 {{- end -}}
 {{- end -}}
 
-
 {{- /* Then skipped tests are second */ -}}
 {{- range . -}}
 {{- if eq .Result "SKIP" -}}
@@ -68,11 +67,10 @@ which is under the Unlicense licence.
 {{- end -}}
 {{- end -}}
 
-
-{{- /* Then passing tests are last */ -}}
+{{- /* and failing tests are last */ -}}
 {{- range . -}}
-{{- if eq .Result "PASS" -}}
+{{- if and (ne .Result "PASS") (ne .Result "SKIP") -}}
-::group::{{ "\033" }}[0;32m✅{{ " " }}{{- .Name -}}
+::group::{{ "\033" }}[0;31m❌{{ " " }}{{- .Name -}}
 {{- "\033" -}}[0;37m ({{if $settings.ShowTestStatus}}{{.Result}}; {{end}}{{ .Duration -}}
 {{- with .Coverage -}}
 , coverage: {{ . }}%
@@ -0,0 +1,128 @@
+#!/usr/bin/env python
+# Copyright 2022 The Matrix.org Foundation C.I.C.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Calculate the trial jobs to run based on if we're in a PR or not.
+
+import json
+import os
+
+IS_PR = os.environ["GITHUB_REF"].startswith("refs/pull/")
+
+# First calculate the various trial jobs.
+#
+# For each type of test we only run on Py3.7 on PRs
+
+trial_sqlite_tests = [
+    {
+        "python-version": "3.7",
+        "database": "sqlite",
+        "extras": "all",
+    }
+]
+
+if not IS_PR:
+    trial_sqlite_tests.extend(
+        {
+            "python-version": version,
+            "database": "sqlite",
+            "extras": "all",
+        }
+        for version in ("3.8", "3.9", "3.10")
+    )
+
+trial_postgres_tests = [
+    {
+        "python-version": "3.7",
+        "database": "postgres",
+        "postgres-version": "10",
+        "extras": "all",
+    }
+]
+
+if not IS_PR:
+    trial_postgres_tests.append(
+        {
+            "python-version": "3.10",
+            "database": "postgres",
+            "postgres-version": "14",
+            "extras": "all",
+        }
+    )
+
+trial_no_extra_tests = [
+    {
+        "python-version": "3.7",
+        "database": "sqlite",
+        "extras": "",
+    }
+]
+
+print("::group::Calculated trial jobs")
+print(
+    json.dumps(
+        trial_sqlite_tests + trial_postgres_tests + trial_no_extra_tests, indent=4
+    )
+)
+print("::endgroup::")
+
+test_matrix = json.dumps(
+    trial_sqlite_tests + trial_postgres_tests + trial_no_extra_tests
+)
+print(f"::set-output name=trial_test_matrix::{test_matrix}")
+
+
+# First calculate the various sytest jobs.
+#
+# For each type of test we only run on focal on PRs
+
+sytest_tests = [
+    {
+        "sytest-tag": "focal",
+    },
+    {
+        "sytest-tag": "focal",
+        "postgres": "postgres",
+    },
+    {
+        "sytest-tag": "focal",
+        "postgres": "multi-postgres",
+        "workers": "workers",
+    },
+]
+
+if not IS_PR:
+    sytest_tests.extend(
+        [
+            {
+                "sytest-tag": "testing",
+                "postgres": "postgres",
+            },
+            {
+                "sytest-tag": "buster",
+                "postgres": "multi-postgres",
+                "workers": "workers",
+            },
+        ]
+    )
+
+
+print("::group::Calculated sytest jobs")
+print(json.dumps(sytest_tests, indent=4))
+print("::endgroup::")
+
+test_matrix = json.dumps(sytest_tests)
+print(f"::set-output name=sytest_test_matrix::{test_matrix}")
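Since `GITHUB_REF` is the script's only input, the generated matrices can be previewed locally. A minimal sketch, assuming the script lives at `.ci/scripts/calculate_jobs.py` (the path the workflow below invokes) and that the ref values are hypothetical:

```bash
# PR build: only the Python 3.7 jobs should be printed
GITHUB_REF=refs/pull/123/merge python .ci/scripts/calculate_jobs.py

# branch push: the full job matrices should be printed
GITHUB_REF=refs/heads/develop python .ci/scripts/calculate_jobs.py
```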
@@ -0,0 +1,21 @@
+#!/bin/bash
+#
+# wraps `gotestfmt`, hiding output from successful packages unless
+# all tests passed.
+
+set -o pipefail
+set -e
+
+# tee the test results to a log, whilst also piping them into gotestfmt,
+# telling it to hide successful results, so that we can clearly see
+# unsuccessful results.
+tee complement.log | gotestfmt -hide successful-packages
+
+# gotestfmt will exit non-zero if there were any failures, so if we got to this
+# point, we must have had a successful result.
+echo "All tests successful; showing all test results"
+
+# Pipe the test results back through gotestfmt, showing all results.
+# The log file consists of JSON lines giving the test results, interspersed
+# with regular stdout lines (including reports of downloaded packages).
+grep '^{"Time":' complement.log | gotestfmt
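The wrapper consumes `go test -json`-style output on stdin, just as `gotestfmt` itself does, so callers simply swap it into the pipeline. A sketch mirroring the workflow changes below:

```bash
COMPLEMENT_DIR=`pwd`/complement synapse/scripts-dev/complement.sh -json 2>&1 \
    | synapse/.ci/scripts/gotestfmt
```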
@@ -163,7 +163,7 @@ jobs:
 
       - run: |
           set -o pipefail
-          TEST_ONLY_IGNORE_POETRY_LOCKFILE=1 POSTGRES=${{ (matrix.database == 'Postgres') && 1 || '' }} WORKERS=${{ (matrix.arrangement == 'workers') && 1 || '' }} COMPLEMENT_DIR=`pwd`/complement synapse/scripts-dev/complement.sh -json 2>&1 | gotestfmt
+          TEST_ONLY_IGNORE_POETRY_LOCKFILE=1 POSTGRES=${{ (matrix.database == 'Postgres') && 1 || '' }} WORKERS=${{ (matrix.arrangement == 'workers') && 1 || '' }} COMPLEMENT_DIR=`pwd`/complement synapse/scripts-dev/complement.sh -json 2>&1 | synapse/.ci/scripts/gotestfmt
         shell: bash
         name: Run Complement Tests
@@ -73,53 +73,48 @@ jobs:
     steps:
       - run: "true"
 
-  trial:
+  calculate-test-jobs:
     if: ${{ !cancelled() && !failure() }} # Allow previous steps to be skipped, but not fail
     needs: linting-done
     runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v2
+      - uses: actions/setup-python@v2
+      - id: get-matrix
+        run: .ci/scripts/calculate_jobs.py
+    outputs:
+      trial_test_matrix: ${{ steps.get-matrix.outputs.trial_test_matrix }}
+      sytest_test_matrix: ${{ steps.get-matrix.outputs.sytest_test_matrix }}
+
+  trial:
+    if: ${{ !cancelled() && !failure() }} # Allow previous steps to be skipped, but not fail
+    needs: calculate-test-jobs
+    runs-on: ubuntu-latest
     strategy:
       matrix:
-        python-version: ["3.7", "3.8", "3.9", "3.10"]
-        database: ["sqlite"]
-        extras: ["all"]
-        include:
-          # Newest Python without optional deps
-          - python-version: "3.10"
-            extras: ""
-
-          # Oldest Python with PostgreSQL
-          - python-version: "3.7"
-            database: "postgres"
-            postgres-version: "10"
-            extras: "all"
-
-          # Newest Python with newest PostgreSQL
-          - python-version: "3.10"
-            database: "postgres"
-            postgres-version: "14"
-            extras: "all"
+        job: ${{ fromJson(needs.calculate-test-jobs.outputs.trial_test_matrix) }}
 
     steps:
       - uses: actions/checkout@v2
       - run: sudo apt-get -qq install xmlsec1
-      - name: Set up PostgreSQL ${{ matrix.postgres-version }}
-        if: ${{ matrix.postgres-version }}
+      - name: Set up PostgreSQL ${{ matrix.job.postgres-version }}
+        if: ${{ matrix.job.postgres-version }}
         run: |
           docker run -d -p 5432:5432 \
             -e POSTGRES_PASSWORD=postgres \
            -e POSTGRES_INITDB_ARGS="--lc-collate C --lc-ctype C --encoding UTF8" \
-            postgres:${{ matrix.postgres-version }}
+            postgres:${{ matrix.job.postgres-version }}
       - uses: matrix-org/setup-python-poetry@v1
         with:
-          python-version: ${{ matrix.python-version }}
-          extras: ${{ matrix.extras }}
+          python-version: ${{ matrix.job.python-version }}
+          extras: ${{ matrix.job.extras }}
       - name: Await PostgreSQL
-        if: ${{ matrix.postgres-version }}
+        if: ${{ matrix.job.postgres-version }}
         timeout-minutes: 2
         run: until pg_isready -h localhost; do sleep 1; done
       - run: poetry run trial --jobs=2 tests
         env:
-          SYNAPSE_POSTGRES: ${{ matrix.database == 'postgres' || '' }}
+          SYNAPSE_POSTGRES: ${{ matrix.job.database == 'postgres' || '' }}
           SYNAPSE_POSTGRES_HOST: localhost
           SYNAPSE_POSTGRES_USER: postgres
           SYNAPSE_POSTGRES_PASSWORD: postgres
@@ -198,45 +193,24 @@ jobs:
 
   sytest:
     if: ${{ !failure() && !cancelled() }}
-    needs: linting-done
+    needs: calculate-test-jobs
    runs-on: ubuntu-latest
     container:
-      image: matrixdotorg/sytest-synapse:${{ matrix.sytest-tag }}
+      image: matrixdotorg/sytest-synapse:${{ matrix.job.sytest-tag }}
       volumes:
         - ${{ github.workspace }}:/src
       env:
         SYTEST_BRANCH: ${{ github.head_ref }}
-        POSTGRES: ${{ matrix.postgres && 1}}
-        MULTI_POSTGRES: ${{ (matrix.postgres == 'multi-postgres') && 1}}
-        WORKERS: ${{ matrix.workers && 1 }}
-        REDIS: ${{ matrix.redis && 1 }}
-        BLACKLIST: ${{ matrix.workers && 'synapse-blacklist-with-workers' }}
+        POSTGRES: ${{ matrix.job.postgres && 1}}
+        MULTI_POSTGRES: ${{ (matrix.job.postgres == 'multi-postgres') && 1}}
+        WORKERS: ${{ matrix.job.workers && 1 }}
+        BLACKLIST: ${{ matrix.job.workers && 'synapse-blacklist-with-workers' }}
         TOP: ${{ github.workspace }}
 
     strategy:
       fail-fast: false
       matrix:
-        include:
-          - sytest-tag: focal
-
-          - sytest-tag: focal
-            postgres: postgres
-
-          - sytest-tag: testing
-            postgres: postgres
-
-          - sytest-tag: focal
-            postgres: multi-postgres
-            workers: workers
-
-          - sytest-tag: buster
-            postgres: multi-postgres
-            workers: workers
-
-          - sytest-tag: buster
-            postgres: postgres
-            workers: workers
-            redis: redis
-
+        job: ${{ fromJson(needs.calculate-test-jobs.outputs.sytest_test_matrix) }}
 
     steps:
       - uses: actions/checkout@v2
@@ -252,7 +226,7 @@ jobs:
         uses: actions/upload-artifact@v2
         if: ${{ always() }}
         with:
-          name: Sytest Logs - ${{ job.status }} - (${{ join(matrix.*, ', ') }})
+          name: Sytest Logs - ${{ job.status }} - (${{ join(matrix.job.*, ', ') }})
           path: |
             /logs/results.tap
             /logs/**/*.log*
@@ -283,7 +257,6 @@ jobs:
       - run: sudo apt-get -qq install xmlsec1
       - uses: matrix-org/setup-python-poetry@v1
         with:
-          python-version: ${{ matrix.python-version }}
           extras: "postgres"
       - run: .ci/scripts/test_export_data_command.sh
 
@@ -354,7 +327,7 @@ jobs:
 
       - run: |
           set -o pipefail
-          POSTGRES=${{ (matrix.database == 'Postgres') && 1 || '' }} WORKERS=${{ (matrix.arrangement == 'workers') && 1 || '' }} COMPLEMENT_DIR=`pwd`/complement synapse/scripts-dev/complement.sh -json 2>&1 | gotestfmt
+          POSTGRES=${{ (matrix.database == 'Postgres') && 1 || '' }} WORKERS=${{ (matrix.arrangement == 'workers') && 1 || '' }} COMPLEMENT_DIR=`pwd`/complement synapse/scripts-dev/complement.sh -json 2>&1 | synapse/.ci/scripts/gotestfmt
         shell: bash
         name: Run Complement Tests
@@ -0,0 +1,28 @@
+name: Move new issues into the issue triage board
+
+on:
+  issues:
+    types: [ opened ]
+
+jobs:
+  add_new_issues:
+    name: Add new issues to the triage board
+    runs-on: ubuntu-latest
+    steps:
+      - uses: octokit/graphql-action@v2.x
+        id: add_to_project
+        with:
+          headers: '{"GraphQL-Features": "projects_next_graphql"}'
+          query: |
+            mutation add_to_project($projectid:ID!,$contentid:ID!) {
+              addProjectV2ItemById(input: {projectId: $projectid contentId: $contentid}) {
+                item {
+                  id
+                }
+              }
+            }
+          projectid: ${{ env.PROJECT_ID }}
+          contentid: ${{ github.event.issue.node_id }}
+        env:
+          PROJECT_ID: "PVT_kwDOAIB0Bs4AFDdZ"
+          GITHUB_TOKEN: ${{ secrets.ELEMENT_BOT_TOKEN }}
@@ -0,0 +1,44 @@
+name: Move labelled issues to correct projects
+
+on:
+  issues:
+    types: [ labeled ]
+
+jobs:
+  move_needs_info:
+    name: Move X-Needs-Info on the triage board
+    runs-on: ubuntu-latest
+    if: >
+      contains(github.event.issue.labels.*.name, 'X-Needs-Info')
+    steps:
+      - uses: octokit/graphql-action@v2.x
+        id: add_to_project
+        with:
+          headers: '{"GraphQL-Features": "projects_next_graphql"}'
+          query: |
+            mutation {
+              updateProjectV2ItemFieldValue(
+                input: {
+                  projectId: $projectid
+                  itemId: $contentid
+                  fieldId: $fieldid
+                  value: {
+                    singleSelectOptionId: "Todo"
+                  }
+                }
+              ) {
+                projectV2Item {
+                  id
+                }
+              }
+            }
+
+          projectid: ${{ env.PROJECT_ID }}
+          contentid: ${{ github.event.issue.node_id }}
+          fieldid: ${{ env.FIELD_ID }}
+          optionid: ${{ env.OPTION_ID }}
+        env:
+          PROJECT_ID: "PVT_kwDOAIB0Bs4AFDdZ"
+          GITHUB_TOKEN: ${{ secrets.ELEMENT_BOT_TOKEN }}
+          FIELD_ID: "PVTSSF_lADOAIB0Bs4AFDdZzgC6ZA4"
+          OPTION_ID: "ba22e43c"
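When debugging these board automations, the same kind of mutation can be replayed by hand against GitHub's GraphQL endpoint. A sketch, assuming `$GITHUB_TOKEN` holds a token with project access and using a hypothetical issue node ID:

```bash
curl -s https://api.github.com/graphql \
    -H "Authorization: bearer $GITHUB_TOKEN" \
    -H 'GraphQL-Features: projects_next_graphql' \
    -d '{"query": "mutation($projectid: ID!, $contentid: ID!) { addProjectV2ItemById(input: {projectId: $projectid, contentId: $contentid}) { item { id } } }", "variables": {"projectid": "PVT_kwDOAIB0Bs4AFDdZ", "contentid": "<issue-node-id>"}}'
```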
@@ -137,7 +137,7 @@ jobs:
 
       - run: |
           set -o pipefail
-          TEST_ONLY_SKIP_DEP_HASH_VERIFICATION=1 POSTGRES=${{ (matrix.database == 'Postgres') && 1 || '' }} WORKERS=${{ (matrix.arrangement == 'workers') && 1 || '' }} COMPLEMENT_DIR=`pwd`/complement synapse/scripts-dev/complement.sh -json 2>&1 | gotestfmt
+          TEST_ONLY_SKIP_DEP_HASH_VERIFICATION=1 POSTGRES=${{ (matrix.database == 'Postgres') && 1 || '' }} WORKERS=${{ (matrix.arrangement == 'workers') && 1 || '' }} COMPLEMENT_DIR=`pwd`/complement synapse/scripts-dev/complement.sh -json 2>&1 | synapse/.ci/scripts/gotestfmt
         shell: bash
         name: Run Complement Tests
CHANGES.md (46 changed lines)
@@ -1,21 +1,39 @@
+Synapse 1.66.0 (2022-08-31)
+===========================
+
+No significant changes since 1.66.0rc2.
+
+This release removes the ability for homeservers to delegate email ownership
+verification and password reset confirmation to identity servers. This removal
+was originally planned for Synapse 1.64, but was later deferred until now. See
+the [upgrade notes](https://matrix-org.github.io/synapse/v1.66/upgrade.html#upgrading-to-v1660) for more details.
+
+Deployments with multiple workers should note that the direct TCP replication
+configuration was deprecated in Synapse v1.18.0 and will be removed in Synapse
+v1.67.0. In particular, the TCP `replication` [listener](https://matrix-org.github.io/synapse/v1.66/usage/configuration/config_documentation.html#listeners)
+type (not to be confused with the `replication` resource on the `http` listener
+type) and the `worker_replication_port` config option will be removed.
+
+To migrate to Redis, add the [`redis` config](https://matrix-org.github.io/synapse/v1.66/workers.html#shared-configuration),
+then remove the TCP `replication` listener from config of the master and
+`worker_replication_port` from worker config. Note that an HTTP listener with a
+`replication` resource is still required. See the
+[worker documentation](https://matrix-org.github.io/synapse/v1.66/workers.html)
+for more details.
+
+
 Synapse 1.66.0rc2 (2022-08-30)
 ==============================
 
 Bugfixes
 --------
 
-- Fix rate limit gauge metrics registering twice and misreporting (`synapse_rate_limit_sleep_affected_hosts`, `synapse_rate_limit_reject_affected_hosts`). ([\#13649](https://github.com/matrix-org/synapse/issues/13649))
+- Fix a bug introduced in Synapse 1.66.0rc1 where the new rate limit metrics were misreported (`synapse_rate_limit_sleep_affected_hosts`, `synapse_rate_limit_reject_affected_hosts`). ([\#13649](https://github.com/matrix-org/synapse/issues/13649))
 
 
 Synapse 1.66.0rc1 (2022-08-23)
 ==============================
 
-This release removes the ability for homeservers to delegate email ownership
-verification and password reset confirmation to identity servers. This removal
-was originally planned for Synapse 1.64, but was later deferred until now.
-
-See the [upgrade notes](https://matrix-org.github.io/synapse/v1.66/upgrade.html#upgrading-to-v1660) for more details.
-
 Features
 --------
 
@@ -390,6 +408,20 @@ No significant changes since 1.62.0rc3.
 
 Authors of spam-checker plugins should consult the [upgrade notes](https://github.com/matrix-org/synapse/blob/release-v1.62/docs/upgrade.md#upgrading-to-v1620) to learn about the enriched signatures for spam checker callbacks, which are supported with this release of Synapse.
 
+## Security advisory
+
+The following issue is fixed in 1.62.0.
+
+* [GHSA-jhjh-776m-4765](https://github.com/matrix-org/synapse/security/advisories/GHSA-jhjh-776m-4765) / [CVE-2022-31152](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2022-31152)
+
+  Synapse instances prior to 1.62.0 did not implement the Matrix [event authorization rules](https://spec.matrix.org/v1.3/rooms/v10/#authorization-rules) correctly. An attacker could craft events which would be accepted by Synapse but not a spec-conformant server, potentially causing divergence in the room state between servers.
+
+  Homeservers with federation disabled via the [`federation_domain_whitelist`](https://matrix-org.github.io/synapse/latest/usage/configuration/config_documentation.html#federation_domain_whitelist) config option are unaffected.
+
+  Administrators of homeservers with federation enabled are advised to upgrade to v1.62.0 or higher.
+
+  Fixed by [#13087](https://github.com/matrix-org/synapse/pull/13087) and [#13088](https://github.com/matrix-org/synapse/pull/13088).
+
 Synapse 1.62.0rc3 (2022-07-04)
 ==============================
 
@@ -3,7 +3,7 @@ Synapse |support| |development| |documentation| |license| |pypi| |python|
 =========================================================================
 
 Synapse is an open-source `Matrix <https://matrix.org/>`_ homeserver written and
-maintained by the Matrix.org Foundation. We began rapid development began in 2014,
+maintained by the Matrix.org Foundation. We began rapid development in 2014,
 reaching v1.0.0 in 2019. Development on Synapse and the Matrix protocol itself continues
 in earnest today.
 
@@ -0,0 +1 @@
+Drop support for calling `/_matrix/client/v3/rooms/{roomId}/invite` without an `id_access_token`, which was not permitted by the spec. Contributed by @Vetchu.

@@ -0,0 +1 @@
+Extend the release script to wait for GitHub Actions to finish and to be usable as a guide for the whole process.

@@ -0,0 +1 @@
+Fix a bug introduced in Synapse 1.13 where the [List Rooms admin API](https://matrix-org.github.io/synapse/develop/admin_api/rooms.html#list-room-api) would return integers instead of booleans for the `federatable` and `public` fields when using a Sqlite database.

@@ -0,0 +1 @@
+Add experimental configuration option to allow disabling legacy Prometheus metric names.

@@ -0,0 +1 @@
+Fix a bug where users cannot `/forget` rooms after the last member has left the room.

@@ -0,0 +1 @@
+Remove redundant `_get_joined_users_from_context` cache. Contributed by Nick @ Beeper (@fizzadar).

@@ -0,0 +1 @@
+Cache user IDs instead of profiles to reduce cache memory usage. Contributed by Nick @ Beeper (@fizzadar).

@@ -0,0 +1 @@
+Optimize how Synapse calculates domains to fetch from during backfill.

@@ -0,0 +1 @@
+Faster Room Joins: fix `/make_knock` blocking indefinitely when the room in question is a partial-stated room.

@@ -0,0 +1 @@
+Fix loading the current stream position behind the actual position.

@@ -0,0 +1 @@
+Comment about a better future where we can get the state diff between two events.

@@ -0,0 +1 @@
+Instrument `_check_sigs_and_hash_and_fetch` to trace time spent in child concurrent calls for understandable traces in Jaeger.

@@ -0,0 +1 @@
+Improve performance of `@cachedList`.

@@ -0,0 +1 @@
+Minor speed up of fetching large numbers of push rules.

@@ -0,0 +1 @@
+Optimise push action fetching queries. Contributed by Nick @ Beeper (@fizzadar).

@@ -0,0 +1 @@
+Cache user IDs instead of profiles to reduce cache memory usage. Contributed by Nick @ Beeper (@fizzadar).

@@ -0,0 +1 @@
+Improve the description of the ["chain cover index"](https://matrix-org.github.io/synapse/latest/auth_chain_difference_algorithm.html) used internally by Synapse.

@@ -0,0 +1 @@
+Rename `event_map` to `unpersisted_events` when computing the auth differences.

@@ -0,0 +1 @@
+Refactor `get_users_in_room(room_id)` mis-use with dedicated `get_current_hosts_in_room(room_id)` function.

@@ -0,0 +1 @@
+Use dedicated `get_local_users_in_room(room_id)` function to find local users when calculating `join_authorised_via_users_server` of a `/make_join` request.

@@ -0,0 +1 @@
+Refactor `get_users_in_room(room_id)` mis-use to lookup single local user with dedicated `check_local_user_in_room(...)` function.

@@ -0,0 +1 @@
+Support setting the registration shared secret in a file, via a new `registration_shared_secret_path` configuration option.

@@ -0,0 +1 @@
+Change the default startup behaviour so that any missing "additional" configuration files (signing key, etc) are generated automatically.

@@ -0,0 +1 @@
+Fix a longstanding bug in `register_new_matrix_user` which meant it was always necessary to explicitly give a server URL.

@@ -0,0 +1 @@
+Document how ["monthly active users"](https://matrix-org.github.io/synapse/latest/usage/administration/monthly_active_users.html) is calculated and used.

@@ -0,0 +1 @@
+Drop unused column `application_services_state.last_txn`.

@@ -0,0 +1 @@
+Fix the running of MSC1763 retention purge_jobs in deployments with background jobs running on a worker by forcing them back onto the main worker. Contributed by Brad @ Beeper.

@@ -0,0 +1 @@
+Improve performance of sending messages in rooms with thousands of local users.

@@ -0,0 +1 @@
+Improve readability of Complement CI logs by printing failure results last.

@@ -0,0 +1 @@
+Improve documentation around user registration.

@@ -0,0 +1 @@
+Remove documentation of legacy `frontend_proxy` worker app.

@@ -0,0 +1 @@
+Remove the ability to use direct TCP replication with workers. Direct TCP replication was deprecated in Synapse v1.18.0. Workers now require using Redis.

@@ -0,0 +1 @@
+Remove support for unstable [private read receipts](https://github.com/matrix-org/matrix-spec-proposals/pull/2285).

@@ -0,0 +1 @@
+Clarify documentation that HTTP replication traffic can be protected with a shared secret.

@@ -0,0 +1 @@
+Fix a long-standing bug where downloaded media for URL previews was not deleted while database background updates were running.

@@ -0,0 +1 @@
+Fix MSC3030 `/timestamp_to_event` endpoint to return the correct next event when the events have the same timestamp.

@@ -0,0 +1 @@
+Fix a bug where we wedge media plugins if clients disconnect early. Introduced in v1.22.0.

@@ -0,0 +1 @@
+Generalise the `@cancellable` annotation so it can be used on functions other than just servlet methods.

@@ -0,0 +1 @@
+Remove unintentional colons from [config manual](https://matrix-org.github.io/synapse/latest/usage/configuration/config_documentation.html) headers.

@@ -0,0 +1 @@
+Introduce a `CommonUsageMetrics` class to share some usage metrics between the Prometheus exporter and the phone home stats.

@@ -0,0 +1 @@
+Update docs to make enabling metrics clearer.

@@ -0,0 +1 @@
+Add some logging to help track down #13444.

@@ -0,0 +1 @@
+Fix a long-standing bug which meant that keys for unwhitelisted servers were not returned by `/_matrix/key/v2/query`.

@@ -0,0 +1 @@
+Update docker image to use a stable version of poetry.

@@ -0,0 +1 @@
+Update poetry lock file for v1.2.0.

@@ -0,0 +1 @@
+Remove support for unstable [private read receipts](https://github.com/matrix-org/matrix-spec-proposals/pull/2285).

@@ -0,0 +1 @@
+Add cache to `is_partial_state_room`.

@@ -0,0 +1 @@
+Fix a bug introduced in Synapse v1.20.0 that would cause the unstable unread counts from [MSC2654](https://github.com/matrix-org/matrix-spec-proposals/pull/2654) to be calculated even if the feature is disabled.

@@ -0,0 +1 @@
+Update the Grafana dashboard that is included with Synapse in the `contrib` directory.

@@ -0,0 +1 @@
+Only run trial CI on all python versions on non-PRs.

@@ -0,0 +1 @@
+Clarify `(room_id, event_id)` global uniqueness and how we should scope our database schemas.

@@ -0,0 +1 @@
+Fix typechecking with latest types-jsonschema.

@@ -0,0 +1 @@
+Reduce number of CI checks we run for PRs.
@@ -3244,6 +3244,104 @@
         "yaxis": {
           "align": false
         }
+      },
+      {
+        "datasource": {
+          "type": "prometheus",
+          "uid": "${DS_PROMETHEUS}"
+        },
+        "description": "Average number of hosts being rate limited across each worker type.",
+        "fieldConfig": {
+          "defaults": {
+            "color": {
+              "mode": "palette-classic"
+            },
+            "custom": {
+              "axisLabel": "",
+              "axisPlacement": "auto",
+              "barAlignment": 0,
+              "drawStyle": "line",
+              "fillOpacity": 0,
+              "gradientMode": "none",
+              "hideFrom": {
+                "legend": false,
+                "tooltip": false,
+                "viz": false
+              },
+              "lineInterpolation": "linear",
+              "lineWidth": 1,
+              "pointSize": 5,
+              "scaleDistribution": {
+                "type": "linear"
+              },
+              "showPoints": "auto",
+              "spanNulls": false,
+              "stacking": {
+                "group": "A",
+                "mode": "none"
+              },
+              "thresholdsStyle": {
+                "mode": "off"
+              }
+            },
+            "mappings": [],
+            "thresholds": {
+              "mode": "absolute",
+              "steps": [
+                {
+                  "color": "green"
+                }
+              ]
+            },
+            "unit": "none"
+          },
+          "overrides": []
+        },
+        "gridPos": {
+          "h": 9,
+          "w": 12,
+          "x": 12,
+          "y": 53
+        },
+        "id": 225,
+        "options": {
+          "legend": {
+            "calcs": [],
+            "displayMode": "list",
+            "placement": "bottom"
+          },
+          "tooltip": {
+            "mode": "single",
+            "sort": "desc"
+          }
+        },
+        "targets": [
+          {
+            "datasource": {
+              "type": "prometheus",
+              "uid": "${DS_PROMETHEUS}"
+            },
+            "editorMode": "builder",
+            "expr": "avg by(job, rate_limiter_name) (synapse_rate_limit_sleep_affected_hosts{instance=\"$instance\", job=~\"$job\", index=~\"$index\"})",
+            "hide": false,
+            "legendFormat": "Slept by {{job}}:{{rate_limiter_name}}",
+            "range": true,
+            "refId": "B"
+          },
+          {
+            "datasource": {
+              "type": "prometheus",
+              "uid": "${DS_PROMETHEUS}"
+            },
+            "editorMode": "builder",
+            "expr": "avg by(job, rate_limiter_name) (synapse_rate_limit_reject_affected_hosts{instance=\"$instance\", job=~\"$job\", index=~\"$index\"})",
+            "legendFormat": "Rejected by {{job}}:{{rate_limiter_name}}",
+            "range": true,
+            "refId": "A"
+          }
+        ],
+        "title": "Hosts being rate limited",
+        "type": "timeseries"
       }
     ],
     "targets": [

@@ -6404,7 +6502,7 @@
         "h": 13,
         "w": 12,
         "x": 0,
-        "y": 10
+        "y": 35
       },
       "hiddenSeries": false,
       "id": 12,

@@ -6502,7 +6600,7 @@
         "h": 13,
         "w": 12,
         "x": 12,
-        "y": 10
+        "y": 35
       },
       "hiddenSeries": false,
       "id": 26,

@@ -6601,7 +6699,7 @@
         "h": 13,
         "w": 12,
         "x": 0,
-        "y": 23
+        "y": 48
       },
       "hiddenSeries": false,
       "id": 13,

@@ -6705,7 +6803,7 @@
         "h": 13,
         "w": 12,
         "x": 12,
-        "y": 23
+        "y": 48
       },
       "hiddenSeries": false,
       "id": 27,

@@ -6803,7 +6901,7 @@
         "h": 13,
         "w": 12,
         "x": 0,
-        "y": 36
+        "y": 61
       },
       "hiddenSeries": false,
       "id": 28,

@@ -6900,7 +6998,7 @@
         "h": 13,
         "w": 12,
         "x": 12,
-        "y": 36
+        "y": 61
       },
       "hiddenSeries": false,
       "id": 25,

@@ -6935,7 +7033,7 @@
       "datasource": {
         "uid": "$datasource"
       },
-      "expr": "rate(synapse_util_metrics_block_time_seconds{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}[$bucket_size]) / rate(synapse_util_metrics_block_count[$bucket_size])",
+      "expr": "rate(synapse_util_metrics_block_time_seconds{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}[$bucket_size]) / rate(synapse_util_metrics_block_count{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}[$bucket_size])",
       "format": "time_series",
       "interval": "",
       "intervalFactor": 2,

@@ -6960,11 +7058,13 @@
       },
       "yaxes": [
         {
-          "format": "ms",
+          "$$hashKey": "object:180",
+          "format": "s",
           "logBase": 1,
           "show": true
         },
         {
+          "$$hashKey": "object:181",
           "format": "short",
           "logBase": 1,
           "show": true

@@ -6988,7 +7088,7 @@
         "h": 15,
         "w": 12,
         "x": 0,
-        "y": 49
+        "y": 74
       },
       "hiddenSeries": false,
       "id": 154,

@@ -7009,7 +7109,7 @@
         "alertThreshold": true
       },
       "percentage": false,
-      "pluginVersion": "8.4.3",
+      "pluginVersion": "9.0.4",
       "pointradius": 2,
       "points": false,
       "renderer": "flot",

@@ -7109,7 +7209,7 @@
         "h": 10,
         "w": 12,
         "x": 0,
-        "y": 36
+        "y": 69
       },
       "hiddenSeries": false,
       "id": 1,

@@ -7211,7 +7311,7 @@
         "h": 10,
         "w": 12,
         "x": 12,
-        "y": 36
+        "y": 69
       },
       "hiddenSeries": false,
       "id": 8,

@@ -7311,7 +7411,7 @@
         "h": 10,
         "w": 12,
         "x": 0,
-        "y": 46
+        "y": 79
      },
       "hiddenSeries": false,
       "id": 38,

@@ -7407,7 +7507,7 @@
         "h": 10,
         "w": 12,
         "x": 12,
-        "y": 46
+        "y": 79
       },
       "hiddenSeries": false,
       "id": 39,

@@ -7415,11 +7515,16 @@
       "alignAsTable": true,
       "avg": false,
       "current": false,
-      "max": false,
+      "hideEmpty": false,
+      "hideZero": false,
+      "max": true,
       "min": false,
+      "rightSide": false,
       "show": true,
+      "sort": "max",
+      "sortDesc": true,
       "total": false,
-      "values": false
+      "values": true
       },
       "lines": true,
       "linewidth": 1,

@@ -7467,11 +7572,13 @@
       },
       "yaxes": [
         {
+          "$$hashKey": "object:101",
           "format": "rps",
           "logBase": 1,
           "show": true
         },
         {
+          "$$hashKey": "object:102",
           "format": "short",
           "logBase": 1,
           "show": true

@@ -7501,7 +7608,7 @@
         "h": 9,
         "w": 12,
         "x": 0,
-        "y": 56
+        "y": 89
       },
       "hiddenSeries": false,
       "id": 65,

@@ -11757,8 +11864,8 @@
     ]
   },
   "time": {
-    "from": "2022-07-22T04:08:13.716Z",
-    "to": "2022-07-22T18:44:27.863Z"
+    "from": "now-3h",
+    "to": "now"
   },
   "timepicker": {
     "now": true,

@@ -11789,6 +11896,6 @@
   "timezone": "",
   "title": "Synapse",
   "uid": "000000012",
-  "version": 124,
+  "version": 132,
   "weekStart": ""
 }
@@ -36,7 +36,7 @@ TEMP_VENV="$(mktemp -d)"
 python3 -m venv "$TEMP_VENV"
 source "$TEMP_VENV/bin/activate"
 pip install -U pip
-pip install poetry==1.2.0b1
+pip install poetry==1.2.0
 poetry export \
     --extras all \
     --extras test \
@@ -1,3 +1,24 @@
+matrix-synapse-py3 (1.66.0ubuntu1) UNRELEASED; urgency=medium
+
+  * Use stable poetry 1.2.0 version, rather than a prerelease.
+
+ -- Erik Johnston <erik@matrix.org>  Thu, 01 Sep 2022 13:48:31 +0100
+
+matrix-synapse-py3 (1.66.0) stable; urgency=medium
+
+  * New Synapse release 1.66.0.
+
+ -- Synapse Packaging team <packages@matrix.org>  Wed, 31 Aug 2022 11:20:17 +0100
+
+matrix-synapse-py3 (1.66.0~rc2+nmu1) UNRELEASED; urgency=medium
+
+  * Update debhelper to compatibility level 12.
+  * Drop the preinst script stopping synapse.
+  * Allocate a group for the system user.
+  * Change dpkg-statoverride to --force-statoverride-add.
+
+ -- Jörg Behrmann <behrmann@physik.fu-berlin.de>  Tue, 23 Aug 2022 17:17:00 +0100
+
 matrix-synapse-py3 (1.66.0~rc2) stable; urgency=medium
 
   * New Synapse release 1.66.0rc2.
@@ -1 +0,0 @@
-10
@@ -4,7 +4,7 @@ Priority: extra
 Maintainer: Synapse Packaging team <packages@matrix.org>
 # keep this list in sync with the build dependencies in docker/Dockerfile-dhvirtualenv.
 Build-Depends:
- debhelper (>= 10),
+ debhelper-compat (= 12),
  dh-virtualenv (>= 1.1),
  libsystemd-dev,
  libpq-dev,
@@ -40,12 +40,12 @@ EOF
 /opt/venvs/matrix-synapse/lib/manage_debconf.pl update
 
 if ! getent passwd $USER >/dev/null; then
-    adduser --quiet --system --no-create-home --home /var/lib/matrix-synapse $USER
+    adduser --quiet --system --group --no-create-home --home /var/lib/matrix-synapse $USER
 fi
 
 for DIR in /var/lib/matrix-synapse /var/log/matrix-synapse /etc/matrix-synapse; do
     if ! dpkg-statoverride --list --quiet $DIR >/dev/null; then
-        dpkg-statoverride --force --quiet --update --add $USER nogroup 0755 $DIR
+        dpkg-statoverride --force-statoverride-add --quiet --update --add $USER "$(id -gn $USER)" 0755 $DIR
     fi
 done
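With this change the package's system user gains a dedicated group, and the directory overrides record that group instead of `nogroup`. A sketch for verifying the result on an installed system, assuming the default `matrix-synapse` user:

```bash
id matrix-synapse                                 # should now list a matching group
dpkg-statoverride --list /var/lib/matrix-synapse  # shows the recorded owner/group/mode
```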
@@ -1,31 +0,0 @@
-#!/bin/sh -e
-
-# Attempt to undo some of the braindamage caused by
-# https://github.com/matrix-org/package-synapse-debian/issues/18.
-#
-# Due to reasons [1], the old python2 matrix-synapse package will not stop the
-# service when the package is uninstalled. Our maintainer scripts will do the
-# right thing in terms of ensuring the service is enabled and unmasked, but
-# then do a `systemctl start matrix-synapse`, which of course does nothing -
-# leaving the old (py2) service running.
-#
-# There should normally be no reason for the service to be running during our
-# preinst, so we assume that if it *is* running, it's due to that situation,
-# and stop it.
-#
-# [1] dh_systemd_start doesn't do anything because it sees that there is an
-# init.d script with the same name, so leaves it to dh_installinit.
-#
-# dh_installinit doesn't do anything because somebody gave it a --no-start
-# for unknown reasons.
-
-if [ -x /bin/systemctl ]; then
-    if /bin/systemctl --quiet is-active -- matrix-synapse; then
-        echo >&2 "stopping existing matrix-synapse service"
-        /bin/systemctl stop matrix-synapse || true
-    fi
-fi
-
-#DEBHELPER#
-
-exit 0
@@ -1,2 +0,0 @@
-# Specify environment variables used when running Synapse
-# SYNAPSE_CACHE_FACTOR=0.5 (default)
@@ -5,7 +5,6 @@ Description=Synapse Matrix homeserver
 Type=notify
 User=matrix-synapse
 WorkingDirectory=/var/lib/matrix-synapse
-EnvironmentFile=-/etc/default/matrix-synapse
 ExecStartPre=/opt/venvs/matrix-synapse/bin/python -m synapse.app.homeserver --config-path=/etc/matrix-synapse/homeserver.yaml --config-path=/etc/matrix-synapse/conf.d/ --generate-keys
 ExecStart=/opt/venvs/matrix-synapse/bin/python -m synapse.app.homeserver --config-path=/etc/matrix-synapse/homeserver.yaml --config-path=/etc/matrix-synapse/conf.d/
 ExecReload=/bin/kill -HUP $MAINPID
@@ -13,5 +12,10 @@ Restart=always
 RestartSec=3
 SyslogIdentifier=matrix-synapse
 
+# The environment file is not shipped by default anymore and the below directive
+# is for backwards compatibility only. Please use your homeserver.yaml if
+# possible.
+EnvironmentFile=-/etc/default/matrix-synapse
+
 [Install]
 WantedBy=multi-user.target
@@ -6,15 +6,17 @@
 # assume we only have one package
 PACKAGE_NAME:=`dh_listpackages`
 
-override_dh_systemd_enable:
-	dh_systemd_enable --name=matrix-synapse
+override_dh_installsystemd:
+	dh_installsystemd --name=matrix-synapse
 
-override_dh_installinit:
-	dh_installinit --name=matrix-synapse
-
 # we don't really want to strip the symbols from our object files.
 override_dh_strip:
 
+# many libraries pulled from PyPI have allocatable sections after
+# non-allocatable ones on which dwz errors out. For those without the issue the
+# gains are only marginal
+override_dh_dwz:
+
 # dh_shlibdeps calls dpkg-shlibdeps, which finds all the binary files
 # (executables and shared libs) in the package, and looks for the shared
 # libraries that they depend on. It then adds a dependency on the package that
@@ -46,17 +46,8 @@ RUN \
 
 # We install poetry in its own build stage to avoid its dependencies conflicting with
 # synapse's dependencies.
-# We use a specific commit from poetry's master branch instead of our usual 1.1.14,
-# to incorporate fixes to some bugs in `poetry export`. This commit corresponds to
-# https://github.com/python-poetry/poetry/pull/5156 and
-# https://github.com/python-poetry/poetry/issues/5141 ;
-# without it, we generate a requirements.txt with incorrect environment markers,
-# which causes necessary packages to be omitted when we `pip install`.
-#
-# NB: In poetry 1.2 `poetry export` will be moved into a plugin; we'll need to also
-# pip install poetry-plugin-export (https://github.com/python-poetry/poetry-plugin-export).
 RUN --mount=type=cache,target=/root/.cache/pip \
-  pip install --user "poetry-core==1.1.0a7" "git+https://github.com/python-poetry/poetry.git@fb13b3a676f476177f7937ffa480ee5cff9a90a5"
+  pip install --user "poetry==1.2.0"
 
 WORKDIR /synapse
@@ -69,6 +69,7 @@
 - [Manhole](manhole.md)
 - [Monitoring](metrics-howto.md)
   - [Reporting Homeserver Usage Statistics](usage/administration/monitoring/reporting_homeserver_usage_statistics.md)
+  - [Monthly Active Users](usage/administration/monthly_active_users.md)
 - [Understanding Synapse Through Grafana Graphs](usage/administration/understanding_synapse_through_grafana_graphs.md)
 - [Useful SQL for Admins](usage/administration/useful_sql_for_admins.md)
 - [Database Maintenance Tools](usage/administration/database_maintenance_tools.md)
@@ -5,9 +5,9 @@ non-interactive way. This is generally used for bootstrapping a Synapse
 instance with administrator accounts.
 
 To authenticate yourself to the server, you will need both the shared secret
-(`registration_shared_secret` in the homeserver configuration), and a
-one-time nonce. If the registration shared secret is not configured, this API
-is not enabled.
+([`registration_shared_secret`](../configuration/config_documentation.md#registration_shared_secret)
+in the homeserver configuration), and a one-time nonce. If the registration
+shared secret is not configured, this API is not enabled.
 
 To fetch the nonce, you need to request one from the API:
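For context, the nonce mentioned here is fetched with a plain unauthenticated GET. A sketch, assuming a homeserver listening on localhost port 8008:

```bash
curl http://localhost:8008/_synapse/admin/v1/register
# => {"nonce": "..."}
```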
@@ -34,13 +34,45 @@ the process of indexing it).
 ## Chain Cover Index
 
 Synapse computes auth chain differences by pre-computing a "chain cover" index
-for the auth chain in a room, allowing efficient reachability queries like "is
-event A in the auth chain of event B". This is done by assigning every event a
-*chain ID* and *sequence number* (e.g. `(5,3)`), and having a map of *links*
-between chains (e.g. `(5,3) -> (2,4)`) such that A is reachable by B (i.e. `A`
-is in the auth chain of `B`) if and only if either:
+for the auth chain in a room, allowing us to efficiently make reachability queries
+like "is event `A` in the auth chain of event `B`?". We could do this with an index
+that tracks all pairs `(A, B)` such that `A` is in the auth chain of `B`. However, this
+would be prohibitively large, scaling poorly as the room accumulates more state
+events.
+
+Instead, we break down the graph into *chains*. A chain is a subset of a DAG
+with the following property: for any pair of events `E` and `F` in the chain,
+the chain contains a path `E -> F` or a path `F -> E`. This forces a chain to be
+linear (without forks), e.g. `E -> F -> G -> ... -> H`. Each event in the chain
+is given a *sequence number* local to that chain. The oldest event `E` in the
+chain has sequence number 1. If `E` has a child `F` in the chain, then `F` has
+sequence number 2. If `E` has a grandchild `G` in the chain, then `G` has
+sequence number 3; and so on.
+
+Synapse ensures that each persisted event belongs to exactly one chain, and
+tracks how the chains are connected to one another. This allows us to
+efficiently answer reachability queries. Doing so uses less storage than
+tracking reachability on an event-by-event basis, particularly when we have
+fewer and longer chains. See
+
+> Jagadish, H. (1990). [A compression technique to materialize transitive closure](https://doi.org/10.1145/99935.99944).
+> *ACM Transactions on Database Systems (TODS)*, 15*(4)*, 558-598.
+
+for the original idea or
+
+> Y. Chen, Y. Chen, [An efficient algorithm for answering graph
+> reachability queries](https://doi.org/10.1109/ICDE.2008.4497498),
+> in: 2008 IEEE 24th International Conference on Data Engineering, April 2008,
+> pp. 893–902. (PDF available via [Google Scholar](https://scholar.google.com/scholar?q=Y.%20Chen,%20Y.%20Chen,%20An%20efficient%20algorithm%20for%20answering%20graph%20reachability%20queries,%20in:%202008%20IEEE%2024th%20International%20Conference%20on%20Data%20Engineering,%20April%202008,%20pp.%20893902.).)
+
+for a more modern take.
+
+In practical terms, the chain cover assigns every event a
+*chain ID* and *sequence number* (e.g. `(5,3)`), and maintains a map of *links*
+between events in chains (e.g. `(5,3) -> (2,4)`) such that `A` is reachable by `B`
+(i.e. `A` is in the auth chain of `B`) if and only if either:
 
-1. A and B have the same chain ID and `A`'s sequence number is less than `B`'s
+1. `A` and `B` have the same chain ID and `A`'s sequence number is less than `B`'s
    sequence number; or
 2. there is a link `L` between `B`'s chain ID and `A`'s chain ID such that
    `L.start_seq_no` <= `B.seq_no` and `A.seq_no` <= `L.end_seq_no`.
@@ -49,8 +81,9 @@ There are actually two potential implementations, one where we store links from
 each chain to every other reachable chain (the transitive closure of the links
 graph), and one where we remove redundant links (the transitive reduction of the
 links graph) e.g. if we have chains `C3 -> C2 -> C1` then the link `C3 -> C1`
-would not be stored. Synapse uses the former implementations so that it doesn't
-need to recurse to test reachability between chains.
+would not be stored. Synapse uses the former implementation so that it doesn't
+need to recurse to test reachability between chains. This trades-off extra storage
+in order to save CPU cycles and DB queries.
 
 ### Example
@@ -62,6 +62,8 @@ pipx install poetry
 but see poetry's [installation instructions](https://python-poetry.org/docs/#installation)
 for other installation methods.
 
+Synapse requires Poetry version 1.2.0 or later.
+
 Next, open a terminal and install dependencies as follows:
 
 ```sh
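A quick sanity check for this requirement, matching the advice in the dependencies notes later in this changeset:

```bash
poetry --version   # expect 1.2.0 or later
```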
@@ -191,3 +191,27 @@ There are three separate aspects to this:
flavour will be accepted by SQLite 3.22, but will give a column whose
default value is the **string** `"FALSE"` - which, when cast back to a boolean
in Python, evaluates to `True`.
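The surprising truthiness is just Python's rule that any non-empty string is truthy:

```python
# A DEFAULT that leaks through as the literal string "FALSE" reads back
# as True, because only the empty string is falsy.
assert bool("FALSE") is True
assert bool("") is False
```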

## `event_id` global uniqueness

In room versions `1` and `2` it's possible to end up with two events with the
same `event_id` (in the same or different rooms). After room version `3`, that
can only happen with a hash collision, which we basically hope will never
happen.

There are several places in Synapse and even Matrix APIs like [`GET
/_matrix/federation/v1/event/{eventId}`](https://spec.matrix.org/v1.1/server-server-api/#get_matrixfederationv1eventeventid)
where we assume that event IDs are globally unique.

But hash collisions are still possible, and by treating event IDs as room
scoped, we can reduce the possibility of a hash collision. When scoping
`event_id` in the database schema, it should also be accompanied by `room_id`
(`PRIMARY KEY (room_id, event_id)`) and lookups should be done through the pair
`(room_id, event_id)`.

There has been a lot of debate on this in places like
https://github.com/matrix-org/matrix-spec-proposals/issues/2779 and
[MSC2848](https://github.com/matrix-org/matrix-spec-proposals/pull/2848) which
has no resolution yet (as of 2022-09-01).
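To make the scoping concrete, here is a small self-contained sketch of the recommended schema shape (the table and values are illustrative, not Synapse's actual schema):

```python
import sqlite3

# Keying on the pair means the same event_id can exist in two different
# rooms without colliding, and every lookup must supply both columns.
conn = sqlite3.connect(":memory:")
conn.execute(
    """
    CREATE TABLE events (
        room_id  TEXT NOT NULL,
        event_id TEXT NOT NULL,
        json     TEXT NOT NULL,
        PRIMARY KEY (room_id, event_id)
    )
    """
)
conn.execute(
    "INSERT INTO events VALUES (?, ?, ?)", ("!a:example.org", "$ev1", "{}")
)
row = conn.execute(
    "SELECT json FROM events WHERE room_id = ? AND event_id = ?",
    ("!a:example.org", "$ev1"),
).fetchone()
assert row == ("{}",)
```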
@@ -243,14 +243,11 @@ doesn't require poetry. (It's what we use in CI too). However, you could try

## Check the version of poetry with `poetry --version`.

The minimum version of poetry supported by Synapse is 1.2.

It can also be useful to check the version of `poetry-core` in use. If you've
installed `poetry` with `pipx`, try `pipx runpip poetry list | grep poetry-core`.

## Clear caches: `poetry cache clear --all pypi`.
@@ -7,7 +7,13 @@

1. Enable Synapse metrics:

   In `homeserver.yaml`, make sure `enable_metrics` is
   set to `True`.

1. Enable the `/_synapse/metrics` Synapse endpoint that Prometheus uses to
   collect data:

   There are two methods of enabling the metrics endpoint in Synapse.

   The first serves the metrics as a part of the usual web server and
   can be enabled by adding the "metrics" resource to the existing

@@ -41,9 +47,6 @@
         - '0.0.0.0'
     ```

1. Restart Synapse.

1. Add a Prometheus target for Synapse.
@@ -174,7 +174,9 @@ oidc_providers:

1. Create a regular web application for Synapse
2. Set the Allowed Callback URLs to `[synapse public baseurl]/_synapse/client/oidc/callback`
3. Add a rule with any name to add the `preferred_username` claim.
   (See https://auth0.com/docs/customize/rules/create-rules for more information on how to create rules.)

<details>
<summary>Code sample</summary>
@@ -506,9 +506,13 @@ email will be disabled.

### Registering a user

One way to create a new user is to do so from a client like
[Element](https://element.io/). This requires registration to be enabled via
the [`enable_registration`](../usage/configuration/config_documentation.md#enable_registration)
setting.

Alternatively, you can create new users from the command line. This can be done as follows:

1. If synapse was installed via pip, activate the virtualenv as follows (if Synapse was
   installed via a prebuilt package, `register_new_matrix_user` should already be
@@ -520,7 +524,7 @@ Alternatively, you can do so from the command line. This can be done as follows:
   ```
2. Run the following command:
   ```sh
   register_new_matrix_user -c homeserver.yaml
   ```

This will prompt you to add details for the new user, and will then connect to
@@ -533,12 +537,13 @@ Make admin [no]:
Success!
```

This process uses a setting
[`registration_shared_secret`](../usage/configuration/config_documentation.md#registration_shared_secret),
which is shared between Synapse itself and the `register_new_matrix_user`
script. It doesn't matter what it is (a random value is generated by
`--generate-config`), but it should be kept secret, as anyone with knowledge of
it can register users, including admin accounts, on your server even if
`enable_registration` is `false`.
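Under the hood, the script talks to the shared-secret registration API: it first fetches a one-time nonce from `GET /_synapse/admin/v1/register`, then posts back the registration details together with an HMAC-SHA1 keyed with the shared secret. A sketch of that MAC computation, following the documented admin register API:

```python
import hmac
from hashlib import sha1

def registration_mac(shared_secret: str, nonce: str, user: str,
                     password: str, admin: bool = False) -> str:
    """Compute the MAC expected by /_synapse/admin/v1/register.

    The MAC is HMAC-SHA1, keyed with the shared secret, over the
    NUL-separated registration parameters.
    """
    mac = hmac.new(shared_secret.encode("utf8"), digestmod=sha1)
    mac.update(nonce.encode("utf8"))
    mac.update(b"\x00")
    mac.update(user.encode("utf8"))
    mac.update(b"\x00")
    mac.update(password.encode("utf8"))
    mac.update(b"\x00")
    mac.update(b"admin" if admin else b"notadmin")
    return mac.hexdigest()
```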

### Setting up a TURN server
@@ -5,6 +5,8 @@ worker_name: generic_worker1
worker_replication_host: 127.0.0.1
worker_replication_http_port: 9093

worker_main_http_uri: http://localhost:8008/

worker_listeners:
  - type: http
    port: 8083
@@ -89,6 +89,28 @@ process, for example:
dpkg -i matrix-synapse-py3_1.3.0+stretch1_amd64.deb
```

# Upgrading to v1.67.0

## Direct TCP replication is no longer supported: migrate to Redis

Redis support was added in v1.13.0 with it becoming the recommended method in
v1.18.0. It replaced the old direct TCP connections (which were deprecated as of
v1.18.0) to the main process. With Redis, rather than all the workers connecting
to the main process, all the workers and the main process connect to Redis,
which relays replication commands between processes. This can give a significant
CPU saving on the main process and is a prerequisite for upcoming
performance improvements.

To migrate to Redis add the [`redis` config](./workers.md#shared-configuration),
and remove the TCP `replication` listener from the config of the master and
`worker_replication_port` from the worker config. Note that a HTTP listener with a
`replication` resource is still required.

## Minimum version of Poetry is now v1.2.0

The minimum supported version of poetry is now 1.2. This should only affect
those installing from a source checkout.

# Upgrading to v1.66.0

## Delegation of email validation no longer supported
@@ -1200,7 +1222,7 @@ updated.
When setting up worker processes, we now recommend the use of a Redis
server for replication. **The old direct TCP connection method is
deprecated and will be removed in a future release.** See
the [worker documentation](https://matrix-org.github.io/synapse/v1.66/workers.html) for more details.

# Upgrading to v1.14.0
@@ -0,0 +1,84 @@
# Monthly Active Users

Synapse can be configured to record the number of monthly active users (also referred to as MAU) on a given homeserver.
For clarity's sake, MAU only tracks local users.

Please note that the metrics recorded by the [Homeserver Usage Stats](../../usage/administration/monitoring/reporting_homeserver_usage_statistics.md)
are calculated differently. The `monthly_active_users` from the usage stats does not take into account any
of the rules below, and counts any users who have made a request to the homeserver in the last 30 days.

See the [configuration manual](../../usage/configuration/config_documentation.md#limit_usage_by_mau) for details on how to configure MAU.

## Calculating active users

Individual user activity is measured in active days. If a user performs an action, the exact time of that action is then recorded. When
calculating the MAU figure, any users with a recorded action in the last 30 days are considered part of the cohort. Days are measured
as a rolling window from the current system time to 30 days ago.

So for example, if Synapse were to calculate the active users on the 15th July at 13:25, it would include any activity from 15th June 13:25 onwards.
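A minimal sketch of that rolling-window count, assuming a simple in-memory map of user ID to last-seen timestamp (Synapse's real bookkeeping lives in the database):

```python
from datetime import datetime, timedelta

def monthly_active_users(last_seen, now):
    """Count users with any recorded action in the trailing 30 days.

    `last_seen` maps user ID -> datetime of the user's most recent action.
    """
    window_start = now - timedelta(days=30)
    return sum(1 for ts in last_seen.values() if ts >= window_start)

# Evaluated on 15th July at 13:25, activity from 15th June 13:25 counts:
now = datetime(2022, 7, 15, 13, 25)
last_seen = {
    "@active:example.org": datetime(2022, 6, 15, 13, 25),  # just inside
    "@dormant:example.org": datetime(2022, 6, 1, 0, 0),    # outside
}
assert monthly_active_users(last_seen, now) == 1
```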
A user is **never** considered active if they are either:
- Part of the trial day cohort (described below)
- Owned by an application service.
  - Note: This **only** covers users that are part of an application service `namespaces.users` registration. The namespace
    must also be marked as `exclusive`.

Otherwise, any request to Synapse will mark the user as active. Please note that registration will not mark a user as active *unless*
they register with a 3pid that is included in the config field `mau_limits_reserved_threepids`.

The Prometheus metric for MAU is refreshed every 5 minutes.

Once an hour, Synapse checks to see if any users are inactive (their only recorded activity being more than 30 days old). These users
are removed from the active users cohort. If they then become active, they are immediately restored to the cohort.

It is important to note that **deactivated** users are not immediately removed from the pool of active users, but as these users won't
perform actions they will eventually be removed from the cohort.

### Trial days

If the config option `mau_trial_days` is set, a user must have been active this many days **after** registration to be active. A user is in the
trial period if their registration timestamp (also known as the `creation_ts`) is less than `mau_trial_days` old.

As an example, if `mau_trial_days` is set to `3` and a user is active **after** 3 days (72 hours from registration time) then they will be counted as active.

The `mau_appservice_trial_days` config further extends this rule by applying different durations depending on the `appservice_id` of the user.
Users registered by an application service will be recorded with an `appservice_id` matching the `id` key in the registration file for that service.
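A minimal sketch of the trial-period rule, under the same simplified in-memory model as above (`creation_ts` as a datetime; Synapse stores it as a timestamp):

```python
from datetime import datetime, timedelta

def is_in_trial_period(creation_ts, now, mau_trial_days):
    """A user is still in their trial (and so not counted as active)
    while their account is less than `mau_trial_days` old."""
    return now - creation_ts < timedelta(days=mau_trial_days)

# With mau_trial_days = 3, activity 72h+ after registration counts:
registered = datetime(2022, 7, 1, 12, 0)
assert is_in_trial_period(registered, datetime(2022, 7, 3, 12, 0), 3)
assert not is_in_trial_period(registered, datetime(2022, 7, 4, 12, 0), 3)
```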

## Limiting usage of the homeserver when the maximum MAU is reached

If both config options `limit_usage_by_mau` and `max_mau_value` are set, and the current MAU value exceeds the maximum value, the
homeserver will begin to block some actions.

Individual users matching **any** of the below criteria never have their actions blocked:
- Considered part of the cohort of MAU users.
- Considered part of the trial period.
- Registered as a `support` user.
- Application service users if `track_appservice_user_ips` is NOT set.

Please note that server admins are **not** exempt from blocking.

The following actions are blocked when the MAU limit is exceeded:
- Logging in
- Sending events
- Creating rooms
- Syncing

Registration is also blocked for all new signups *unless* the user is registering with a threepid included in the `mau_limits_reserved_threepids`
config value.

When a request is blocked, the response will have the `errcode` `M_RESOURCE_LIMIT_EXCEEDED`.
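Putting the exemptions together, a sketch of the gating decision (the `user` attributes here are invented for illustration; Synapse's actual checks live in its auth handler):

```python
def should_block(user, current_mau, max_mau, limit_usage_by_mau,
                 track_appservice_user_ips):
    """Decide whether to reject an action with M_RESOURCE_LIMIT_EXCEEDED.

    `user` is assumed to carry .in_mau_cohort, .in_trial, .is_support and
    .appservice_id attributes (a simplification of Synapse's model).
    """
    if not limit_usage_by_mau or current_mau <= max_mau:
        return False
    # Exemptions: matching any one of these lets the action through.
    if user.in_mau_cohort or user.in_trial or user.is_support:
        return False
    if user.appservice_id is not None and not track_appservice_user_ips:
        return False
    return True
```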

## Metrics

Synapse records several different prometheus metrics for MAU.

`synapse_admin_mau:current` records the current MAU figure for native (non-application-service) users.

`synapse_admin_mau:max` records the maximum MAU as dictated by the `max_mau_value` config value.

`synapse_admin_mau_current_mau_by_service` records the current MAU including application service users. The label `app_service` can be used
to filter by a specific service ID. This *also* includes non-application-service users under `app_service=native`.

`synapse_admin_mau:registered_reserved_users` records the number of users specified in `mau_limits_reserved_threepids` which have
registered accounts on the homeserver.
@@ -431,8 +431,6 @@ Sub-options for each listener include:

* `metrics`: (see the docs [here](../../metrics-howto.md)),

* `tls`: set to true to enable TLS for this listener. Will use the TLS key/cert specified in tls_private_key_path / tls_certificate_path.

* `x_forwarded`: Only valid for an 'http' listener. Set to true to use the X-Forwarded-For header as the client IP. Useful when Synapse is
@@ -595,6 +593,8 @@ server owner wants to limit to the number of monthly active users. When enabled
reached the server returns a `ResourceLimitError` with error type `Codes.RESOURCE_LIMIT_EXCEEDED`.
Defaults to false. If this is enabled, a value for `max_mau_value` must also be set.

See [Monthly Active Users](../administration/monthly_active_users.md) for details on how to configure MAU.

Example configuration:
```yaml
limit_usage_by_mau: true
@@ -1873,8 +1873,8 @@ See [here](../../CAPTCHA_SETUP.md) for full details on setting up captcha.
---
### `recaptcha_public_key`

This homeserver's ReCAPTCHA public key. Must be specified if
[`enable_registration_captcha`](#enable_registration_captcha) is enabled.

Example configuration:
```yaml

@@ -1883,7 +1883,8 @@ recaptcha_public_key: "YOUR_PUBLIC_KEY"
---
### `recaptcha_private_key`

This homeserver's ReCAPTCHA private key. Must be specified if
[`enable_registration_captcha`](#enable_registration_captcha) is
enabled.

Example configuration:
@@ -1893,9 +1894,11 @@ recaptcha_private_key: "YOUR_PRIVATE_KEY"
---
### `enable_registration_captcha`

Set to `true` to require users to complete a CAPTCHA test when registering an account.
Requires a valid ReCaptcha public/private key.
Defaults to `false`.

Note that [`enable_registration`](#enable_registration) must also be set to allow account registration.

Example configuration:
```yaml
@@ -1971,9 +1974,21 @@ Registration can be rate-limited using the parameters in the [Ratelimiting](#rat
---
### `enable_registration`

Enable registration for new users. Defaults to `false`.

It is highly recommended that if you enable registration, you set one or more
of the following options, to avoid abuse of your server by "bots":

* [`enable_registration_captcha`](#enable_registration_captcha)
* [`registrations_require_3pid`](#registrations_require_3pid)
* [`registration_requires_token`](#registration_requires_token)

(In order to enable registration without any verification, you must also set
[`enable_registration_without_verification`](#enable_registration_without_verification).)

Note that even if this setting is disabled, new accounts can still be created
via the admin API if
[`registration_shared_secret`](#registration_shared_secret) is set.

Example configuration:
```yaml
@@ -1981,88 +1996,21 @@ enable_registration: true
```
---
### `enable_registration_without_verification`

Enable registration without email or captcha verification. Note: this option is *not* recommended,
as registration without verification is a known vector for spam and abuse. Defaults to `false`. Has no effect
unless [`enable_registration`](#enable_registration) is also enabled.

Example configuration:
```yaml
enable_registration_without_verification: true
```
---
### `registrations_require_3pid`

If this is set, users must provide all of the specified types of 3PID when registering an account.

Note that [`enable_registration`](#enable_registration) must also be set to allow account registration.

Example configuration:
```yaml
@@ -2110,9 +2058,11 @@ enable_3pid_lookup: false

Require users to submit a token during registration.
Tokens can be managed using the admin [API](../administration/admin_api/registration_tokens.md).
Disabling this option will not delete any tokens previously generated.
Defaults to `false`. Set to `true` to enable.

Note that [`enable_registration`](#enable_registration) must also be set to allow account registration.

Example configuration:
```yaml
@@ -2121,13 +2071,39 @@ registration_requires_token: true
---
### `registration_shared_secret`

If set, allows registration of standard or admin accounts by anyone who has the
shared secret, even if [`enable_registration`](#enable_registration) is not
set.

This is primarily intended for use with the `register_new_matrix_user` script
(see [Registering a user](../../setup/installation.md#registering-a-user));
however, the interface is [documented](../admin_api/register_api.html).

See also [`registration_shared_secret_path`](#registration_shared_secret_path).

Example configuration:
```yaml
registration_shared_secret: <PRIVATE STRING>
```

---
### `registration_shared_secret_path`

An alternative to [`registration_shared_secret`](#registration_shared_secret):
allows the shared secret to be specified in an external file.

The file should be a plain text file, containing only the shared secret.

If this file does not exist, Synapse will create a new shared secret on startup
and store it in this file.

Example configuration:
```yaml
registration_shared_secret_path: /path/to/secrets/file
```

_Added in Synapse 1.67.0._

---
### `bcrypt_rounds`
@@ -2358,6 +2334,79 @@ Example configuration:
```yaml
inhibit_user_in_use_error: true
```

---
## User session management
---
### `session_lifetime`

Time that a user's session remains valid for, after they log in.

Note that this is not currently compatible with guest logins.

Note also that this is calculated at login time: changes are not applied retrospectively to users who have already
logged in.

By default, this is infinite.

Example configuration:
```yaml
session_lifetime: 24h
```
---
### `refreshable_access_token_lifetime`

Time that an access token remains valid for, if the session is using refresh tokens.

For more information about refresh tokens, please see the [manual](user_authentication/refresh_tokens.md).

Note that this only applies to clients which advertise support for refresh tokens.

Note also that this is calculated at login time and refresh time: changes are not applied to
existing sessions until they are refreshed.

By default, this is 5 minutes.

Example configuration:
```yaml
refreshable_access_token_lifetime: 10m
```
---
### `refresh_token_lifetime`

Time that a refresh token remains valid for (provided that it is not
exchanged for another one first).
This option can be used to automatically log out inactive sessions.
Please see the manual for more information.

Note also that this is calculated at login time and refresh time:
changes are not applied to existing sessions until they are refreshed.

By default, this is infinite.

Example configuration:
```yaml
refresh_token_lifetime: 24h
```
---
### `nonrefreshable_access_token_lifetime`

Time that an access token remains valid for, if the session is NOT
using refresh tokens.

Please note that not all clients support refresh tokens, so setting
this to a short value may be inconvenient for some users who will
then be logged out frequently.

Note also that this is calculated at login time: changes are not applied
retrospectively to existing sessions for users that have already logged in.

By default, this is infinite.

Example configuration:
```yaml
nonrefreshable_access_token_lifetime: 24h
```
|
---
|
||||||
## Metrics ###
|
## Metrics ###
|
||||||
Config options related to metrics.
|
Config options related to metrics.
|
||||||
|
@@ -2435,7 +2484,7 @@ report_stats_endpoint: https://example.com/report-usage-stats/push
Config settings related to the client/server API

---
### `room_prejoin_state`

Controls for the state that is shared with users who receive an invite
to a room. By default, the following state event types are shared with users who
@@ -2537,7 +2586,10 @@ Config options relating to signing keys
---
### `signing_key_path`

Path to the signing key to sign events and federation requests with.

*New in Synapse 1.67*: If this file does not exist, Synapse will create a new signing
key on startup and store it in this file.

Example configuration:
```yaml
@@ -2572,7 +2624,7 @@ Example configuration:
key_refresh_interval: 2d
```
---
### `trusted_key_servers`

The trusted servers to download signing keys from.
@@ -2642,13 +2694,10 @@ key_server_signing_keys_path: "key_server_signing_keys.key"
The following settings can be used to make Synapse use a single sign-on
provider for authentication, instead of its internal password database.

You will probably also want to set the following options to `false` to
disable the regular login/registration flows:
* [`enable_registration`](#enable_registration)
* [`password_config.enabled`](#password_config)

---
### `saml2_config`
@@ -32,13 +32,8 @@ stream between all configured Synapse processes. Additionally, processes may
make HTTP requests to each other, primarily for operations which need to wait
for a reply ─ such as sending an event.

All the workers and the main process connect to Redis, which relays replication
commands between processes.

If Redis support is enabled Synapse will use it as a shared cache, as well as a
pub/sub mechanism.
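For intuition about the role Redis plays here: pub/sub is a fan-out relay, where every process subscribes to a channel and publishes commands to it, and Redis delivers each message to all subscribers. A toy sketch with the `redis-py` client, assuming a local Redis server (the channel name and command text are made up; Synapse's replication protocol is more involved):

```python
import redis

r = redis.Redis()

# Each process (main or worker) subscribes to the replication channel...
pubsub = r.pubsub()
pubsub.subscribe("replication")

# ...and publishes commands to it; Redis relays them to every subscriber.
r.publish("replication", "RDATA events master 42")

for message in pubsub.listen():
    if message["type"] == "message":  # skip the subscribe confirmation
        print(message["data"])
        break
```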
@@ -117,23 +112,26 @@ redis:
  enabled: true
```

See the [configuration manual](usage/configuration/config_documentation.html) for the full documentation of each option.

Under **no circumstances** should the replication listener be exposed to the
public internet; replication traffic is:

* always unencrypted
* unauthenticated, unless `worker_replication_secret` is configured

### Worker configuration

In the config file for each worker, you must specify:
* The type of worker (`worker_app`). The currently available worker applications are listed below.
* A unique name for the worker (`worker_name`).
* The HTTP replication endpoint that it should talk to on the main synapse process
  (`worker_replication_host` and `worker_replication_http_port`)
* If handling HTTP requests, a `worker_listeners` option with an `http`
  listener, in the same way as the `listeners` option in the shared config.
* If handling the `^/_matrix/client/v3/keys/upload` endpoint, the HTTP URI for
  the main process (`worker_main_http_uri`).

For example:
@@ -217,10 +215,12 @@ information.
    ^/_matrix/client/(api/v1|r0|v3|unstable)/search$

    # Encryption requests
    # Note that ^/_matrix/client/(r0|v3|unstable)/keys/upload/ requires `worker_main_http_uri`
    ^/_matrix/client/(r0|v3|unstable)/keys/query$
    ^/_matrix/client/(r0|v3|unstable)/keys/changes$
    ^/_matrix/client/(r0|v3|unstable)/keys/claim$
    ^/_matrix/client/(r0|v3|unstable)/room_keys/
    ^/_matrix/client/(r0|v3|unstable)/keys/upload/

    # Registration/login requests
    ^/_matrix/client/(api/v1|r0|v3|unstable)/login$
@@ -325,7 +325,6 @@ effects of bursts of events from that bridge on events sent by normal users.

Additionally, the writing of specific streams (such as events) can be moved off
of the main process to a particular worker.

To enable this, the worker must have a HTTP replication listener configured,
have a `worker_name` and be listed in the `instance_map` config. The same worker
|
||||||
If `update_user_directory` is set to `false`, and this worker is not running,
|
If `update_user_directory` is set to `false`, and this worker is not running,
|
||||||
the above endpoint may give outdated results.
|
the above endpoint may give outdated results.
|
||||||
|
|
||||||
### `synapse.app.frontend_proxy`
|
|
||||||
|
|
||||||
Proxies some frequently-requested client endpoints to add caching and remove
|
|
||||||
load from the main synapse. It can handle REST endpoints matching the following
|
|
||||||
regular expressions:
|
|
||||||
|
|
||||||
^/_matrix/client/(r0|v3|unstable)/keys/upload
|
|
||||||
|
|
||||||
If `use_presence` is False in the homeserver config, it can also handle REST
|
|
||||||
endpoints matching the following regular expressions:
|
|
||||||
|
|
||||||
^/_matrix/client/(api/v1|r0|v3|unstable)/presence/[^/]+/status
|
|
||||||
|
|
||||||
This "stub" presence handler will pass through `GET` request but make the
|
|
||||||
`PUT` effectively a no-op.
|
|
||||||
|
|
||||||
It will proxy any requests it cannot handle to the main synapse instance. It
|
|
||||||
must therefore be configured with the location of the main instance, via
|
|
||||||
the `worker_main_http_uri` setting in the `frontend_proxy` worker configuration
|
|
||||||
file. For example:
|
|
||||||
|
|
||||||
```yaml
|
|
||||||
worker_main_http_uri: http://127.0.0.1:8008
|
|
||||||
```
|
|
||||||
|
|
||||||
### Historical apps
|
### Historical apps
|
||||||
|
|
||||||
*Note:* Historically there used to be more apps, however they have been
|
The following used to be separate worker application types, but are now
|
||||||
amalgamated into a single `synapse.app.generic_worker` app. The remaining apps
|
equivalent to `synapse.app.generic_worker`:
|
||||||
are ones that do specific processing unrelated to requests, e.g. the `pusher`
|
|
||||||
that handles sending out push notifications for new events. The intention is for
|
* `synapse.app.client_reader`
|
||||||
all these to be folded into the `generic_worker` app and to use config to define
|
* `synapse.app.event_creator`
|
||||||
which processes handle the various proccessing such as push notifications.
|
* `synapse.app.federation_reader`
|
||||||
|
* `synapse.app.frontend_proxy`
|
||||||
|
* `synapse.app.synchrotron`
|
||||||
|
|
||||||
|
|
||||||
## Migration from old config
|
## Migration from old config
|
||||||
|
|
||||||
There are two main independent changes that have been made: introducing Redis
|
A main change that has occurred is the merging of worker apps into
|
||||||
support and merging apps into `synapse.app.generic_worker`. Both these changes
|
`synapse.app.generic_worker`. This change is backwards compatible and so no
|
||||||
are backwards compatible and so no changes to the config are required, however
|
changes to the config are required.
|
||||||
server admins are encouraged to plan to migrate to Redis as the old style direct
|
|
||||||
TCP replication config is deprecated.
|
|
||||||
|
|
||||||
To migrate to Redis add the `redis` config as above, and optionally remove the
|
|
||||||
TCP `replication` listener from master and `worker_replication_port` from worker
|
|
||||||
config.
|
|
||||||
|
|
||||||
To migrate apps to use `synapse.app.generic_worker` simply update the
|
To migrate apps to use `synapse.app.generic_worker` simply update the
|
||||||
`worker_app` option in the worker configs, and where worker are started (e.g.
|
`worker_app` option in the worker configs, and where worker are started (e.g.
|
||||||
|
|
|
@@ -7,10 +7,10 @@ optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"

[package.extras]
dev = ["cloudpickle", "coverage[toml] (>=5.0.2)", "furo", "hypothesis", "mypy", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "six", "sphinx", "sphinx-notfound-page", "zope.interface"]
docs = ["furo", "sphinx", "sphinx-notfound-page", "zope.interface"]
tests = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "six", "zope.interface"]
tests_no_zope = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "six"]

[[package]]
name = "authlib"

@@ -39,7 +39,7 @@ attrs = ">=19.2.0"
six = "*"

[package.extras]
visualize = ["Twisted (>=16.1.1)", "graphviz (>0.5.1)"]

[[package]]
name = "bcrypt"

@@ -177,7 +177,7 @@ optional = false
python-versions = "*"

[package.extras]
test = ["flake8 (==3.7.8)", "hypothesis (==3.55.3)"]

[[package]]
name = "constantly"

@@ -199,12 +199,12 @@ python-versions = ">=3.6"
cffi = ">=1.12"

[package.extras]
docs = ["sphinx (>=1.6.5,!=1.8.0,!=3.1.0,!=3.1.1)", "sphinx_rtd_theme"]
docstest = ["pyenchant (>=1.6.11)", "sphinxcontrib-spelling (>=4.0.1)", "twine (>=1.12.0)"]
pep8test = ["black", "flake8", "flake8-import-order", "pep8-naming"]
sdist = ["setuptools_rust (>=0.11.4)"]
ssh = ["bcrypt (>=3.1.5)"]
test = ["hypothesis (>=1.11.4,!=3.79.2)", "iso8601", "pretend", "pytest (>=6.2.0)", "pytest-cov", "pytest-subtests", "pytest-xdist", "pytz"]

[[package]]
name = "defusedxml"

@@ -226,7 +226,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
wrapt = ">=1.10,<2"

[package.extras]
dev = ["PyTest", "PyTest (<5)", "PyTest-Cov", "PyTest-Cov (<2.6)", "bump2version (<1)", "configparser (<5)", "importlib-metadata (<3)", "importlib-resources (<4)", "sphinx (<2)", "sphinxcontrib-websupport (<2)", "tox", "zipp (<2)"]

[[package]]
name = "docutils"

@@ -245,7 +245,7 @@ optional = true
python-versions = ">=3.7"

[package.extras]
dev = ["Sphinx", "coverage", "flake8", "lxml", "memory-profiler", "mypy (==0.910)", "tox", "xmlschema (>=1.8.0)"]

[[package]]
name = "flake8"

@@ -274,7 +274,7 @@ attrs = ">=19.2.0"
flake8 = ">=3.0.0"

[package.extras]
dev = ["black", "coverage", "hypothesis", "hypothesmith"]

[[package]]
name = "flake8-comprehensions"

@@ -367,8 +367,8 @@ typing-extensions = {version = ">=3.6.4", markers = "python_version < \"3.8\""}
zipp = ">=0.5"

[package.extras]
docs = ["jaraco.packaging (>=8.2)", "rst.linker (>=1.9)", "sphinx"]
testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pep517", "pyfakefs", "pytest (>=4.6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.0.1)", "pytest-flake8", "pytest-mypy"]

[[package]]
name = "importlib-resources"

@@ -382,8 +382,8 @@ python-versions = ">=3.6"
zipp = {version = ">=3.1.0", markers = "python_version < \"3.10\""}

[package.extras]
docs = ["jaraco.packaging (>=8.2)", "rst.linker (>=1.9)", "sphinx"]
testing = ["pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.0.1)", "pytest-flake8", "pytest-mypy"]

[[package]]
name = "incremental"

@@ -405,9 +405,9 @@ optional = false
python-versions = ">=3.6,<4.0"

[package.extras]
colors = ["colorama (>=0.4.3,<0.5.0)"]
pipfile_deprecated_finder = ["pipreqs", "requirementslib"]
requirements_deprecated_finder = ["pip-api", "pipreqs"]

[[package]]
name = "jaeger-client"

@@ -424,7 +424,7 @@ thrift = "*"
tornado = ">=4.3"

[package.extras]
tests = ["codecov", "coverage", "flake8", "flake8-quotes", "flake8-typing-imports", "mock", "mypy", "opentracing_instrumentation (>=3,<4)", "prometheus_client (==0.11.0)", "pycurl", "pytest", "pytest-benchmark[histogram]", "pytest-cov", "pytest-localserver", "pytest-timeout", "pytest-tornado", "tchannel (==2.1.0)"]

[[package]]
name = "jeepney"

@@ -435,8 +435,8 @@ optional = false
python-versions = ">=3.6"

[package.extras]
test = ["async-timeout", "pytest", "pytest-asyncio", "pytest-trio", "testpath", "trio"]
trio = ["async_generator", "trio"]

[[package]]
name = "jinja2"

@@ -486,8 +486,8 @@ pywin32-ctypes = {version = "<0.1.0 || >0.1.0,<0.1.1 || >0.1.1", markers = "sys_
SecretStorage = {version = ">=3.2", markers = "sys_platform == \"linux\""}

[package.extras]
docs = ["jaraco.packaging (>=8.2)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx"]
testing = ["pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.0.1)", "pytest-flake8", "pytest-mypy"]

[[package]]
name = "ldap3"

@@ -511,7 +511,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, != 3.4.*"
[package.extras]
cssselect = ["cssselect (>=0.7)"]
html5 = ["html5lib"]
htmlsoup = ["BeautifulSoup4"]
source = ["Cython (>=0.29.7)"]

[[package]]

@@ -535,8 +535,8 @@ attrs = "*"
importlib-metadata = {version = ">=1.4", markers = "python_version < \"3.8\""}

[package.extras]
dev = ["aiounittest", "black (==22.3.0)", "build (==0.8.0)", "flake8 (==4.0.1)", "isort (==5.9.3)", "mypy (==0.910)", "tox", "twine (==4.0.1)", "twisted"]
test = ["aiounittest", "tox", "twisted"]

[[package]]
name = "matrix-synapse-ldap3"

@@ -552,7 +552,7 @@ service-identity = "*"
Twisted = ">=15.1.0"

[package.extras]
dev = ["black (==22.3.0)", "flake8 (==4.0.1)", "isort (==5.9.3)", "ldaptor", "matrix-synapse", "mypy (==0.910)", "tox", "types-setuptools"]

[[package]]
name = "mccabe"

@@ -611,7 +611,7 @@ mypy = "0.950"
"zope.schema" = "*"

[package.extras]
test = ["lxml", "pytest (>=4.6)", "pytest-cov"]

[[package]]
name = "netaddr"

@@ -630,7 +630,7 @@ optional = true
python-versions = "*"

[package.extras]
tests = ["Sphinx", "doubles", "flake8", "flake8-quotes", "gevent", "mock", "pytest", "pytest-cov", "pytest-mock", "six (>=1.10.0,<2.0)", "sphinx_rtd_theme", "tornado"]

[[package]]
name = "packaging"

@@ -835,10 +835,10 @@ optional = false
python-versions = ">=3.6"

[package.extras]
crypto = ["cryptography (>=3.3.1)"]
dev = ["coverage[toml] (==5.0.4)", "cryptography (>=3.3.1)", "mypy", "pre-commit", "pytest (>=6.0.0,<7.0.0)", "sphinx", "sphinx-rtd-theme", "zope.interface"]
docs = ["sphinx", "sphinx-rtd-theme", "zope.interface"]
tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"]

[[package]]
name = "pymacaroons"

@@ -872,8 +872,8 @@ python-versions = ">=3.6"
cffi = ">=1.4.1"

[package.extras]
docs = ["sphinx (>=1.6.5)", "sphinx_rtd_theme"]
tests = ["hypothesis (>=3.27.0)", "pytest (>=3.2.1,!=3.3.0)"]

[[package]]
name = "pyopenssl"

@@ -925,11 +925,12 @@ pyOpenSSL = "*"
python-dateutil = "*"
pytz = "*"
requests = ">=1.0.0"
setuptools = "*"
six = "*"
xmlschema = ">=1.2.1"

[package.extras]
s2repoze = ["paste", "repoze.who", "zope.interface"]

[[package]]
name = "python-dateutil"

@@ -1054,11 +1055,11 @@ celery = ["celery (>=3)"]
chalice = ["chalice (>=1.16.0)"]
django = ["django (>=1.8)"]
falcon = ["falcon (>=1.4)"]
flask = ["blinker (>=1.1)", "flask (>=0.11)"]
httpx = ["httpx (>=0.16.0)"]
pure_eval = ["asttokens", "executing", "pure-eval"]
pyspark = ["pyspark (>=2.4.4)"]
quart = ["blinker (>=1.1)", "quart (>=0.16.1)"]
rq = ["rq (>=0.6)"]
sanic = ["sanic (>=0.8)"]
sqlalchemy = ["sqlalchemy (>=1.2)"]

@@ -1080,11 +1081,24 @@ pyasn1-modules = "*"
six = "*"

[package.extras]
dev = ["coverage[toml] (>=5.0.2)", "furo", "idna", "pyOpenSSL", "pytest", "sphinx"]
docs = ["furo", "sphinx"]
idna = ["idna"]
tests = ["coverage[toml] (>=5.0.2)", "pytest"]

[[package]]
name = "setuptools"
version = "65.3.0"
description = "Easily download, build, install, upgrade, and uninstall Python packages"
category = "main"
optional = false
python-versions = ">=3.7"

[package.extras]
docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"]
testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8 (<5)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mock", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"]
testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"]

[[package]]
name = "signedjson"
version = "1.1.4"

@@ -1199,6 +1213,7 @@ click = "*"
click-default-group = "*"
incremental = "*"
jinja2 = "*"
setuptools = "*"
|
||||||
tomli = {version = "*", markers = "python_version >= \"3.6\""}
|
tomli = {version = "*", markers = "python_version >= \"3.6\""}
|
||||||
|
|
||||||
[package.extras]
|
[package.extras]
|
||||||
|
@ -1236,7 +1251,7 @@ requests = ">=2.1.0"
|
||||||
Twisted = {version = ">=18.7.0", extras = ["tls"]}
|
Twisted = {version = ">=18.7.0", extras = ["tls"]}
|
||||||
|
|
||||||
[package.extras]
|
[package.extras]
|
||||||
dev = ["pep8", "pyflakes", "httpbin (==0.5.0)"]
|
dev = ["httpbin (==0.5.0)", "pep8", "pyflakes"]
|
||||||
docs = ["sphinx (>=1.4.8)"]
|
docs = ["sphinx (>=1.4.8)"]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
|
@ -1281,20 +1296,20 @@ typing-extensions = ">=3.6.5"
|
||||||
"zope.interface" = ">=4.4.2"
|
"zope.interface" = ">=4.4.2"
|
||||||
|
|
||||||
[package.extras]
|
[package.extras]
|
||||||
all_non_platform = ["cython-test-exception-raiser (>=1.0.2,<2)", "PyHamcrest (>=1.9.0)", "pyopenssl (>=16.0.0)", "service-identity (>=18.1.0)", "idna (>=2.4)", "pyasn1", "cryptography (>=2.6)", "appdirs (>=1.4.0)", "bcrypt (>=3.0.0)", "pyserial (>=3.0)", "h2 (>=3.0,<5.0)", "priority (>=1.1.0,<2.0)", "pywin32 (!=226)", "contextvars (>=2.4,<3)"]
|
all_non_platform = ["PyHamcrest (>=1.9.0)", "appdirs (>=1.4.0)", "bcrypt (>=3.0.0)", "contextvars (>=2.4,<3)", "cryptography (>=2.6)", "cython-test-exception-raiser (>=1.0.2,<2)", "h2 (>=3.0,<5.0)", "idna (>=2.4)", "priority (>=1.1.0,<2.0)", "pyasn1", "pyopenssl (>=16.0.0)", "pyserial (>=3.0)", "pywin32 (!=226)", "service-identity (>=18.1.0)"]
|
||||||
conch = ["pyasn1", "cryptography (>=2.6)", "appdirs (>=1.4.0)", "bcrypt (>=3.0.0)"]
|
conch = ["appdirs (>=1.4.0)", "bcrypt (>=3.0.0)", "cryptography (>=2.6)", "pyasn1"]
|
||||||
conch_nacl = ["pyasn1", "cryptography (>=2.6)", "appdirs (>=1.4.0)", "bcrypt (>=3.0.0)", "pynacl"]
|
conch_nacl = ["PyNaCl", "appdirs (>=1.4.0)", "bcrypt (>=3.0.0)", "cryptography (>=2.6)", "pyasn1"]
|
||||||
contextvars = ["contextvars (>=2.4,<3)"]
|
contextvars = ["contextvars (>=2.4,<3)"]
|
||||||
dev = ["towncrier (>=19.2,<20.0)", "sphinx-rtd-theme (>=0.5,<1.0)", "readthedocs-sphinx-ext (>=2.1,<3.0)", "sphinx (>=4.1.2,<6)", "pyflakes (>=2.2,<3.0)", "twistedchecker (>=0.7,<1.0)", "coverage (>=6b1,<7)", "python-subunit (>=1.4,<2.0)", "pydoctor (>=21.9.0,<21.10.0)"]
|
dev = ["coverage (>=6b1,<7)", "pydoctor (>=21.9.0,<21.10.0)", "pyflakes (>=2.2,<3.0)", "python-subunit (>=1.4,<2.0)", "readthedocs-sphinx-ext (>=2.1,<3.0)", "sphinx (>=4.1.2,<6)", "sphinx-rtd-theme (>=0.5,<1.0)", "towncrier (>=19.2,<20.0)", "twistedchecker (>=0.7,<1.0)"]
|
||||||
dev_release = ["towncrier (>=19.2,<20.0)", "sphinx-rtd-theme (>=0.5,<1.0)", "readthedocs-sphinx-ext (>=2.1,<3.0)", "sphinx (>=4.1.2,<6)", "pydoctor (>=21.9.0,<21.10.0)"]
|
dev_release = ["pydoctor (>=21.9.0,<21.10.0)", "readthedocs-sphinx-ext (>=2.1,<3.0)", "sphinx (>=4.1.2,<6)", "sphinx-rtd-theme (>=0.5,<1.0)", "towncrier (>=19.2,<20.0)"]
|
||||||
http2 = ["h2 (>=3.0,<5.0)", "priority (>=1.1.0,<2.0)"]
|
http2 = ["h2 (>=3.0,<5.0)", "priority (>=1.1.0,<2.0)"]
|
||||||
macos_platform = ["pyobjc-core", "pyobjc-framework-cfnetwork", "pyobjc-framework-cocoa", "cython-test-exception-raiser (>=1.0.2,<2)", "PyHamcrest (>=1.9.0)", "pyopenssl (>=16.0.0)", "service-identity (>=18.1.0)", "idna (>=2.4)", "pyasn1", "cryptography (>=2.6)", "appdirs (>=1.4.0)", "bcrypt (>=3.0.0)", "pyserial (>=3.0)", "h2 (>=3.0,<5.0)", "priority (>=1.1.0,<2.0)", "pywin32 (!=226)", "contextvars (>=2.4,<3)"]
|
macos_platform = ["PyHamcrest (>=1.9.0)", "appdirs (>=1.4.0)", "bcrypt (>=3.0.0)", "contextvars (>=2.4,<3)", "cryptography (>=2.6)", "cython-test-exception-raiser (>=1.0.2,<2)", "h2 (>=3.0,<5.0)", "idna (>=2.4)", "priority (>=1.1.0,<2.0)", "pyasn1", "pyobjc-core", "pyobjc-framework-CFNetwork", "pyobjc-framework-Cocoa", "pyopenssl (>=16.0.0)", "pyserial (>=3.0)", "pywin32 (!=226)", "service-identity (>=18.1.0)"]
|
||||||
mypy = ["mypy (==0.930)", "mypy-zope (==0.3.4)", "types-setuptools", "types-pyopenssl", "towncrier (>=19.2,<20.0)", "sphinx-rtd-theme (>=0.5,<1.0)", "readthedocs-sphinx-ext (>=2.1,<3.0)", "sphinx (>=4.1.2,<6)", "pyflakes (>=2.2,<3.0)", "twistedchecker (>=0.7,<1.0)", "coverage (>=6b1,<7)", "cython-test-exception-raiser (>=1.0.2,<2)", "PyHamcrest (>=1.9.0)", "pyopenssl (>=16.0.0)", "service-identity (>=18.1.0)", "idna (>=2.4)", "pyasn1", "cryptography (>=2.6)", "appdirs (>=1.4.0)", "bcrypt (>=3.0.0)", "pyserial (>=3.0)", "h2 (>=3.0,<5.0)", "priority (>=1.1.0,<2.0)", "pynacl", "pywin32 (!=226)", "python-subunit (>=1.4,<2.0)", "contextvars (>=2.4,<3)", "pydoctor (>=21.9.0,<21.10.0)"]
|
mypy = ["PyHamcrest (>=1.9.0)", "PyNaCl", "appdirs (>=1.4.0)", "bcrypt (>=3.0.0)", "contextvars (>=2.4,<3)", "coverage (>=6b1,<7)", "cryptography (>=2.6)", "cython-test-exception-raiser (>=1.0.2,<2)", "h2 (>=3.0,<5.0)", "idna (>=2.4)", "mypy (==0.930)", "mypy-zope (==0.3.4)", "priority (>=1.1.0,<2.0)", "pyasn1", "pydoctor (>=21.9.0,<21.10.0)", "pyflakes (>=2.2,<3.0)", "pyopenssl (>=16.0.0)", "pyserial (>=3.0)", "python-subunit (>=1.4,<2.0)", "pywin32 (!=226)", "readthedocs-sphinx-ext (>=2.1,<3.0)", "service-identity (>=18.1.0)", "sphinx (>=4.1.2,<6)", "sphinx-rtd-theme (>=0.5,<1.0)", "towncrier (>=19.2,<20.0)", "twistedchecker (>=0.7,<1.0)", "types-pyOpenSSL", "types-setuptools"]
|
||||||
osx_platform = ["pyobjc-core", "pyobjc-framework-cfnetwork", "pyobjc-framework-cocoa", "cython-test-exception-raiser (>=1.0.2,<2)", "PyHamcrest (>=1.9.0)", "pyopenssl (>=16.0.0)", "service-identity (>=18.1.0)", "idna (>=2.4)", "pyasn1", "cryptography (>=2.6)", "appdirs (>=1.4.0)", "bcrypt (>=3.0.0)", "pyserial (>=3.0)", "h2 (>=3.0,<5.0)", "priority (>=1.1.0,<2.0)", "pywin32 (!=226)", "contextvars (>=2.4,<3)"]
|
osx_platform = ["PyHamcrest (>=1.9.0)", "appdirs (>=1.4.0)", "bcrypt (>=3.0.0)", "contextvars (>=2.4,<3)", "cryptography (>=2.6)", "cython-test-exception-raiser (>=1.0.2,<2)", "h2 (>=3.0,<5.0)", "idna (>=2.4)", "priority (>=1.1.0,<2.0)", "pyasn1", "pyobjc-core", "pyobjc-framework-CFNetwork", "pyobjc-framework-Cocoa", "pyopenssl (>=16.0.0)", "pyserial (>=3.0)", "pywin32 (!=226)", "service-identity (>=18.1.0)"]
|
||||||
serial = ["pyserial (>=3.0)", "pywin32 (!=226)"]
|
serial = ["pyserial (>=3.0)", "pywin32 (!=226)"]
|
||||||
test = ["cython-test-exception-raiser (>=1.0.2,<2)", "PyHamcrest (>=1.9.0)"]
|
test = ["PyHamcrest (>=1.9.0)", "cython-test-exception-raiser (>=1.0.2,<2)"]
|
||||||
tls = ["pyopenssl (>=16.0.0)", "service-identity (>=18.1.0)", "idna (>=2.4)"]
|
tls = ["idna (>=2.4)", "pyopenssl (>=16.0.0)", "service-identity (>=18.1.0)"]
|
||||||
windows_platform = ["pywin32 (!=226)", "cython-test-exception-raiser (>=1.0.2,<2)", "PyHamcrest (>=1.9.0)", "pyopenssl (>=16.0.0)", "service-identity (>=18.1.0)", "idna (>=2.4)", "pyasn1", "cryptography (>=2.6)", "appdirs (>=1.4.0)", "bcrypt (>=3.0.0)", "pyserial (>=3.0)", "h2 (>=3.0,<5.0)", "priority (>=1.1.0,<2.0)", "pywin32 (!=226)", "contextvars (>=2.4,<3)"]
|
windows_platform = ["PyHamcrest (>=1.9.0)", "appdirs (>=1.4.0)", "bcrypt (>=3.0.0)", "contextvars (>=2.4,<3)", "cryptography (>=2.6)", "cython-test-exception-raiser (>=1.0.2,<2)", "h2 (>=3.0,<5.0)", "idna (>=2.4)", "priority (>=1.1.0,<2.0)", "pyasn1", "pyopenssl (>=16.0.0)", "pyserial (>=3.0)", "pywin32 (!=226)", "pywin32 (!=226)", "service-identity (>=18.1.0)"]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "twisted-iocpsupport"
|
name = "twisted-iocpsupport"
|
||||||
|
@ -1472,7 +1487,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4"
|
||||||
|
|
||||||
[package.extras]
|
[package.extras]
|
||||||
brotli = ["brotlipy (>=0.6.0)"]
|
brotli = ["brotlipy (>=0.6.0)"]
|
||||||
secure = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "certifi", "ipaddress"]
|
secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)"]
|
||||||
socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"]
|
socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
|
@ -1504,8 +1519,8 @@ elementpath = ">=2.5.0,<3.0.0"
|
||||||
|
|
||||||
[package.extras]
|
[package.extras]
|
||||||
codegen = ["elementpath (>=2.5.0,<3.0.0)", "jinja2"]
|
codegen = ["elementpath (>=2.5.0,<3.0.0)", "jinja2"]
|
||||||
dev = ["tox", "coverage", "lxml", "elementpath (>=2.5.0,<3.0.0)", "memory-profiler", "sphinx", "sphinx-rtd-theme", "jinja2", "flake8", "mypy", "lxml-stubs"]
|
dev = ["Sphinx", "coverage", "elementpath (>=2.5.0,<3.0.0)", "flake8", "jinja2", "lxml", "lxml-stubs", "memory-profiler", "mypy", "sphinx-rtd-theme", "tox"]
|
||||||
docs = ["elementpath (>=2.5.0,<3.0.0)", "sphinx", "sphinx-rtd-theme", "jinja2"]
|
docs = ["Sphinx", "elementpath (>=2.5.0,<3.0.0)", "jinja2", "sphinx-rtd-theme"]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "zipp"
|
name = "zipp"
|
||||||
|
@ -1516,8 +1531,8 @@ optional = false
|
||||||
python-versions = ">=3.7"
|
python-versions = ">=3.7"
|
||||||
|
|
||||||
[package.extras]
|
[package.extras]
|
||||||
docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"]
|
docs = ["jaraco.packaging (>=8.2)", "rst.linker (>=1.9)", "sphinx"]
|
||||||
testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "jaraco.itertools", "func-timeout", "pytest-black (>=0.3.7)", "pytest-mypy"]
|
testing = ["func-timeout", "jaraco.itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.0.1)", "pytest-flake8", "pytest-mypy"]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "zope.event"
|
name = "zope.event"
|
||||||
|
@ -1527,8 +1542,11 @@ category = "dev"
|
||||||
optional = false
|
optional = false
|
||||||
python-versions = "*"
|
python-versions = "*"
|
||||||
|
|
||||||
|
[package.dependencies]
|
||||||
|
setuptools = "*"
|
||||||
|
|
||||||
[package.extras]
|
[package.extras]
|
||||||
docs = ["sphinx"]
|
docs = ["Sphinx"]
|
||||||
test = ["zope.testrunner"]
|
test = ["zope.testrunner"]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
|
@ -1539,8 +1557,11 @@ category = "main"
|
||||||
optional = false
|
optional = false
|
||||||
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
|
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
|
||||||
|
|
||||||
|
[package.dependencies]
|
||||||
|
setuptools = "*"
|
||||||
|
|
||||||
[package.extras]
|
[package.extras]
|
||||||
docs = ["sphinx", "repoze.sphinx.autointerface"]
|
docs = ["Sphinx", "repoze.sphinx.autointerface"]
|
||||||
test = ["coverage (>=5.0.3)", "zope.event", "zope.testing"]
|
test = ["coverage (>=5.0.3)", "zope.event", "zope.testing"]
|
||||||
testing = ["coverage (>=5.0.3)", "zope.event", "zope.testing"]
|
testing = ["coverage (>=5.0.3)", "zope.event", "zope.testing"]
|
||||||
|
|
||||||
|
@ -1553,11 +1574,12 @@ optional = false
|
||||||
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
|
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
|
||||||
|
|
||||||
[package.dependencies]
|
[package.dependencies]
|
||||||
|
setuptools = "*"
|
||||||
"zope.event" = "*"
|
"zope.event" = "*"
|
||||||
"zope.interface" = ">=5.0.0"
|
"zope.interface" = ">=5.0.0"
|
||||||
|
|
||||||
[package.extras]
|
[package.extras]
|
||||||
docs = ["sphinx", "repoze.sphinx.autointerface"]
|
docs = ["Sphinx", "repoze.sphinx.autointerface"]
|
||||||
test = ["zope.i18nmessageid", "zope.testing", "zope.testrunner"]
|
test = ["zope.i18nmessageid", "zope.testing", "zope.testrunner"]
|
||||||
|
|
||||||
[extras]
|
[extras]
|
||||||
|
@ -2458,6 +2480,10 @@ service-identity = [
|
||||||
{file = "service-identity-21.1.0.tar.gz", hash = "sha256:6e6c6086ca271dc11b033d17c3a8bea9f24ebff920c587da090afc9519419d34"},
|
{file = "service-identity-21.1.0.tar.gz", hash = "sha256:6e6c6086ca271dc11b033d17c3a8bea9f24ebff920c587da090afc9519419d34"},
|
||||||
{file = "service_identity-21.1.0-py2.py3-none-any.whl", hash = "sha256:f0b0caac3d40627c3c04d7a51b6e06721857a0e10a8775f2d1d7e72901b3a7db"},
|
{file = "service_identity-21.1.0-py2.py3-none-any.whl", hash = "sha256:f0b0caac3d40627c3c04d7a51b6e06721857a0e10a8775f2d1d7e72901b3a7db"},
|
||||||
]
|
]
|
||||||
|
setuptools = [
|
||||||
|
{file = "setuptools-65.3.0-py3-none-any.whl", hash = "sha256:2e24e0bec025f035a2e72cdd1961119f557d78ad331bb00ff82efb2ab8da8e82"},
|
||||||
|
{file = "setuptools-65.3.0.tar.gz", hash = "sha256:7732871f4f7fa58fb6bdcaeadb0161b2bd046c85905dbaa066bdcbcc81953b57"},
|
||||||
|
]
|
||||||
signedjson = [
|
signedjson = [
|
||||||
{file = "signedjson-1.1.4-py3-none-any.whl", hash = "sha256:45569ec54241c65d2403fe3faf7169be5322547706a231e884ca2b427f23d228"},
|
{file = "signedjson-1.1.4-py3-none-any.whl", hash = "sha256:45569ec54241c65d2403fe3faf7169be5322547706a231e884ca2b427f23d228"},
|
||||||
{file = "signedjson-1.1.4.tar.gz", hash = "sha256:cd91c56af53f169ef032c62e9c4a3292dc158866933318d0592e3462db3d6492"},
|
{file = "signedjson-1.1.4.tar.gz", hash = "sha256:cd91c56af53f169ef032c62e9c4a3292dc158866933318d0592e3462db3d6492"},
|
||||||
|
|
|
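The lockfile churn above is mechanical: it is consistent with the file being regenerated by a newer Poetry, which writes extras lists in sorted order and now records setuptools explicitly. A quick sanity check for the sorted-extras invariant (a sketch, assuming Python 3.11+ for tomllib and a poetry.lock in the working directory):

# Sketch: verify every extras list in poetry.lock is sorted, which is the
# invariant the regenerated lockfile above establishes.
import tomllib

with open("poetry.lock", "rb") as f:
    lock = tomllib.load(f)

for package in lock.get("package", []):
    for name, deps in package.get("extras", {}).items():
        if deps != sorted(deps):
            print(f"unsorted extras: {package['name']}[{name}]")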
@@ -54,7 +54,7 @@ skip_gitignore = true

 [tool.poetry]
 name = "matrix-synapse"
-version = "1.66.0rc2"
+version = "1.66.0"
 description = "Homeserver for the Matrix decentralised comms protocol"
 authors = ["Matrix.org Team and Contributors <packages@matrix.org>"]
 license = "Apache-2.0"
@@ -18,10 +18,12 @@
 """

 import glob
+import json
 import os
 import re
 import subprocess
 import sys
+import time
 import urllib.request
 from os import path
 from tempfile import TemporaryDirectory
@@ -71,18 +73,21 @@ def cli() -> None:

         ./scripts-dev/release.py tag

-        # ... wait for assets to build ...
+        # wait for assets to build, either manually or with:
+        ./scripts-dev/release.py wait-for-actions

         ./scripts-dev/release.py publish

         ./scripts-dev/release.py upload

-        # Optional: generate some nice links for the announcement
-
         ./scripts-dev/release.py merge-back

+        # Optional: generate some nice links for the announcement
         ./scripts-dev/release.py announce

+    Alternatively, `./scripts-dev/release.py full` will do all the above
+    as well as guiding you through the manual steps.
+
     If the env var GH_TOKEN (or GITHUB_TOKEN) is set, or passed into the
     `tag`/`publish` command, then a new draft release will be created/published.
     """
@@ -90,6 +95,10 @@ def cli() -> None:

 @cli.command()
 def prepare() -> None:
+    _prepare()
+
+
+def _prepare() -> None:
     """Do the initial stages of creating a release, including creating release
     branch, updating changelog and pushing to GitHub.
     """
@@ -284,6 +293,10 @@ def prepare() -> None:
 @cli.command()
 @click.option("--gh-token", envvar=["GH_TOKEN", "GITHUB_TOKEN"])
 def tag(gh_token: Optional[str]) -> None:
+    _tag(gh_token)
+
+
+def _tag(gh_token: Optional[str]) -> None:
     """Tags the release and generates a draft GitHub release"""

     # Make sure we're in a git repo.
@@ -374,6 +387,10 @@ def tag(gh_token: Optional[str]) -> None:
 @cli.command()
 @click.option("--gh-token", envvar=["GH_TOKEN", "GITHUB_TOKEN"], required=True)
 def publish(gh_token: str) -> None:
+    _publish(gh_token)
+
+
+def _publish(gh_token: str) -> None:
     """Publish release on GitHub."""

     # Make sure we're in a git repo.
@@ -411,6 +428,10 @@ def publish(gh_token: str) -> None:

 @cli.command()
 def upload() -> None:
+    _upload()
+
+
+def _upload() -> None:
     """Upload release to pypi."""

     current_version = get_package_version()
@@ -479,8 +500,75 @@ def _merge_into(repo: Repo, source: str, target: str) -> None:
     repo.remote().push()


+@cli.command()
+@click.option("--gh-token", envvar=["GH_TOKEN", "GITHUB_TOKEN"], required=False)
+def wait_for_actions(gh_token: Optional[str]) -> None:
+    _wait_for_actions(gh_token)
+
+
+def _wait_for_actions(gh_token: Optional[str]) -> None:
+    # Find out the version and tag name.
+    current_version = get_package_version()
+    tag_name = f"v{current_version}"
+
+    # Authentication is optional on this endpoint,
+    # but use a token if we have one to reduce the chance of being rate-limited.
+    url = f"https://api.github.com/repos/matrix-org/synapse/actions/runs?branch={tag_name}"
+    headers = {"Accept": "application/vnd.github+json"}
+    if gh_token is not None:
+        headers["authorization"] = f"token {gh_token}"
+    req = urllib.request.Request(url, headers=headers)
+
+    time.sleep(10 * 60)
+    while True:
+        time.sleep(5 * 60)
+        response = urllib.request.urlopen(req)
+        resp = json.loads(response.read())
+
+        if len(resp["workflow_runs"]) == 0:
+            continue
+
+        if all(
+            workflow["status"] != "in_progress" for workflow in resp["workflow_runs"]
+        ):
+            success = (
+                workflow["status"] == "completed" for workflow in resp["workflow_runs"]
+            )
+            if success:
+                _notify("Workflows successful. You can now continue the release.")
+            else:
+                _notify("Workflows failed.")
+                click.confirm("Continue anyway?", abort=True)
+
+            break
+
+
+def _notify(message: str) -> None:
+    # Send a bell character. Most terminals will play a sound or show a notification
+    # for this.
+    click.echo(f"\a{message}")
+
+    # Try and run notify-send, but don't raise an Exception if this fails
+    # (This is best-effort)
+    # TODO Support other platforms?
+    subprocess.run(
+        [
+            "notify-send",
+            "--app-name",
+            "Synapse Release Script",
+            "--expire-time",
+            "3600000",
+            message,
+        ]
+    )
+
+
 @cli.command()
 def merge_back() -> None:
+    _merge_back()
+
+
+def _merge_back() -> None:
     """Merge the release branch back into the appropriate branches.
     All branches will be automatically pulled from the remote and the results
     will be pushed to the remote."""
@@ -519,6 +607,10 @@ def merge_back() -> None:

 @cli.command()
 def announce() -> None:
+    _announce()
+
+
+def _announce() -> None:
     """Generate markdown to announce the release."""

     current_version = get_package_version()
@@ -548,10 +640,56 @@ Announce the release in
 - #homeowners:matrix.org (Synapse Announcements), bumping the version in the topic
 - #synapse:matrix.org (Synapse Admins), bumping the version in the topic
 - #synapse-dev:matrix.org
-- #synapse-package-maintainers:matrix.org"""
+- #synapse-package-maintainers:matrix.org
+
+Ask the designated people to do the blog and tweets."""
     )
+
+
+@cli.command()
+@click.option("--gh-token", envvar=["GH_TOKEN", "GITHUB_TOKEN"], required=True)
+def full(gh_token: str) -> None:
+    click.echo("1. If this is a security release, read the security wiki page.")
+    click.echo("2. Check for any release blockers before proceeding.")
+    click.echo("    https://github.com/matrix-org/synapse/labels/X-Release-Blocker")
+
+    click.confirm("Ready?", abort=True)
+
+    click.echo("\n*** prepare ***")
+    _prepare()
+
+    click.echo("Deploy to matrix.org and ensure that it hasn't fallen over.")
+    click.echo("Remember to silence the alerts to prevent alert spam.")
+    click.confirm("Deployed?", abort=True)
+
+    click.echo("\n*** tag ***")
+    _tag(gh_token)
+
+    click.echo("\n*** wait for actions ***")
+    _wait_for_actions(gh_token)
+
+    click.echo("\n*** publish ***")
+    _publish(gh_token)
+
+    click.echo("\n*** upload ***")
+    _upload()
+
+    click.echo("\n*** merge back ***")
+    _merge_back()
+
+    click.echo("\nUpdate the Debian repository")
+    click.confirm("Started updating Debian repository?", abort=True)
+
+    click.echo("\nWait for all release methods to be ready.")
+    # Docker should be ready because it was done by the workflows earlier
+    # PyPI should be ready because we just ran upload().
+    # TODO Automatically poll until the Debs have made it to packages.matrix.org
+    click.confirm("Debs ready?", abort=True)
+
+    click.echo("\n*** announce ***")
+    _announce()


 def get_package_version() -> version.Version:
     version_string = subprocess.check_output(["poetry", "version", "--short"]).decode(
         "utf-8"
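One wrinkle in the new `_wait_for_actions` loop is worth flagging: `success` is bound to a generator expression, and a generator object is always truthy, so the failure branch can never fire. The intended check was presumably an `all(...)` over the runs; note too that in the GitHub Actions API a run's `status` only says whether it finished, while `conclusion` distinguishes success from failure. A self-contained sketch of the presumed intent:

# Sketch of the presumed intent (not what the diff above ships): a bare
# generator expression is always truthy, so the check must be wrapped in all(),
# and "conclusion" (not "status") is what carries success/failure.
from typing import Any, Dict, List


def _runs_succeeded(workflow_runs: List[Dict[str, Any]]) -> bool:
    return all(run["conclusion"] == "success" for run in workflow_runs)


print(_runs_succeeded([{"conclusion": "success"}, {"conclusion": "failure"}]))  # False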
@@ -1,6 +1,6 @@
 # Copyright 2015, 2016 OpenMarket Ltd
 # Copyright 2018 New Vector
-# Copyright 2021 The Matrix.org Foundation C.I.C.
+# Copyright 2021-22 The Matrix.org Foundation C.I.C.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -20,11 +20,22 @@ import hashlib
 import hmac
 import logging
 import sys
-from typing import Callable, Optional
+from typing import Any, Callable, Dict, Optional

 import requests
 import yaml

+_CONFLICTING_SHARED_SECRET_OPTS_ERROR = """\
+Conflicting options 'registration_shared_secret' and 'registration_shared_secret_path'
+are both defined in config file.
+"""
+
+_NO_SHARED_SECRET_OPTS_ERROR = """\
+No 'registration_shared_secret' or 'registration_shared_secret_path' defined in config.
+"""
+
+_DEFAULT_SERVER_URL = "http://localhost:8008"
+
+
 def request_registration(
     user: str,
@@ -203,31 +214,104 @@ def main() -> None:

     parser.add_argument(
         "server_url",
-        default="https://localhost:8448",
         nargs="?",
-        help="URL to use to talk to the homeserver. Defaults to "
-        " 'https://localhost:8448'.",
+        help="URL to use to talk to the homeserver. By default, tries to find a "
+        "suitable URL from the configuration file. Otherwise, defaults to "
+        f"'{_DEFAULT_SERVER_URL}'.",
     )

     args = parser.parse_args()

     if "config" in args and args.config:
         config = yaml.safe_load(args.config)
-        secret = config.get("registration_shared_secret", None)
-        if not secret:
-            print("No 'registration_shared_secret' defined in config.")
-            sys.exit(1)
-    else:
+
+    if args.shared_secret:
         secret = args.shared_secret
+    else:
+        # argparse should check that we have either config or shared secret
+        assert config
+
+        secret = config.get("registration_shared_secret")
+        secret_file = config.get("registration_shared_secret_path")
+        if secret_file:
+            if secret:
+                print(_CONFLICTING_SHARED_SECRET_OPTS_ERROR, file=sys.stderr)
+                sys.exit(1)
+            secret = _read_file(secret_file, "registration_shared_secret_path").strip()
+        if not secret:
+            print(_NO_SHARED_SECRET_OPTS_ERROR, file=sys.stderr)
+            sys.exit(1)
+
+    if args.server_url:
+        server_url = args.server_url
+    elif config:
+        server_url = _find_client_listener(config)
+        if not server_url:
+            server_url = _DEFAULT_SERVER_URL
+            print(
+                "Unable to find a suitable HTTP listener in the configuration file. "
+                f"Trying {server_url} as a last resort.",
+                file=sys.stderr,
+            )
+    else:
+        server_url = _DEFAULT_SERVER_URL
+        print(
+            f"No server url or configuration file given. Defaulting to {server_url}.",
+            file=sys.stderr,
+        )

     admin = None
     if args.admin or args.no_admin:
         admin = args.admin

     register_new_user(
-        args.user, args.password, args.server_url, secret, admin, args.user_type
+        args.user, args.password, server_url, secret, admin, args.user_type
     )
+
+
+def _read_file(file_path: Any, config_path: str) -> str:
+    """Check the given file exists, and read it into a string
+
+    If it does not, exit with an error indicating the problem
+
+    Args:
+        file_path: the file to be read
+        config_path: where in the configuration file_path came from, so that a useful
+            error can be emitted if it does not exist.
+    Returns:
+        content of the file.
+    """
+    if not isinstance(file_path, str):
+        print(f"{config_path} setting is not a string", file=sys.stderr)
+        sys.exit(1)
+
+    try:
+        with open(file_path) as file_stream:
+            return file_stream.read()
+    except OSError as e:
+        print(f"Error accessing file {file_path}: {e}", file=sys.stderr)
+        sys.exit(1)
+
+
+def _find_client_listener(config: Dict[str, Any]) -> Optional[str]:
+    # try to find a listener in the config. Returns a host:port pair
+    for listener in config.get("listeners", []):
+        if listener.get("type") != "http" or listener.get("tls", False):
+            continue
+
+        if not any(
+            name == "client"
+            for resource in listener.get("resources", [])
+            for name in resource.get("names", [])
+        ):
+            continue
+
+        # TODO: consider bind_addresses
+        return f"http://localhost:{listener['port']}"
+
+    # no suitable listeners?
+    return None


 if __name__ == "__main__":
     main()
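The net effect: an explicit `--shared-secret` wins, then the inline `registration_shared_secret`, then `registration_shared_secret_path` (the two config options are mutually exclusive), and the server URL falls back to the first non-TLS HTTP listener exposing the `client` resource. A runnable sketch of the config shape this walks (all values hypothetical):

# Sketch of the config shape the new lookup walks (hypothetical values).
config = {
    "registration_shared_secret_path": "/run/secrets/registration",
    "listeners": [
        {"port": 9093, "type": "http", "resources": [{"names": ["replication"]}]},
        {"port": 8008, "type": "http", "tls": False,
         "resources": [{"names": ["client", "federation"]}]},
    ],
}

# Mirrors _find_client_listener above: the first non-TLS http listener that
# serves the "client" resource determines the default server URL.
for listener in config.get("listeners", []):
    if listener.get("type") != "http" or listener.get("tls", False):
        continue
    if any(
        name == "client"
        for resource in listener.get("resources", [])
        for name in resource.get("names", [])
    ):
        print(f"http://localhost:{listener['port']}")  # -> http://localhost:8008
        break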
@@ -258,7 +258,6 @@ class GuestAccess:
 class ReceiptTypes:
     READ: Final = "m.read"
     READ_PRIVATE: Final = "m.read.private"
-    UNSTABLE_READ_PRIVATE: Final = "org.matrix.msc2285.read.private"
     FULLY_READ: Final = "m.fully_read"
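Dropping `UNSTABLE_READ_PRIVATE` tracks MSC2285 stabilising: clients now send the stable `m.read.private` receipt type on the ordinary receipts endpoint instead of the `org.matrix.msc2285.read.private` prefix. A hedged sketch of what that looks like from the client side (server URL, access token, room and event IDs are placeholders, and the IDs would need URL-encoding in practice):

# Sketch only; URL, token, room and event IDs are placeholders.
import requests

resp = requests.post(
    "https://matrix.example.org/_matrix/client/v3"
    "/rooms/!room:example.org/receipt/m.read.private/$eventid",
    headers={"Authorization": "Bearer <access_token>"},
    json={},
)
print(resp.status_code)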
@@ -140,13 +140,13 @@ USER_FILTER_SCHEMA = {


 @FormatChecker.cls_checks("matrix_room_id")
-def matrix_room_id_validator(room_id_str: str) -> RoomID:
-    return RoomID.from_string(room_id_str)
+def matrix_room_id_validator(room_id_str: str) -> bool:
+    return RoomID.is_valid(room_id_str)


 @FormatChecker.cls_checks("matrix_user_id")
-def matrix_user_id_validator(user_id_str: str) -> UserID:
-    return UserID.from_string(user_id_str)
+def matrix_user_id_validator(user_id_str: str) -> bool:
+    return UserID.is_valid(user_id_str)


 class Filtering:
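The return-type change matters because jsonschema format checkers signal an invalid value either by returning something falsy or by raising one of the exception types registered via `raises`; returning a `RoomID`/`UserID` object only worked because those objects happen to be truthy. `is_valid` makes the contract explicit. A standalone sketch of the convention, with a hypothetical "example_id" format:

# Sketch: jsonschema treats a falsy return from a format checker as a
# format violation (the "example_id" format here is made up).
from jsonschema import Draft7Validator, FormatChecker

checker = FormatChecker()


@checker.checks("example_id")
def _check_example_id(value: object) -> bool:
    return isinstance(value, str) and value.startswith("!")


validator = Draft7Validator(
    {"type": "string", "format": "example_id"}, format_checker=checker
)
print(list(validator.iter_errors("!room:example.org")))   # []
print(len(list(validator.iter_errors("not-a-room-id"))))  # 1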
@@ -266,15 +266,48 @@ def register_start(
     reactor.callWhenRunning(lambda: defer.ensureDeferred(wrapper()))


-def listen_metrics(bind_addresses: Iterable[str], port: int) -> None:
+def listen_metrics(
+    bind_addresses: Iterable[str], port: int, enable_legacy_metric_names: bool
+) -> None:
     """
     Start Prometheus metrics server.
     """
-    from synapse.metrics import RegistryProxy, start_http_server
+    from prometheus_client import start_http_server as start_http_server_prometheus
+
+    from synapse.metrics import (
+        RegistryProxy,
+        start_http_server as start_http_server_legacy,
+    )

     for host in bind_addresses:
         logger.info("Starting metrics listener on %s:%d", host, port)
-        start_http_server(port, addr=host, registry=RegistryProxy)
+        if enable_legacy_metric_names:
+            start_http_server_legacy(port, addr=host, registry=RegistryProxy)
+        else:
+            _set_prometheus_client_use_created_metrics(False)
+            start_http_server_prometheus(port, addr=host, registry=RegistryProxy)
+
+
+def _set_prometheus_client_use_created_metrics(new_value: bool) -> None:
+    """
+    Sets whether prometheus_client should expose `_created`-suffixed metrics for
+    all gauges, histograms and summaries.
+    There is no programmatic way to disable this without poking at internals;
+    the proper way is to use an environment variable which prometheus_client
+    loads at import time.
+
+    The motivation for disabling these `_created` metrics is that they're
+    a waste of space as they're not useful but they take up space in Prometheus.
+    """
+
+    import prometheus_client.metrics
+
+    if hasattr(prometheus_client.metrics, "_use_created"):
+        prometheus_client.metrics._use_created = new_value
+    else:
+        logger.error(
+            "Can't disable `_created` metrics in prometheus_client (brittle hack broken?)"
+        )


 def listen_manhole(
@@ -478,9 +511,10 @@ async def start(hs: "HomeServer") -> None:
     setup_sentry(hs)
     setup_sdnotify(hs)

-    # If background tasks are running on the main process, start collecting the
-    # phone home stats.
+    # If background tasks are running on the main process or this is the worker in
+    # charge of them, start collecting the phone home stats and shared usage metrics.
     if hs.config.worker.run_background_tasks:
+        await hs.get_common_usage_metrics_manager().setup()
         start_phone_stats_home(hs)

     # We now freeze all allocated objects in the hopes that (almost)
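As the docstring admits, flipping `prometheus_client.metrics._use_created` is a poke at private internals. If memory serves, prometheus_client later gained a supported switch for the same thing via an environment variable read at import time; treat the exact variable name below as an assumption and verify against the installed version:

# Sketch: the supported route, assuming a prometheus_client recent enough to
# honour PROMETHEUS_DISABLE_CREATED_SERIES. It is read at import time, so it
# must be set before prometheus_client is first imported anywhere.
import os

os.environ["PROMETHEUS_DISABLE_CREATED_SERIES"] = "True"

import prometheus_client  # noqa: E402  (deliberately imported after the env var)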
@@ -412,7 +412,11 @@ class GenericWorkerServer(HomeServer):
                         "enable_metrics is not True!"
                     )
                 else:
-                    _base.listen_metrics(listener.bind_addresses, listener.port)
+                    _base.listen_metrics(
+                        listener.bind_addresses,
+                        listener.port,
+                        enable_legacy_metric_names=self.config.metrics.enable_legacy_metrics,
+                    )
             else:
                 logger.warning("Unsupported listener type: %s", listener.type)
@@ -57,7 +57,6 @@ from synapse.http.site import SynapseSite
 from synapse.logging.context import LoggingContext
 from synapse.metrics import METRICS_PREFIX, MetricsResource, RegistryProxy
 from synapse.replication.http import REPLICATION_PREFIX, ReplicationRestResource
-from synapse.replication.tcp.resource import ReplicationStreamProtocolFactory
 from synapse.rest import ClientRestResource
 from synapse.rest.admin import AdminRestResource
 from synapse.rest.health import HealthResource
@@ -290,16 +289,6 @@ class SynapseHomeServer(HomeServer):
                     manhole_settings=self.config.server.manhole_settings,
                     manhole_globals={"hs": self},
                 )
-            elif listener.type == "replication":
-                services = listen_tcp(
-                    listener.bind_addresses,
-                    listener.port,
-                    ReplicationStreamProtocolFactory(self),
-                )
-                for s in services:
-                    self.get_reactor().addSystemEventTrigger(
-                        "before", "shutdown", s.stopListening
-                    )
             elif listener.type == "metrics":
                 if not self.config.metrics.enable_metrics:
                     logger.warning(
@@ -307,7 +296,11 @@ class SynapseHomeServer(HomeServer):
                         "enable_metrics is not True!"
                     )
                 else:
-                    _base.listen_metrics(listener.bind_addresses, listener.port)
+                    _base.listen_metrics(
+                        listener.bind_addresses,
+                        listener.port,
+                        enable_legacy_metric_names=self.config.metrics.enable_legacy_metrics,
+                    )
             else:
                 # this shouldn't happen, as the listener type should have been checked
                 # during parsing
@@ -51,6 +51,16 @@ async def phone_stats_home(
     stats: JsonDict,
     stats_process: List[Tuple[int, "resource.struct_rusage"]] = _stats_process,
 ) -> None:
+    """Collect usage statistics and send them to the configured endpoint.
+
+    Args:
+        hs: the HomeServer object to use for gathering usage data.
+        stats: the dict in which to store the statistics sent to the configured
+            endpoint. Mostly used in tests to figure out the data that is supposed to
+            be sent.
+        stats_process: statistics about resource usage of the process.
+    """
+
     logger.info("Gathering stats for reporting")
     now = int(hs.get_clock().time())
     # Ensure the homeserver has started.
@@ -83,6 +93,7 @@ async def phone_stats_home(
     #

     store = hs.get_datastores().main
+    common_metrics = await hs.get_common_usage_metrics_manager().get_metrics()

     stats["homeserver"] = hs.config.server.server_name
     stats["server_context"] = hs.config.server.server_context
@@ -104,7 +115,7 @@ async def phone_stats_home(
     room_count = await store.get_room_count()
     stats["total_room_count"] = room_count

-    stats["daily_active_users"] = await store.count_daily_users()
+    stats["daily_active_users"] = common_metrics.daily_active_users
     stats["monthly_active_users"] = await store.count_monthly_users()
     daily_active_e2ee_rooms = await store.count_daily_active_e2ee_rooms()
     stats["daily_active_e2ee_rooms"] = daily_active_e2ee_rooms
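The daily-active-users figure now comes from a shared usage-metrics manager rather than a direct store query, so the phone-home report and any other consumers of that number agree on one value computed in one place. A rough, self-contained sketch of the pattern (the names mirror the calls above, but the implementation details are hypothetical, not Synapse's actual code):

# Rough sketch of the shared usage-metrics pattern: compute once, serve to
# every consumer. Implementation details here are hypothetical.
from dataclasses import dataclass


@dataclass(frozen=True)
class CommonUsageMetrics:
    daily_active_users: int


class CommonUsageMetricsManager:
    def __init__(self, store) -> None:
        self._store = store
        self._metrics = CommonUsageMetrics(daily_active_users=0)

    async def setup(self) -> None:
        # Called once at startup on the process running background tasks.
        await self._update()

    async def get_metrics(self) -> CommonUsageMetrics:
        return self._metrics

    async def _update(self) -> None:
        self._metrics = CommonUsageMetrics(
            daily_active_users=await self._store.count_daily_users()
        )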
@@ -20,6 +20,7 @@ import logging
 import os
 import re
 from collections import OrderedDict
+from enum import Enum, auto
 from hashlib import sha256
 from textwrap import dedent
 from typing import (
@@ -603,18 +604,44 @@ class RootConfig:
             " may specify directories containing *.yaml files.",
         )

-        generate_group = parser.add_argument_group("Config generation")
-        generate_group.add_argument(
-            "--generate-config",
-            action="store_true",
-            help="Generate a config file, then exit.",
-        )
-        generate_group.add_argument(
+        # we nest the mutually-exclusive group inside another group so that the help
+        # text shows them in their own group.
+        generate_mode_group = parser.add_argument_group(
+            "Config generation mode",
+        )
+        generate_mode_exclusive = generate_mode_group.add_mutually_exclusive_group()
+        generate_mode_exclusive.add_argument(
+            # hidden option to make the type and default work
+            "--generate-mode",
+            help=argparse.SUPPRESS,
+            type=_ConfigGenerateMode,
+            default=_ConfigGenerateMode.GENERATE_MISSING_AND_RUN,
+        )
+        generate_mode_exclusive.add_argument(
+            "--generate-config",
+            help="Generate a config file, then exit.",
+            action="store_const",
+            const=_ConfigGenerateMode.GENERATE_EVERYTHING_AND_EXIT,
+            dest="generate_mode",
+        )
+        generate_mode_exclusive.add_argument(
             "--generate-missing-configs",
             "--generate-keys",
-            action="store_true",
             help="Generate any missing additional config files, then exit.",
+            action="store_const",
+            const=_ConfigGenerateMode.GENERATE_MISSING_AND_EXIT,
+            dest="generate_mode",
         )
+        generate_mode_exclusive.add_argument(
+            "--generate-missing-and-run",
+            help="Generate any missing additional config files, then run. This is the "
+            "default behaviour.",
+            action="store_const",
+            const=_ConfigGenerateMode.GENERATE_MISSING_AND_RUN,
+            dest="generate_mode",
+        )
+
+        generate_group = parser.add_argument_group("Details for --generate-config")
         generate_group.add_argument(
             "-H", "--server-name", help="The server name to generate a config file for."
         )
@@ -670,11 +697,12 @@ class RootConfig:
         config_dir_path = os.path.abspath(config_dir_path)
         data_dir_path = os.getcwd()

-        generate_missing_configs = config_args.generate_missing_configs
-
         obj = cls(config_files)

-        if config_args.generate_config:
+        if (
+            config_args.generate_mode
+            == _ConfigGenerateMode.GENERATE_EVERYTHING_AND_EXIT
+        ):
             if config_args.report_stats is None:
                 parser.error(
                     "Please specify either --report-stats=yes or --report-stats=no\n\n"
@@ -732,11 +760,14 @@ class RootConfig:
                     )
                     % (config_path,)
                 )
-                generate_missing_configs = True

         config_dict = read_config_files(config_files)
-        if generate_missing_configs:
-            obj.generate_missing_files(config_dict, config_dir_path)
+        obj.generate_missing_files(config_dict, config_dir_path)
+
+        if config_args.generate_mode in (
+            _ConfigGenerateMode.GENERATE_EVERYTHING_AND_EXIT,
+            _ConfigGenerateMode.GENERATE_MISSING_AND_EXIT,
+        ):
             return None

         obj.parse_config_dict(
@@ -965,6 +996,12 @@ def read_file(file_path: Any, config_path: Iterable[str]) -> str:
         raise ConfigError("Error accessing file %r" % (file_path,), config_path) from e


+class _ConfigGenerateMode(Enum):
+    GENERATE_MISSING_AND_RUN = auto()
+    GENERATE_MISSING_AND_EXIT = auto()
+    GENERATE_EVERYTHING_AND_EXIT = auto()
+
+
 __all__ = [
     "Config",
     "RootConfig",
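The three generation flags now collapse onto a single `generate_mode` destination, so argparse enforces mutual exclusion and the default (generate missing files, then run) needs no flag at all. A self-contained sketch of the same store_const-onto-an-Enum pattern (simplified: it uses `set_defaults` where the code above uses a hidden typed option):

# Sketch of the store_const-onto-an-Enum pattern used above.
import argparse
from enum import Enum, auto


class Mode(Enum):
    GENERATE_MISSING_AND_RUN = auto()
    GENERATE_MISSING_AND_EXIT = auto()
    GENERATE_EVERYTHING_AND_EXIT = auto()


parser = argparse.ArgumentParser()
group = parser.add_argument_group("Config generation mode")
exclusive = group.add_mutually_exclusive_group()
exclusive.add_argument(
    "--generate-config",
    action="store_const",
    const=Mode.GENERATE_EVERYTHING_AND_EXIT,
    dest="generate_mode",
)
exclusive.add_argument(
    "--generate-missing-configs",
    action="store_const",
    const=Mode.GENERATE_MISSING_AND_EXIT,
    dest="generate_mode",
)
parser.set_defaults(generate_mode=Mode.GENERATE_MISSING_AND_RUN)

print(parser.parse_args([]).generate_mode)                      # default mode
print(parser.parse_args(["--generate-config"]).generate_mode)   # everything-and-exit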
@@ -32,9 +32,6 @@ class ExperimentalConfig(Config):
         # MSC2716 (importing historical messages)
         self.msc2716_enabled: bool = experimental.get("msc2716_enabled", False)

-        # MSC2285 (unstable private read receipts)
-        self.msc2285_enabled: bool = experimental.get("msc2285_enabled", False)
-
         # MSC3244 (room version capabilities)
         self.msc3244_enabled: bool = experimental.get("msc3244_enabled", True)
@@ -74,6 +71,9 @@ class ExperimentalConfig(Config):
         self.msc3720_enabled: bool = experimental.get("msc3720_enabled", False)

         # MSC2654: Unread counts
+        #
+        # Note that enabling this will result in an incorrect unread count for
+        # previously calculated push actions.
         self.msc2654_enabled: bool = experimental.get("msc2654_enabled", False)

         # MSC2815 (allow room moderators to view redacted event content)