Merge remote-tracking branch 'origin/develop' into matrix-org-hotfixes

commit b272e7345f
@@ -17,6 +17,6 @@ services:
       SYNAPSE_POSTGRES_HOST: postgres
       SYNAPSE_POSTGRES_USER: postgres
       SYNAPSE_POSTGRES_PASSWORD: postgres
-    working_dir: /app
+    working_dir: /src
     volumes:
-      - ..:/app
+      - ..:/src
@@ -17,6 +17,6 @@ services:
       SYNAPSE_POSTGRES_HOST: postgres
       SYNAPSE_POSTGRES_USER: postgres
       SYNAPSE_POSTGRES_PASSWORD: postgres
-    working_dir: /app
+    working_dir: /src
     volumes:
-      - ..:/app
+      - ..:/src
@@ -17,6 +17,6 @@ services:
       SYNAPSE_POSTGRES_HOST: postgres
       SYNAPSE_POSTGRES_USER: postgres
       SYNAPSE_POSTGRES_PASSWORD: postgres
-    working_dir: /app
+    working_dir: /src
     volumes:
-      - ..:/app
+      - ..:/src
@@ -27,7 +27,7 @@ git config --global user.name "A robot"

 # Fetch and merge. If it doesn't work, it will raise due to set -e.
 git fetch -u origin $GITBASE
-git merge --no-edit origin/$GITBASE
+git merge --no-edit --no-commit origin/$GITBASE

 # Show what we are after.
 git --no-pager show -s
@@ -1,8 +1,7 @@
 env:
-  CODECOV_TOKEN: "2dd7eb9b-0eda-45fe-a47c-9b5ac040045f"
+  COVERALLS_REPO_TOKEN: wsJWOby6j0uCYFiCes3r0XauxO27mx8lD

 steps:
-
   - command:
       - "python -m pip install tox"
       - "tox -e check_codestyle"
@@ -10,6 +9,7 @@ steps:
     plugins:
       - docker#v3.0.1:
           image: "python:3.6"
+          mount-buildkite-agent: false

   - command:
       - "python -m pip install tox"
@@ -18,6 +18,7 @@ steps:
     plugins:
       - docker#v3.0.1:
           image: "python:3.6"
+          mount-buildkite-agent: false

   - command:
       - "python -m pip install tox"
@@ -26,6 +27,7 @@ steps:
     plugins:
       - docker#v3.0.1:
           image: "python:3.6"
+          mount-buildkite-agent: false

   - command:
       - "python -m pip install tox"
@@ -36,6 +38,7 @@ steps:
       - docker#v3.0.1:
           image: "python:3.6"
           propagate-environment: true
+          mount-buildkite-agent: false

   - command:
       - "python -m pip install tox"
@@ -44,6 +47,7 @@ steps:
     plugins:
       - docker#v3.0.1:
           image: "python:3.6"
+          mount-buildkite-agent: false

   - command:
       - "python -m pip install tox"
@@ -52,21 +56,26 @@ steps:
     plugins:
       - docker#v3.0.1:
           image: "python:3.5"
+          mount-buildkite-agent: false

   - wait

   - command:
       - "apt-get update && apt-get install -y python3.5 python3.5-dev python3-pip libxml2-dev libxslt-dev zlib1g-dev"
       - "python3.5 -m pip install tox"
-      - "tox -e py35-old,codecov"
+      - "tox -e py35-old,combine"
     label: ":python: 3.5 / SQLite / Old Deps"
     env:
       TRIAL_FLAGS: "-j 2"
       LANG: "C.UTF-8"
     plugins:
       - docker#v3.0.1:
-          image: "ubuntu:xenial" # We use xenail to get an old sqlite and python
+          image: "ubuntu:xenial" # We use xenial to get an old sqlite and python
+          workdir: "/src"
+          mount-buildkite-agent: false
           propagate-environment: true
+      - matrix-org/coveralls#v1.0:
+          parallel: "true"
     retry:
       automatic:
         - exit_status: -1
@@ -76,14 +85,18 @@ steps:

   - command:
       - "python -m pip install tox"
-      - "tox -e py35,codecov"
+      - "tox -e py35,combine"
     label: ":python: 3.5 / SQLite"
     env:
       TRIAL_FLAGS: "-j 2"
     plugins:
       - docker#v3.0.1:
           image: "python:3.5"
+          workdir: "/src"
+          mount-buildkite-agent: false
           propagate-environment: true
+      - matrix-org/coveralls#v1.0:
+          parallel: "true"
     retry:
       automatic:
         - exit_status: -1
@@ -93,14 +106,18 @@ steps:

   - command:
       - "python -m pip install tox"
-      - "tox -e py36,codecov"
+      - "tox -e py36,combine"
     label: ":python: 3.6 / SQLite"
     env:
       TRIAL_FLAGS: "-j 2"
     plugins:
       - docker#v3.0.1:
           image: "python:3.6"
+          workdir: "/src"
+          mount-buildkite-agent: false
           propagate-environment: true
+      - matrix-org/coveralls#v1.0:
+          parallel: "true"
     retry:
       automatic:
         - exit_status: -1
@@ -110,14 +127,18 @@ steps:

   - command:
       - "python -m pip install tox"
-      - "tox -e py37,codecov"
+      - "tox -e py37,combine"
     label: ":python: 3.7 / SQLite"
     env:
       TRIAL_FLAGS: "-j 2"
     plugins:
       - docker#v3.0.1:
           image: "python:3.7"
+          workdir: "/src"
+          mount-buildkite-agent: false
           propagate-environment: true
+      - matrix-org/coveralls#v1.0:
+          parallel: "true"
     retry:
       automatic:
         - exit_status: -1
@@ -131,12 +152,14 @@ steps:
     env:
       TRIAL_FLAGS: "-j 8"
     command:
-      - "bash -c 'python -m pip install tox && python -m tox -e py35-postgres,codecov'"
+      - "bash -c 'python -m pip install tox && python -m tox -e py35-postgres,combine'"
     plugins:
       - docker-compose#v2.1.0:
           run: testenv
           config:
             - .buildkite/docker-compose.py35.pg95.yaml
+      - matrix-org/coveralls#v1.0:
+          parallel: "true"
     retry:
       automatic:
         - exit_status: -1
@@ -150,12 +173,14 @@ steps:
     env:
       TRIAL_FLAGS: "-j 8"
     command:
-      - "bash -c 'python -m pip install tox && python -m tox -e py37-postgres,codecov'"
+      - "bash -c 'python -m pip install tox && python -m tox -e py37-postgres,combine'"
     plugins:
       - docker-compose#v2.1.0:
           run: testenv
           config:
             - .buildkite/docker-compose.py37.pg95.yaml
+      - matrix-org/coveralls#v1.0:
+          parallel: "true"
     retry:
       automatic:
         - exit_status: -1
@@ -169,12 +194,14 @@ steps:
     env:
       TRIAL_FLAGS: "-j 8"
     command:
-      - "bash -c 'python -m pip install tox && python -m tox -e py37-postgres,codecov'"
+      - "bash -c 'python -m pip install tox && python -m tox -e py37-postgres,combine'"
     plugins:
       - docker-compose#v2.1.0:
           run: testenv
           config:
             - .buildkite/docker-compose.py37.pg11.yaml
+      - matrix-org/coveralls#v1.0:
+          parallel: "true"
     retry:
       automatic:
         - exit_status: -1
@@ -182,7 +209,6 @@ steps:
         - exit_status: 2
           limit: 2

-
   - label: "SyTest - :python: 3.5 / SQLite / Monolith"
     agents:
       queue: "medium"
@@ -195,6 +221,16 @@ steps:
           propagate-environment: true
           always-pull: true
           workdir: "/src"
+          entrypoint: ["/bin/sh", "-e", "-c"]
+          mount-buildkite-agent: false
+          volumes: ["./logs:/logs"]
+      - artifacts#v1.2.0:
+          upload: [ "logs/**/*.log", "logs/**/*.log.*", "logs/coverage.xml" ]
+      - matrix-org/annotate:
+          path: "logs/annotate.md"
+          style: "error"
+      - matrix-org/coveralls#v1.0:
+          parallel: "true"
     retry:
       automatic:
         - exit_status: -1
@@ -216,6 +252,16 @@ steps:
           propagate-environment: true
           always-pull: true
           workdir: "/src"
+          entrypoint: ["/bin/sh", "-e", "-c"]
+          mount-buildkite-agent: false
+          volumes: ["./logs:/logs"]
+      - artifacts#v1.2.0:
+          upload: [ "logs/**/*.log", "logs/**/*.log.*", "logs/coverage.xml" ]
+      - matrix-org/annotate:
+          path: "logs/annotate.md"
+          style: "error"
+      - matrix-org/coveralls#v1.0:
+          parallel: "true"
     retry:
       automatic:
         - exit_status: -1
@@ -240,9 +286,25 @@ steps:
           propagate-environment: true
           always-pull: true
           workdir: "/src"
+          entrypoint: ["/bin/sh", "-e", "-c"]
+          mount-buildkite-agent: false
+          volumes: ["./logs:/logs"]
+      - artifacts#v1.2.0:
+          upload: [ "logs/**/*.log", "logs/**/*.log.*", "logs/coverage.xml" ]
+      - matrix-org/annotate:
+          path: "logs/annotate.md"
+          style: "error"
+      - matrix-org/coveralls#v1.0:
+          parallel: "true"
     retry:
       automatic:
         - exit_status: -1
           limit: 2
         - exit_status: 2
           limit: 2
+
+  - wait: ~
+    continue_on_failure: true
+
+  - label: Trigger webhook
+    command: "curl -k https://coveralls.io/webhook?repo_token=$COVERALLS_REPO_TOKEN -d \"payload[build_num]=$BUILDKITE_BUILD_NUMBER&payload[status]=done\""
@@ -1,7 +1,8 @@
 [run]
 branch = True
 parallel = True
-include = synapse/*
+include=$TOP/synapse/*
+data_file = $TOP/.coverage

 [report]
 precision = 2
@@ -1 +0,0 @@
-Switch to the v2 lookup API for 3PID invites.
@@ -0,0 +1 @@
+Update Buildkite pipeline to use plugins instead of buildkite-agent commands.
@@ -282,16 +282,3 @@ class IdentityHandler(BaseHandler):
         except HttpResponseException as e:
             logger.info("Proxied requestToken failed: %r", e)
             raise e.to_synapse_error()
-
-
-class LookupAlgorithm:
-    """
-    Supported hashing algorithms when performing a 3PID lookup.
-
-    SHA256 - Hashing an (address, medium, pepper) combo with sha256, then url-safe base64
-        encoding
-    NONE - Not performing any hashing. Simply sending an (address, medium) combo in plaintext
-    """
-
-    SHA256 = "sha256"
-    NONE = "none"
@@ -29,11 +29,9 @@ from twisted.internet import defer
 from synapse import types
 from synapse.api.constants import EventTypes, Membership
 from synapse.api.errors import AuthError, Codes, HttpResponseException, SynapseError
-from synapse.handlers.identity import LookupAlgorithm
 from synapse.types import RoomID, UserID
 from synapse.util.async_helpers import Linearizer
 from synapse.util.distributor import user_joined_room, user_left_room
-from synapse.util.hash import sha256_and_url_safe_base64

 from ._base import BaseHandler

@@ -545,7 +543,7 @@ class RoomMemberHandler(object):
             event (SynapseEvent): The membership event.
             context: The context of the event.
             is_guest (bool): Whether the sender is a guest.
-            remote_room_hosts (list[str]|None): Homeservers which are likely to already be in
+            room_hosts ([str]): Homeservers which are likely to already be in
                 the room, and could be danced with in order to join this
                 homeserver for the first time.
             ratelimit (bool): Whether to rate limit this request.
@@ -656,7 +654,7 @@ class RoomMemberHandler(object):
         servers.remove(room_alias.domain)
         servers.insert(0, room_alias.domain)

-        return RoomID.from_string(room_id), servers
+        return (RoomID.from_string(room_id), servers)

     @defer.inlineCallbacks
     def _get_inviter(self, user_id, room_id):
@@ -719,44 +717,6 @@ class RoomMemberHandler(object):
             raise SynapseError(
                 403, "Looking up third-party identifiers is denied from this server"
             )
-
-        # Check what hashing details are supported by this identity server
-        use_v1 = False
-        hash_details = None
-        try:
-            hash_details = yield self.simple_http_client.get_json(
-                "%s%s/_matrix/identity/v2/hash_details" % (id_server_scheme, id_server)
-            )
-        except (HttpResponseException, ValueError) as e:
-            # Catch HttpResponseExcept for a non-200 response code
-            # Catch ValueError for non-JSON response body
-
-            # Check if this identity server does not know about v2 lookups
-            if e.code == 404:
-                # This is an old identity server that does not yet support v2 lookups
-                use_v1 = True
-            else:
-                logger.warn("Error when looking up hashing details: %s" % (e,))
-                return None
-
-        if use_v1:
-            return (yield self._lookup_3pid_v1(id_server, medium, address))
-
-        return (yield self._lookup_3pid_v2(id_server, medium, address, hash_details))
-
-    @defer.inlineCallbacks
-    def _lookup_3pid_v1(self, id_server, medium, address):
-        """Looks up a 3pid in the passed identity server using v1 lookup.
-
-        Args:
-            id_server (str): The server name (including port, if required)
-                of the identity server to use.
-            medium (str): The type of the third party identifier (e.g. "email").
-            address (str): The third party identifier (e.g. "foo@example.com").
-
-        Returns:
-            str: the matrix ID of the 3pid, or None if it is not recognized.
-        """
         try:
             data = yield self.simple_http_client.get_json(
                 "%s%s/_matrix/identity/api/v1/lookup" % (id_server_scheme, id_server),
@@ -771,83 +731,8 @@ class RoomMemberHandler(object):

         except IOError as e:
             logger.warn("Error from identity server lookup: %s" % (e,))

             return None
-
-    @defer.inlineCallbacks
-    def _lookup_3pid_v2(self, id_server, medium, address, hash_details):
-        """Looks up a 3pid in the passed identity server using v2 lookup.
-
-        Args:
-            id_server (str): The server name (including port, if required)
-                of the identity server to use.
-            medium (str): The type of the third party identifier (e.g. "email").
-            address (str): The third party identifier (e.g. "foo@example.com").
-            hash_details (dict[str, str|list]): A dictionary containing hashing information
-                provided by an identity server.
-
-        Returns:
-            Deferred[str|None]: the matrix ID of the 3pid, or None if it is not recognised.
-        """
-        # Extract information from hash_details
-        supported_lookup_algorithms = hash_details["algorithms"]
-        lookup_pepper = hash_details["lookup_pepper"]
-
-        # Check if any of the supported lookup algorithms are present
-        if LookupAlgorithm.SHA256 in supported_lookup_algorithms:
-            # Perform a hashed lookup
-            lookup_algorithm = LookupAlgorithm.SHA256
-
-            # Hash address, medium and the pepper with sha256
-            to_hash = "%s %s %s" % (address, medium, lookup_pepper)
-            lookup_value = sha256_and_url_safe_base64(to_hash)
-
-        elif LookupAlgorithm.NONE in supported_lookup_algorithms:
-            # Perform a non-hashed lookup
-            lookup_algorithm = LookupAlgorithm.NONE
-
-            # Combine together plaintext address and medium
-            lookup_value = "%s %s" % (address, medium)
-
-        else:
-            logger.warn(
-                "None of the provided lookup algorithms of %s%s are supported: %s",
-                id_server_scheme,
-                id_server,
-                hash_details["algorithms"],
-            )
-            raise SynapseError(
-                400,
-                "Provided identity server does not support any v2 lookup "
-                "algorithms that this homeserver supports.",
-            )
-
-        try:
-            lookup_results = yield self.simple_http_client.post_json_get_json(
-                "%s%s/_matrix/identity/v2/lookup" % (id_server_scheme, id_server),
-                {
-                    "addresses": [lookup_value],
-                    "algorithm": lookup_algorithm,
-                    "pepper": lookup_pepper,
-                },
-            )
-        except (HttpResponseException, ValueError) as e:
-            # Catch HttpResponseExcept for a non-200 response code
-            # Catch ValueError for non-JSON response body
-            logger.warn("Error when performing a 3pid lookup: %s" % (e,))
-            return None
-
-        # Check for a mapping from what we looked up to an MXID
-        if "mappings" not in lookup_results or not isinstance(
-            lookup_results["mappings"], dict
-        ):
-            logger.debug("No results from 3pid lookup")
-            return None
-
-        # Return the MXID if it's available, or None otherwise
-        mxid = lookup_results["mappings"].get(lookup_value)
-        return mxid
-
     @defer.inlineCallbacks
     def _verify_any_signature(self, data, server_hostname):
         if server_hostname not in data["signatures"]:
@@ -1097,7 +982,9 @@ class RoomMemberMasterHandler(RoomMemberHandler):
         )

         if complexity:
-            return complexity["v1"] > max_complexity
+            if complexity["v1"] > max_complexity:
+                return True
+            return False
         return None

     @defer.inlineCallbacks
@@ -1113,7 +1000,10 @@ class RoomMemberMasterHandler(RoomMemberHandler):
         max_complexity = self.hs.config.limit_remote_rooms.complexity
         complexity = yield self.store.get_room_complexity(room_id)

-        return complexity["v1"] > max_complexity
+        if complexity["v1"] > max_complexity:
+            return True
+
+        return False

     @defer.inlineCallbacks
     def _remote_join(self, requester, remote_room_hosts, room_id, user, content):
@@ -1,33 +0,0 @@
-# -*- coding: utf-8 -*-
-
-# Copyright 2019 The Matrix.org Foundation C.I.C.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import hashlib
-
-import unpaddedbase64
-
-
-def sha256_and_url_safe_base64(input_text):
-    """SHA256 hash an input string, encode the digest as url-safe base64, and
-    return
-
-    :param input_text: string to hash
-    :type input_text: str
-
-    :returns a sha256 hashed and url-safe base64 encoded digest
-    :rtype: str
-    """
-    digest = hashlib.sha256(input_text.encode()).digest()
-    return unpaddedbase64.encode_base64(digest, urlsafe=True)
tox.ini (18 changes)
@@ -7,6 +7,7 @@ deps =
     python-subunit
     junitxml
     coverage
+    coverage-enable-subprocess
     parameterized

     # cyptography 2.2 requires setuptools >= 18.5
@@ -43,13 +44,13 @@ whitelist_externals =
 setenv =
     {[base]setenv}
     postgres: SYNAPSE_POSTGRES = 1
+    TOP={toxinidir}

 passenv = *

 commands =
     /usr/bin/find "{toxinidir}" -name '*.pyc' -delete
     # Add this so that coverage will run on subprocesses
-    sh -c 'echo "import coverage; coverage.process_startup()" > {envsitepackagesdir}/../sitecustomize.py'
     {envbindir}/coverage run "{envbindir}/trial" {env:TRIAL_FLAGS:} {posargs:tests} {env:TOXSUFFIX:}

     # As of twisted 16.4, trial tries to import the tests as a package (previously
@@ -75,8 +76,6 @@ commands =
 # )
 usedevelop=true

-
-
 # A test suite for the oldest supported versions of Python libraries, to catch
 # any uses of APIs not available in them.
 [testenv:py35-old]
@@ -88,6 +87,7 @@ deps =
     mock
     lxml
     coverage
+    coverage-enable-subprocess

 commands =
     /usr/bin/find "{toxinidir}" -name '*.pyc' -delete
@@ -96,15 +96,11 @@ commands =
     # OpenSSL 1.1 compiled cryptography (as older ones don't compile on Travis).
     /bin/sh -c 'python -m synapse.python_dependencies | sed -e "s/>=/==/g" -e "s/psycopg2==2.6//" -e "s/pyopenssl==16.0.0/pyopenssl==17.0.0/" | xargs -d"\n" pip install'

-    # Add this so that coverage will run on subprocesses
-    /bin/sh -c 'echo "import coverage; coverage.process_startup()" > {envsitepackagesdir}/../sitecustomize.py'
-
     # Install Synapse itself. This won't update any libraries.
     pip install -e .

     {envbindir}/coverage run "{envbindir}/trial" {env:TRIAL_FLAGS:} {posargs:tests} {env:TOXSUFFIX:}

-
 [testenv:packaging]
 skip_install=True
 deps =
@@ -137,15 +133,15 @@ basepython = python3.6
 [testenv:check-sampleconfig]
 commands = {toxinidir}/scripts-dev/generate_sample_config --check

-[testenv:codecov]
+[testenv:combine]
 skip_install = True
 deps =
     coverage
-    codecov
+whitelist_externals =
+    bash
 commands=
     coverage combine
-    coverage xml
-    codecov -X gcov
+    coverage report

 [testenv:mypy]
 basepython = python3.5