# Copyright 2014-2016 OpenMarket Ltd
# Copyright 2018 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import urllib.parse
from http import HTTPStatus
from io import BytesIO
from typing import (
    TYPE_CHECKING,
    Any,
    BinaryIO,
    Callable,
    Dict,
    List,
    Mapping,
    Optional,
    Tuple,
    Union,
)

import treq
from canonicaljson import encode_canonical_json
from netaddr import AddrFormatError, IPAddress, IPSet
from prometheus_client import Counter
from typing_extensions import Protocol
from zope.interface import implementer, provider

from OpenSSL import SSL
from OpenSSL.SSL import VERIFY_NONE
from twisted.internet import defer, error as twisted_error, protocol, ssl
from twisted.internet.address import IPv4Address, IPv6Address
from twisted.internet.interfaces import (
    IAddress,
    IDelayedCall,
    IHostResolution,
    IOpenSSLContextFactory,
    IReactorCore,
    IReactorPluggableNameResolver,
    IReactorTime,
    IResolutionReceiver,
    ITCPTransport,
)
from twisted.internet.protocol import connectionDone
from twisted.internet.task import Cooperator
from twisted.python.failure import Failure
from twisted.web._newclient import ResponseDone
from twisted.web.client import (
    Agent,
    HTTPConnectionPool,
    ResponseNeverReceived,
    readBody,
)
from twisted.web.http import PotentialDataLoss
from twisted.web.http_headers import Headers
from twisted.web.iweb import (
    UNKNOWN_LENGTH,
    IAgent,
    IBodyProducer,
    IPolicyForHTTPS,
    IResponse,
)

from synapse.api.errors import Codes, HttpResponseException, SynapseError
from synapse.http import QuieterFileBodyProducer, RequestTimedOutError, redact_uri
from synapse.http.proxyagent import ProxyAgent
from synapse.http.types import QueryParams
from synapse.logging.context import make_deferred_yieldable
from synapse.logging.opentracing import set_tag, start_active_span, tags
from synapse.types import ISynapseReactor
from synapse.util import json_decoder
from synapse.util.async_helpers import timeout_deferred

if TYPE_CHECKING:
    from synapse.server import HomeServer

logger = logging.getLogger(__name__)

outgoing_requests_counter = Counter("synapse_http_client_requests", "", ["method"])
incoming_responses_counter = Counter(
    "synapse_http_client_responses", "", ["method", "code"]
)

# The type of the headers map, to be passed to t.w.h.Headers.
#
# The actual type accepted by Twisted is
# Mapping[Union[str, bytes], Sequence[Union[str, bytes]]],
# allowing us to mix and match str and bytes freely. However: any str is also a
# Sequence[str], so a header value which is a standalone str is interpreted as a
# sequence of 1-codepoint strings. This is a disastrous footgun.
# We use a narrower value type (RawHeaderValue) to avoid this footgun.
#
# We also simplify the keys to be either all str or all bytes. This helps because
# Dict[K, V] is invariant in K (and indeed V).
RawHeaders = Union[Mapping[str, "RawHeaderValue"], Mapping[bytes, "RawHeaderValue"]]

# The value has to be a List or a Tuple (a general Sequence would also admit a
# bare str, which is the footgun described above); the entries may mix str and
# bytes.
RawHeaderValue = Union[
    List[str],
    List[bytes],
    List[Union[str, bytes]],
    Tuple[str, ...],
    Tuple[bytes, ...],
    Tuple[Union[str, bytes], ...],
]
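
# An illustrative example (not part of the module): a RawHeaders value with
# all-bytes keys and list values, as accepted by the helper methods below.
#
#     example_headers: RawHeaders = {
#         b"User-Agent": [b"Synapse"],
#         b"Accept": [b"application/json"],
#     }
#     Headers(dict(example_headers))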


def check_against_blacklist(
    ip_address: IPAddress, ip_whitelist: Optional[IPSet], ip_blacklist: IPSet
) -> bool:
    """
    Compares an IP address to allowed and disallowed IP sets.

    Args:
        ip_address: The IP address to check
        ip_whitelist: Allowed IP addresses.
        ip_blacklist: Disallowed IP addresses.

    Returns:
        True if the IP address is in the blacklist and not in the whitelist.
    """
    if ip_address in ip_blacklist:
        if ip_whitelist is None or ip_address not in ip_whitelist:
            return True
    return False
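
# An illustrative example (not part of the module), assuming a hypothetical
# private-range blacklist with a single whitelisted host:
#
#     blacklist = IPSet(["10.0.0.0/8"])
#     whitelist = IPSet(["10.1.2.3/32"])
#     check_against_blacklist(IPAddress("10.0.0.5"), whitelist, blacklist)  # True
#     check_against_blacklist(IPAddress("10.1.2.3"), whitelist, blacklist)  # False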


_EPSILON = 0.00000001


def _make_scheduler(
    reactor: IReactorTime,
) -> Callable[[Callable[[], object]], IDelayedCall]:
    """Makes a scheduler suitable for a Cooperator using the given reactor.

    (This is effectively just a copy from `twisted.internet.task`)
    """

    def _scheduler(x: Callable[[], object]) -> IDelayedCall:
        return reactor.callLater(_EPSILON, x)

    return _scheduler
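
# A minimal sketch (illustrative, not part of the module): driving a Cooperator
# with this scheduler, so that iteration work is spread across reactor ticks.
#
#     from twisted.internet import reactor
#     coop = Cooperator(scheduler=_make_scheduler(reactor))
#     coop.cooperate(iter(range(100)))  # consumed in _EPSILON-delayed callLater steps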


class _IPBlacklistingResolver:
    """
    A proxy for reactor.nameResolver which only produces non-blacklisted IP
    addresses, preventing DNS rebinding attacks on URL preview.
    """

    def __init__(
        self,
        reactor: IReactorPluggableNameResolver,
        ip_whitelist: Optional[IPSet],
        ip_blacklist: IPSet,
    ):
        """
        Args:
            reactor: The twisted reactor.
            ip_whitelist: IP addresses to allow.
            ip_blacklist: IP addresses to disallow.
        """
        self._reactor = reactor
        self._ip_whitelist = ip_whitelist
        self._ip_blacklist = ip_blacklist

    def resolveHostName(
        self, recv: IResolutionReceiver, hostname: str, portNumber: int = 0
    ) -> IResolutionReceiver:
        addresses: List[IAddress] = []

        def _callback() -> None:
            has_bad_ip = False
            for address in addresses:
                # We only expect IPv4 and IPv6 addresses since only A/AAAA lookups
                # should go through this path.
                if not isinstance(address, (IPv4Address, IPv6Address)):
                    continue

                ip_address = IPAddress(address.host)

                if check_against_blacklist(
                    ip_address, self._ip_whitelist, self._ip_blacklist
                ):
                    logger.info(
                        "Dropped %s from DNS resolution to %s due to blacklist",
                        ip_address,
                        hostname,
                    )
                    has_bad_ip = True

            # if we have a blacklisted IP, we'd like to raise an error to block the
            # request, but all we can really do from here is claim that there were no
            # valid results.
            if not has_bad_ip:
                for address in addresses:
                    recv.addressResolved(address)
            recv.resolutionComplete()

        @provider(IResolutionReceiver)
        class EndpointReceiver:
            @staticmethod
            def resolutionBegan(resolutionInProgress: IHostResolution) -> None:
                recv.resolutionBegan(resolutionInProgress)

            @staticmethod
            def addressResolved(address: IAddress) -> None:
                addresses.append(address)

            @staticmethod
            def resolutionComplete() -> None:
                _callback()

        self._reactor.nameResolver.resolveHostName(
            EndpointReceiver, hostname, portNumber=portNumber
        )

        return recv


# ISynapseReactor implies IReactorCore, but explicitly marking this class as an
# implementer of IReactorCore seems to keep mypy-zope happier.
@implementer(IReactorCore, ISynapseReactor)
class BlacklistingReactorWrapper:
    """
    A Reactor wrapper which will prevent DNS resolution to blacklisted IP
    addresses, to prevent DNS rebinding.
    """

    def __init__(
        self,
        reactor: IReactorPluggableNameResolver,
        ip_whitelist: Optional[IPSet],
        ip_blacklist: IPSet,
    ):
        self._reactor = reactor

        # We need to use a DNS resolver which filters out blacklisted IP
        # addresses, to prevent DNS rebinding.
        self._nameResolver = _IPBlacklistingResolver(
            self._reactor, ip_whitelist, ip_blacklist
        )

    def __getattr__(self, attr: str) -> Any:
        # Passthrough to the real reactor except for the DNS resolver.
        if attr == "nameResolver":
            return self._nameResolver
        else:
            return getattr(self._reactor, attr)
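
# A minimal sketch (illustrative, not part of the module): wrapping the global
# reactor so that DNS results in a hypothetical blacklisted range are dropped.
#
#     from twisted.internet import reactor
#     safe_reactor = BlacklistingReactorWrapper(
#         reactor, ip_whitelist=None, ip_blacklist=IPSet(["127.0.0.0/8"])
#     )
#     # Agents built on safe_reactor cannot be tricked into connecting to
#     # loopback via DNS rebinding.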


class BlacklistingAgentWrapper(Agent):
    """
    An Agent wrapper which blocks requests made directly to IP addresses
    (i.e. without a DNS lookup).
    """

    def __init__(
        self,
        agent: IAgent,
        ip_blacklist: IPSet,
        ip_whitelist: Optional[IPSet] = None,
    ):
        """
        Args:
            agent: The Agent to wrap.
            ip_blacklist: IP addresses to disallow.
            ip_whitelist: IP addresses to allow.
        """
        self._agent = agent
        self._ip_whitelist = ip_whitelist
        self._ip_blacklist = ip_blacklist

    def request(
        self,
        method: bytes,
        uri: bytes,
        headers: Optional[Headers] = None,
        bodyProducer: Optional[IBodyProducer] = None,
    ) -> defer.Deferred:
        h = urllib.parse.urlparse(uri.decode("ascii"))

        try:
            # h.hostname is Optional[str]; None raises an AddrFormatError, so
            # this is safe even though IPAddress requires a str.
            ip_address = IPAddress(h.hostname)  # type: ignore[arg-type]
        except AddrFormatError:
            # Not an IP
            pass
        else:
            if check_against_blacklist(
                ip_address, self._ip_whitelist, self._ip_blacklist
            ):
                logger.info("Blocking access to %s due to blacklist", ip_address)
                e = SynapseError(
                    HTTPStatus.FORBIDDEN, "IP address blocked by IP blacklist entry"
                )
                return defer.fail(Failure(e))

        return self._agent.request(
            method, uri, headers=headers, bodyProducer=bodyProducer
        )
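
# A minimal sketch (illustrative, not part of the module): a request to a
# literal IP in a hypothetical blacklisted range fails immediately, while
# hostname requests pass through to the wrapped Agent for DNS-level filtering.
#
#     agent = BlacklistingAgentWrapper(
#         Agent(reactor), ip_blacklist=IPSet(["169.254.0.0/16"])
#     )
#     d = agent.request(b"GET", b"http://169.254.169.254/latest/meta-data")
#     # d fails with a SynapseError(403) wrapped in a Failure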


class BaseHttpClient:
    """
    A simple, no-frills HTTP client with methods that wrap up common ways of
    using HTTP in Matrix. Does not come with a default Agent; subclasses will
    need to define their own.

    Args:
        hs: The HomeServer instance to pass in
        treq_args: Extra keyword arguments to be given to treq.request.
    """

    agent: IAgent

    def __init__(
        self,
        hs: "HomeServer",
        treq_args: Optional[Dict[str, Any]] = None,
    ):
        self.hs = hs
        self.reactor = hs.get_reactor()

        self._extra_treq_args = treq_args or {}
        self.clock = hs.get_clock()

        user_agent = hs.version_string
        if hs.config.server.user_agent_suffix:
            user_agent = "%s %s" % (
                user_agent,
                hs.config.server.user_agent_suffix,
            )
        self.user_agent = user_agent.encode("ascii")

        # We use this for our body producers to ensure that they use the correct
        # reactor.
        self._cooperator = Cooperator(scheduler=_make_scheduler(hs.get_reactor()))

    async def request(
        self,
        method: str,
        uri: str,
        data: Optional[bytes] = None,
        headers: Optional[Headers] = None,
    ) -> IResponse:
        """
        Args:
            method: HTTP method to use.
            uri: URI to query.
            data: Data to send in the request body, if applicable.
            headers: Request headers.

        Returns:
            Response object, once the headers have been read.

        Raises:
            RequestTimedOutError: if the request times out before the headers are read
        """
        outgoing_requests_counter.labels(method).inc()

        # log request but strip `access_token` (AS requests for example include this)
        logger.debug("Sending request %s %s", method, redact_uri(uri))

        with start_active_span(
            "outgoing-client-request",
            tags={
                tags.SPAN_KIND: tags.SPAN_KIND_RPC_CLIENT,
                tags.HTTP_METHOD: method,
                tags.HTTP_URL: uri,
            },
            finish_on_close=True,
        ):
            try:
                body_producer = None
                if data is not None:
                    body_producer = QuieterFileBodyProducer(
                        BytesIO(data),
                        cooperator=self._cooperator,
                    )

                request_deferred: defer.Deferred = treq.request(
                    method,
                    uri,
                    agent=self.agent,
                    data=body_producer,
                    headers=headers,
                    # Avoid buffering the body in treq since we do not reuse
                    # response bodies.
                    unbuffered=True,
                    **self._extra_treq_args,
                )

                # we use our own timeout mechanism rather than treq's as a workaround
                # for https://twistedmatrix.com/trac/ticket/9534.
                request_deferred = timeout_deferred(
                    request_deferred,
                    60,
                    self.hs.get_reactor(),
                )

                # turn timeouts into RequestTimedOutErrors
                request_deferred.addErrback(_timeout_to_request_timed_out_error)

                response = await make_deferred_yieldable(request_deferred)

                incoming_responses_counter.labels(method, response.code).inc()
                logger.info(
                    "Received response to %s %s: %s",
                    method,
                    redact_uri(uri),
                    response.code,
                )
                return response
            except Exception as e:
                incoming_responses_counter.labels(method, "ERR").inc()
                logger.info(
                    "Error sending request to %s %s: %s %s",
                    method,
                    redact_uri(uri),
                    type(e).__name__,
                    e.args[0],
                )
                set_tag(tags.ERROR, True)
                set_tag("error_reason", e.args[0])
                raise
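
    # A minimal sketch (illustrative, not part of the module) of using this
    # from an async context, where `client` is an instance of a subclass:
    #
    #     response = await client.request("GET", "https://example.com/")
    #     body = await make_deferred_yieldable(readBody(response))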

    async def post_urlencoded_get_json(
        self,
        uri: str,
        args: Optional[Mapping[str, Union[str, List[str]]]] = None,
        headers: Optional[RawHeaders] = None,
    ) -> Any:
        """
        Args:
            uri: uri to query
            args: parameters to be url-encoded in the body
            headers: a map from header name to a list of values for that header

        Returns:
            parsed json

        Raises:
            RequestTimedOutError: if there is a timeout before the response headers
                are received. Note there is currently no timeout on reading the
                response body.

            HttpResponseException: On a non-2xx HTTP response.

            ValueError: if the response was not JSON
        """
        # TODO: Do we ever want to log message contents?
        logger.debug("post_urlencoded_get_json args: %s", args)

        query_bytes = encode_query_args(args)

        actual_headers = {
            b"Content-Type": [b"application/x-www-form-urlencoded"],
            b"User-Agent": [self.user_agent],
            b"Accept": [b"application/json"],
        }
        if headers:
            actual_headers.update(headers)  # type: ignore

        response = await self.request(
            "POST", uri, headers=Headers(actual_headers), data=query_bytes
        )

        body = await make_deferred_yieldable(readBody(response))

        if 200 <= response.code < 300:
            return json_decoder.decode(body.decode("utf-8"))
        else:
            raise HttpResponseException(
                response.code, response.phrase.decode("ascii", errors="replace"), body
            )
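
    # A minimal sketch (illustrative, not part of the module); the endpoint and
    # form fields are hypothetical:
    #
    #     token = await client.post_urlencoded_get_json(
    #         "https://example.com/oauth/token",
    #         args={"grant_type": "client_credentials"},
    #     )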

    async def post_json_get_json(
        self, uri: str, post_json: Any, headers: Optional[RawHeaders] = None
    ) -> Any:
        """
        Args:
            uri: URI to query.
            post_json: request body, to be encoded as json
            headers: a map from header name to a list of values for that header

        Returns:
            parsed json

        Raises:
            RequestTimedOutError: if there is a timeout before the response headers
                are received. Note there is currently no timeout on reading the
                response body.

            HttpResponseException: On a non-2xx HTTP response.

            ValueError: if the response was not JSON
        """
        json_str = encode_canonical_json(post_json)

        logger.debug("HTTP POST %s -> %s", json_str, uri)

        actual_headers = {
            b"Content-Type": [b"application/json"],
            b"User-Agent": [self.user_agent],
            b"Accept": [b"application/json"],
        }
        if headers:
            actual_headers.update(headers)  # type: ignore

        response = await self.request(
            "POST", uri, headers=Headers(actual_headers), data=json_str
        )

        body = await make_deferred_yieldable(readBody(response))

        if 200 <= response.code < 300:
            return json_decoder.decode(body.decode("utf-8"))
        else:
            raise HttpResponseException(
                response.code, response.phrase.decode("ascii", errors="replace"), body
            )
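
    # A minimal sketch (illustrative, not part of the module); the endpoint and
    # body are hypothetical:
    #
    #     reply = await client.post_json_get_json(
    #         "https://example.com/api/notify", {"notification": {"count": 1}}
    #     )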

    async def get_json(
        self,
        uri: str,
        args: Optional[QueryParams] = None,
        headers: Optional[RawHeaders] = None,
    ) -> Any:
        """Gets some json from the given URI.

        Args:
            uri: The URI to request, not including query parameters
            args: A dictionary used to create query string
            headers: a map from header name to a list of values for that header
        Returns:
            Succeeds when we get a 2xx HTTP response, with the HTTP body as JSON.
        Raises:
            RequestTimedOutError: if there is a timeout before the response headers
                are received. Note there is currently no timeout on reading the
                response body.

            HttpResponseException: On a non-2xx HTTP response.

            ValueError: if the response was not JSON
        """
        actual_headers = {b"Accept": [b"application/json"]}
        if headers:
            actual_headers.update(headers)  # type: ignore

        body = await self.get_raw(uri, args, headers=actual_headers)
        return json_decoder.decode(body.decode("utf-8"))
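
    # A minimal sketch (illustrative, not part of the module); the URL and
    # query parameters are hypothetical, and `args` becomes the query string:
    #
    #     data = await client.get_json(
    #         "https://example.com/items", args={"limit": "10"}
    #     )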

    async def put_json(
        self,
        uri: str,
        json_body: Any,
        args: Optional[QueryParams] = None,
        headers: Optional[RawHeaders] = None,
    ) -> Any:
        """Puts some json to the given URI.

        Args:
            uri: The URI to request, not including query parameters
            json_body: The JSON to put in the HTTP body
            args: A dictionary used to create query strings
            headers: a map from header name to a list of values for that header
        Returns:
            Succeeds when we get a 2xx HTTP response, with the HTTP body as JSON.
        Raises:
            RequestTimedOutError: if there is a timeout before the response headers
                are received. Note there is currently no timeout on reading the
                response body.

            HttpResponseException: On a non-2xx HTTP response.

            ValueError: if the response was not JSON
        """
        if args:
            query_str = urllib.parse.urlencode(args, True)
            uri = "%s?%s" % (uri, query_str)

        json_str = encode_canonical_json(json_body)

        actual_headers = {
            b"Content-Type": [b"application/json"],
            b"User-Agent": [self.user_agent],
            b"Accept": [b"application/json"],
        }
        if headers:
            actual_headers.update(headers)  # type: ignore

        response = await self.request(
            "PUT", uri, headers=Headers(actual_headers), data=json_str
        )

        body = await make_deferred_yieldable(readBody(response))

        if 200 <= response.code < 300:
            return json_decoder.decode(body.decode("utf-8"))
        else:
            raise HttpResponseException(
                response.code, response.phrase.decode("ascii", errors="replace"), body
            )

    async def get_raw(
        self,
        uri: str,
        args: Optional[QueryParams] = None,
        headers: Optional[RawHeaders] = None,
    ) -> bytes:
        """Gets raw text from the given URI.

        Args:
            uri: The URI to request, not including query parameters
            args: A dictionary used to create query strings
            headers: a map from header name to a list of values for that header
        Returns:
            Succeeds when we get a 2xx HTTP response, with the HTTP body as bytes.
        Raises:
            RequestTimedOutError: if there is a timeout before the response headers
                are received. Note there is currently no timeout on reading the
                response body.

            HttpResponseException: On a non-2xx HTTP response.
        """
        if args:
            query_str = urllib.parse.urlencode(args, True)
            uri = "%s?%s" % (uri, query_str)

        actual_headers = {b"User-Agent": [self.user_agent]}
        if headers:
            actual_headers.update(headers)  # type: ignore

        response = await self.request("GET", uri, headers=Headers(actual_headers))

        body = await make_deferred_yieldable(readBody(response))

        if 200 <= response.code < 300:
            return body
        else:
            raise HttpResponseException(
                response.code, response.phrase.decode("ascii", errors="replace"), body
            )

    # XXX: FIXME: This is horribly copy-pasted from matrixfederationclient.
    # The two should be factored out.

    async def get_file(
        self,
        url: str,
        output_stream: BinaryIO,
        max_size: Optional[int] = None,
        headers: Optional[RawHeaders] = None,
        is_allowed_content_type: Optional[Callable[[str], bool]] = None,
    ) -> Tuple[int, Dict[bytes, List[bytes]], str, int]:
        """GETs a file from a given URL
        Args:
            url: The URL to GET
            output_stream: File to write the response body to.
            max_size: The maximum response body size (in bytes) to allow; if
                exceeded, the download is aborted with a SynapseError.
            headers: A map from header name to a list of values for that header
            is_allowed_content_type: A predicate to determine whether the
                content type of the file we're downloading is allowed. If set and
                it evaluates to False when called with the content type, the
                request will be terminated before completing the download by
                raising SynapseError.
        Returns:
            A tuple of the file length, dict of the response
            headers, absolute URI of the response and HTTP response code.

        Raises:
            RequestTimedOutError: if there is a timeout before the response headers
                are received. Note there is currently no timeout on reading the
                response body.

            SynapseError: if the response is not a 2xx, the remote file is too large, or
                another exception happens during the download.
        """
        actual_headers = {b"User-Agent": [self.user_agent]}
        if headers:
            actual_headers.update(headers)  # type: ignore

        response = await self.request("GET", url, headers=Headers(actual_headers))

        resp_headers = dict(response.headers.getAllRawHeaders())

        if response.code > 299:
            logger.warning("Got %d when downloading %s", response.code, url)
            raise SynapseError(
                HTTPStatus.BAD_GATEWAY, "Got error %d" % (response.code,), Codes.UNKNOWN
            )

        if is_allowed_content_type and b"Content-Type" in resp_headers:
            content_type = resp_headers[b"Content-Type"][0].decode("ascii")
            if not is_allowed_content_type(content_type):
                raise SynapseError(
                    HTTPStatus.BAD_GATEWAY,
                    (
                        "Requested file's content type not allowed for this operation: %s"
                        % content_type
                    ),
                )

        # TODO: if our Content-Type is HTML or something, just read the first
        # N bytes into RAM rather than saving it all to disk only to read it
        # straight back in again

        try:
            d = read_body_with_max_size(response, output_stream, max_size)

            # Ensure that the body is not read forever.
            d = timeout_deferred(d, 30, self.hs.get_reactor())

            length = await make_deferred_yieldable(d)
        except BodyExceededMaxSize:
            raise SynapseError(
                HTTPStatus.BAD_GATEWAY,
                "Requested file is too large > %r bytes" % (max_size,),
                Codes.TOO_LARGE,
            )
        except defer.TimeoutError:
            raise SynapseError(
                HTTPStatus.BAD_GATEWAY,
                "Requested file took too long to download",
                Codes.TOO_LARGE,
            )
        except Exception as e:
            raise SynapseError(
                HTTPStatus.BAD_GATEWAY, ("Failed to download remote body: %s" % e)
            ) from e

        return (
            length,
            resp_headers,
            response.request.absoluteURI.decode("ascii"),
            response.code,
        )
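
    # A minimal sketch (illustrative, not part of the module); the path, URL,
    # and size cap are hypothetical:
    #
    #     with open("/tmp/avatar.png", "wb") as f:
    #         length, resp_headers, absolute_uri, code = await client.get_file(
    #             "https://example.com/avatar.png",
    #             f,
    #             max_size=10 * 1024 * 1024,
    #             is_allowed_content_type=lambda ct: ct.startswith("image/"),
    #         )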


class SimpleHttpClient(BaseHttpClient):
    """
    An HTTP client capable of crossing a proxy and respecting a block/allow list.

    This also configures a larger / longer lasting HTTP connection pool.

    Args:
        hs: The HomeServer instance to pass in
        treq_args: Extra keyword arguments to be given to treq.request.
        ip_blacklist: IP addresses which we may not make requests to.
        ip_whitelist: IP addresses which may be requested even if they would
            otherwise be caught by the blacklist.
        use_proxy: Whether proxy settings should be discovered and used
            from conventional environment variables.
    """

    def __init__(
        self,
        hs: "HomeServer",
        treq_args: Optional[Dict[str, Any]] = None,
        ip_whitelist: Optional[IPSet] = None,
        ip_blacklist: Optional[IPSet] = None,
        use_proxy: bool = False,
    ):
        super().__init__(hs, treq_args=treq_args)
        self._ip_whitelist = ip_whitelist
        self._ip_blacklist = ip_blacklist

        if self._ip_blacklist:
            # If we have an IP blacklist, we need to use a DNS resolver which
            # filters out blacklisted IP addresses, to prevent DNS rebinding.
            self.reactor: ISynapseReactor = BlacklistingReactorWrapper(
                self.reactor, self._ip_whitelist, self._ip_blacklist
            )

        # the pusher makes lots of concurrent SSL connections to Sygnal, and tends to
        # do so in batches, so we need to allow the pool to keep lots of idle
        # connections around.
        pool = HTTPConnectionPool(self.reactor)
        # XXX: The justification for using the cache factor here is that larger
        # instances will need both more cache and more connections.
        # Still, this should probably be a separate dial
        pool.maxPersistentPerHost = max(int(100 * hs.config.caches.global_factor), 5)
        pool.cachedConnectionTimeout = 2 * 60

        self.agent: IAgent = ProxyAgent(
            self.reactor,
            hs.get_reactor(),
            connectTimeout=15,
            contextFactory=self.hs.get_http_client_context_factory(),
            pool=pool,
            use_proxy=use_proxy,
        )

        if self._ip_blacklist:
            # If we have an IP blacklist, we then install the blacklisting Agent
            # which prevents direct requests to IP addresses that are not caught
            # by the DNS resolution.
            self.agent = BlacklistingAgentWrapper(
                self.agent,
                ip_blacklist=self._ip_blacklist,
                ip_whitelist=self._ip_whitelist,
            )
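
# A minimal sketch (illustrative, not part of the module): a client that
# honours conventional proxy environment variables and blocks some internal
# ranges; the blacklist contents are hypothetical.
#
#     client = SimpleHttpClient(
#         hs,
#         ip_blacklist=IPSet(["127.0.0.0/8", "10.0.0.0/8", "169.254.0.0/16"]),
#         use_proxy=True,
#     )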


def _timeout_to_request_timed_out_error(f: Failure) -> Failure:
    if f.check(twisted_error.TimeoutError, twisted_error.ConnectingCancelledError):
        # The TCP connection has its own timeout (set by the 'connectTimeout' param
        # on the Agent), which raises twisted_error.TimeoutError exception.
        raise RequestTimedOutError("Timeout connecting to remote server")
    elif f.check(defer.TimeoutError, ResponseNeverReceived):
        # this one means that we hit our overall timeout on the request
        raise RequestTimedOutError("Timeout waiting for response from remote server")

    return f


class ByteWriteable(Protocol):
    """The type of object which must be passed into read_body_with_max_size.

    Typically this is a file object.
    """

    def write(self, data: bytes) -> int:
        pass


class BodyExceededMaxSize(Exception):
    """The maximum allowed size of the HTTP body was exceeded."""


class _DiscardBodyWithMaxSizeProtocol(protocol.Protocol):
    """A protocol which immediately errors upon receiving data."""

    transport: Optional[ITCPTransport] = None

    def __init__(self, deferred: defer.Deferred):
        self.deferred = deferred

    def _maybe_fail(self) -> None:
        """
        Report a max size exceed error and disconnect the first time this is called.
        """
        if not self.deferred.called:
            self.deferred.errback(BodyExceededMaxSize())
            # Close the connection (forcefully) since all the data will get
            # discarded anyway.
            assert self.transport is not None
            self.transport.abortConnection()

    def dataReceived(self, data: bytes) -> None:
        self._maybe_fail()

    def connectionLost(self, reason: Failure = connectionDone) -> None:
        self._maybe_fail()


class _ReadBodyWithMaxSizeProtocol(protocol.Protocol):
    """A protocol which reads body to a stream, erroring if the body exceeds a maximum size."""

    transport: Optional[ITCPTransport] = None

    def __init__(
        self, stream: ByteWriteable, deferred: defer.Deferred, max_size: Optional[int]
    ):
        self.stream = stream
        self.deferred = deferred
        self.length = 0
        self.max_size = max_size

    def dataReceived(self, data: bytes) -> None:
        # If the deferred was called, bail early.
        if self.deferred.called:
            return

        try:
            self.stream.write(data)
        except Exception:
            self.deferred.errback()
            return

        self.length += len(data)
        # The first time the maximum size is exceeded, error and cancel the
        # connection. dataReceived might be called again if data was received
        # in the meantime.
        if self.max_size is not None and self.length >= self.max_size:
            self.deferred.errback(BodyExceededMaxSize())
            # Close the connection (forcefully) since all the data will get
            # discarded anyway.
            assert self.transport is not None
            self.transport.abortConnection()

    def connectionLost(self, reason: Failure = connectionDone) -> None:
        # If the maximum size was already exceeded, there's nothing to do.
        if self.deferred.called:
            return

        if reason.check(ResponseDone):
            self.deferred.callback(self.length)
        elif reason.check(PotentialDataLoss):
            # stolen from https://github.com/twisted/treq/pull/49/files
            # http://twistedmatrix.com/trac/ticket/4840
            self.deferred.callback(self.length)
        else:
            self.deferred.errback(reason)


def read_body_with_max_size(
    response: IResponse, stream: ByteWriteable, max_size: Optional[int]
) -> "defer.Deferred[int]":
    """
    Read an HTTP response body to a file-object, optionally enforcing a maximum
    file size.

    If the maximum file size is reached, the returned Deferred will resolve to a
    Failure with a BodyExceededMaxSize exception.

    Args:
        response: The HTTP response to read from.
        stream: The file-object to write to.
        max_size: The maximum file size to allow.

    Returns:
        A Deferred which resolves to the length of the read body.
    """
    d: "defer.Deferred[int]" = defer.Deferred()

    # If the Content-Length header gives a size larger than the maximum allowed
    # size, do not bother downloading the body.
    if max_size is not None and response.length != UNKNOWN_LENGTH:
        if response.length > max_size:
            response.deliverBody(_DiscardBodyWithMaxSizeProtocol(d))
            return d

    response.deliverBody(_ReadBodyWithMaxSizeProtocol(stream, d, max_size))
    return d
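
# A minimal sketch (illustrative, not part of the module): streaming a response
# body to disk with a 1 MiB cap; `response` is an IResponse already received.
#
#     with open("/tmp/body.bin", "wb") as f:
#         d = read_body_with_max_size(response, f, max_size=1024 * 1024)
#         length = await make_deferred_yieldable(d)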


def encode_query_args(args: Optional[QueryParams]) -> bytes:
    """
    Encodes a map of query arguments to bytes which can be appended to a URL.

    Args:
        args: The query arguments, a mapping of string to string or list of strings.

    Returns:
        The query arguments encoded as bytes.
    """
    if args is None:
        return b""

    query_str = urllib.parse.urlencode(args, True)

    return query_str.encode("utf8")
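
# An illustrative example (not part of the module); list values repeat the key:
#
#     encode_query_args({"limit": "10", "filter": ["a", "b"]})
#     # => b'limit=10&filter=a&filter=b'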


@implementer(IPolicyForHTTPS)
class InsecureInterceptableContextFactory(ssl.ContextFactory):
    """
    Factory for PyOpenSSL SSL contexts which accepts any certificate for any domain.

    Do not use this since it allows an attacker to intercept your communications.
    """

    def __init__(self) -> None:
        self._context = SSL.Context(SSL.SSLv23_METHOD)
        self._context.set_verify(VERIFY_NONE, lambda *_: False)

    def getContext(self) -> SSL.Context:
        return self._context

    def creatorForNetloc(self, hostname: bytes, port: int) -> IOpenSSLContextFactory:
        return self


def is_unknown_endpoint(
    e: HttpResponseException, synapse_error: Optional[SynapseError] = None
) -> bool:
    """
    Returns true if the response was due to an endpoint being unimplemented.

    Args:
        e: The error response received from the remote server.
        synapse_error: The above error converted to a SynapseError. This is
            automatically generated if not provided.
    """
    if synapse_error is None:
        synapse_error = e.to_synapse_error()

    # Matrix v1.6 specifies that servers should return a 404 or 405 with an errcode
    # of M_UNRECOGNIZED when they receive a request to an unknown endpoint or
    # to an unknown method, respectively.
    #
    # Older versions of servers don't return proper errors, so be graceful. But,
    # also handle that some endpoints truly do return 404 errors.
    return (
        # 404 is an unknown endpoint, 405 is a known endpoint, but unknown method.
        (e.code == 404 or e.code == 405)
        and (
            # Consider empty body or non-JSON bodies to be unrecognised (matches
            # older Dendrites & Conduits).
            not e.response
            or not e.response.startswith(b"{")
            # The proper response JSON with M_UNRECOGNIZED errcode.
            or synapse_error.errcode == Codes.UNRECOGNIZED
        )
    ) or (
        # Older Synapses returned a 400 error.
        e.code == 400
        and synapse_error.errcode == Codes.UNRECOGNIZED
    )
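
# A minimal sketch (illustrative, not part of the module): falling back to an
# older API when the remote server does not implement an endpoint; `uri` and
# `fallback_uri` are hypothetical.
#
#     try:
#         result = await client.post_json_get_json(uri, body)
#     except HttpResponseException as e:
#         if is_unknown_endpoint(e):
#             result = await client.post_json_get_json(fallback_uri, body)
#         else:
#             raise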