# Copyright 2014-2021 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
|
2021-05-20 17:11:48 +02:00
|
|
|
import abc
|
2016-11-30 16:03:00 +01:00
|
|
|
import cgi
|
2021-04-23 12:08:41 +02:00
|
|
|
import codecs
|
2014-11-20 18:41:56 +01:00
|
|
|
import logging
|
2015-11-02 17:49:05 +01:00
|
|
|
import random
|
2015-06-19 11:13:03 +02:00
|
|
|
import sys
|
2021-04-23 12:08:41 +02:00
|
|
|
import typing
|
2020-11-25 13:07:21 +01:00
|
|
|
import urllib.parse
|
2021-12-09 12:58:25 +01:00
|
|
|
from http import HTTPStatus
|
2021-04-23 12:08:41 +02:00
|
|
|
from io import BytesIO, StringIO
|
2021-05-20 17:11:48 +02:00
|
|
|
from typing import (
|
2021-10-22 19:15:41 +02:00
|
|
|
TYPE_CHECKING,
|
2021-05-20 17:11:48 +02:00
|
|
|
Callable,
|
|
|
|
Dict,
|
|
|
|
Generic,
|
|
|
|
List,
|
|
|
|
Optional,
|
|
|
|
Tuple,
|
|
|
|
TypeVar,
|
|
|
|
Union,
|
|
|
|
overload,
|
|
|
|
)
|
2018-04-15 21:43:35 +02:00
|
|
|
|
2018-09-18 19:17:15 +02:00
|
|
|
import attr
|
2018-09-05 16:10:47 +02:00
|
|
|
import treq
|
|
|
|
from canonicaljson import encode_canonical_json
|
2018-05-22 02:47:37 +02:00
|
|
|
from prometheus_client import Counter
|
2018-07-09 08:09:20 +02:00
|
|
|
from signedjson.sign import sign_json
|
2021-05-20 17:11:48 +02:00
|
|
|
from typing_extensions import Literal
|
2018-07-09 08:09:20 +02:00
|
|
|
|
2020-11-25 19:30:47 +01:00
|
|
|
from twisted.internet import defer
|
2018-07-09 08:09:20 +02:00
|
|
|
from twisted.internet.error import DNSLookupError
|
2020-12-02 17:09:24 +01:00
|
|
|
from twisted.internet.interfaces import IReactorTime
|
2018-09-18 19:17:15 +02:00
|
|
|
from twisted.internet.task import _EPSILON, Cooperator
|
2021-07-15 11:35:46 +02:00
|
|
|
from twisted.web.client import ResponseFailed
|
2018-07-09 08:09:20 +02:00
|
|
|
from twisted.web.http_headers import Headers
|
2020-11-25 13:07:21 +01:00
|
|
|
from twisted.web.iweb import IBodyProducer, IResponse
|
2018-07-09 08:09:20 +02:00
|
|
|
|
|
|
|
import synapse.metrics
|
|
|
|
import synapse.util.retryutils
|
|
|
|
from synapse.api.errors import (
|
2020-12-16 23:25:24 +01:00
|
|
|
Codes,
|
2018-07-09 08:09:20 +02:00
|
|
|
FederationDeniedError,
|
|
|
|
HttpResponseException,
|
2019-01-08 12:04:28 +01:00
|
|
|
RequestSendFailed,
|
2020-12-16 23:25:24 +01:00
|
|
|
SynapseError,
|
2018-07-09 08:09:20 +02:00
|
|
|
)
|
2019-02-20 12:35:52 +01:00
|
|
|
from synapse.http import QuieterFileBodyProducer
|
2020-11-25 19:30:47 +01:00
|
|
|
from synapse.http.client import (
|
|
|
|
BlacklistingAgentWrapper,
|
2020-12-16 23:25:24 +01:00
|
|
|
BodyExceededMaxSize,
|
2021-05-20 17:11:48 +02:00
|
|
|
ByteWriteable,
|
2020-11-25 19:30:47 +01:00
|
|
|
encode_query_args,
|
2020-12-16 23:25:24 +01:00
|
|
|
read_body_with_max_size,
|
2020-11-25 19:30:47 +01:00
|
|
|
)
|
2019-01-22 00:29:47 +01:00
|
|
|
from synapse.http.federation.matrix_federation_agent import MatrixFederationAgent
|
2021-06-09 12:33:00 +02:00
|
|
|
from synapse.logging import opentracing
|
2021-09-14 14:01:30 +02:00
|
|
|
from synapse.logging.context import make_deferred_yieldable, run_in_background
|
2021-06-09 12:33:00 +02:00
|
|
|
from synapse.logging.opentracing import set_tag, start_active_span, tags
|
2021-08-11 16:34:59 +02:00
|
|
|
from synapse.types import JsonDict
|
2020-09-10 20:55:25 +02:00
|
|
|
from synapse.util import json_decoder
|
2018-09-19 11:39:40 +02:00
|
|
|
from synapse.util.async_helpers import timeout_deferred
|
2018-09-14 16:11:26 +02:00
|
|
|
from synapse.util.metrics import Measure
|
2018-05-22 02:47:37 +02:00
|
|
|
|
2021-10-22 19:15:41 +02:00
|
|
|
if TYPE_CHECKING:
|
|
|
|
from synapse.server import HomeServer
|
|
|
|
|
2014-11-20 18:41:56 +01:00
|
|
|
logger = logging.getLogger(__name__)

# Prometheus counters tracking outbound federation requests and the
# responses we get back, labelled by HTTP method (and response code).
outgoing_requests_counter = Counter(
    "synapse_http_matrixfederationclient_requests", "", ["method"]
)
incoming_responses_counter = Counter(
    "synapse_http_matrixfederationclient_responses", "", ["method", "code"]
)

# a federation response can be rather large (eg a big state_ids is 50M or so), so we
# need a generous limit here.
MAX_RESPONSE_SIZE = 100 * 1024 * 1024

# Attempt counts for the "long" and "short" retry algorithms used when
# sending federation requests (see `_send_request`).
MAX_LONG_RETRIES = 10
MAX_SHORT_RETRIES = 3
# Upper bound used to wrap the module-level transaction-id counter.
MAXINT = sys.maxsize

# Module-level counter used to generate per-request transaction ids
# (see `MatrixFederationRequest.__attrs_post_init__`).
_next_id = 1

# Type alias for query-string arguments: each key maps to one value or a
# list of values.
QueryArgs = Dict[str, Union[str, List[str]]]

# Generic result type produced by a `ByteParser`.
T = TypeVar("T")
|
|
|
|
|
|
|
|
|
|
|
|
class ByteParser(ByteWriteable, Generic[T], abc.ABC):
    """A `ByteWriteable` that has an additional `finish` function that returns
    the parsed data.
    """

    # NOTE: assigning an abstractproperty to a plain class attribute forces
    # concrete subclasses to override CONTENT_TYPE with a real string; the
    # type-ignore is needed because the declared type is `str`.
    CONTENT_TYPE: str = abc.abstractproperty()  # type: ignore
    """The expected content type of the response, e.g. `application/json`. If
    the content type doesn't match we fail the request.
    """

    @abc.abstractmethod
    def finish(self) -> T:
        """Called when response has finished streaming and the parser should
        return the final result (or error).
        """
        pass
|
|
|
|
|
|
|
|
|
2022-01-13 14:49:28 +01:00
|
|
|
@attr.s(slots=True, frozen=True, auto_attribs=True)
class MatrixFederationRequest:
    """The parameters of an outbound federation HTTP request.

    Instances are frozen; `txn_id` and `uri` are derived once in
    `__attrs_post_init__` and must not be supplied by callers.
    """

    method: str
    """HTTP method
    """

    path: str
    """HTTP path
    """

    destination: str
    """The remote server to send the HTTP request to.
    """

    json: Optional[JsonDict] = None
    """JSON to send in the body.
    """

    json_callback: Optional[Callable[[], JsonDict]] = None
    """A callback to generate the JSON.
    """

    query: Optional[dict] = None
    """Query arguments.
    """

    txn_id: Optional[str] = None
    """Unique ID for this request (for logging)
    """

    uri: bytes = attr.ib(init=False)
    """The URI of this request
    """

    def __attrs_post_init__(self) -> None:
        # Derive a transaction id from a module-level counter, wrapping it
        # before it can overflow.
        global _next_id
        txn_id = "%s-O-%s" % (self.method, _next_id)
        _next_id = (_next_id + 1) % (MAXINT - 1)

        # The class is frozen, so attributes can only be set via
        # object.__setattr__.
        object.__setattr__(self, "txn_id", txn_id)

        destination_bytes = self.destination.encode("ascii")
        path_bytes = self.path.encode("ascii")
        if self.query:
            query_bytes = encode_query_args(self.query)
        else:
            query_bytes = b""

        # The object is frozen so we can pre-compute this.
        uri = urllib.parse.urlunparse(
            (b"matrix", destination_bytes, path_bytes, None, query_bytes, b"")
        )
        object.__setattr__(self, "uri", uri)

    def get_json(self) -> Optional[JsonDict]:
        # Prefer the lazily-generated JSON (callback) over the static body.
        if self.json_callback:
            return self.json_callback()
        return self.json
|
|
|
|
|
|
|
|
|
2021-05-20 17:11:48 +02:00
|
|
|
class JsonParser(ByteParser[Union[JsonDict, list]]):
    """Accumulates the streamed response body and decodes it as JSON when
    the stream completes.
    """

    CONTENT_TYPE = "application/json"

    def __init__(self):
        # Text buffer holding the decoded body; bytes are fed through an
        # incremental UTF-8 decoder so chunk boundaries are handled safely.
        self._buffer = StringIO()
        self._binary_wrapper = BinaryIOWrapper(self._buffer)

    def write(self, data: bytes) -> int:
        written = self._binary_wrapper.write(data)
        return written

    def finish(self) -> Union[JsonDict, list]:
        raw_text = self._buffer.getvalue()
        return json_decoder.decode(raw_text)
|
|
|
|
|
|
|
|
|
|
|
|
async def _handle_response(
    reactor: IReactorTime,
    timeout_sec: float,
    request: MatrixFederationRequest,
    response: IResponse,
    start_ms: int,
    parser: ByteParser[T],
    max_response_size: Optional[int] = None,
) -> T:
    """
    Reads the body of a response with a timeout and sends it to a parser.

    Args:
        reactor: twisted reactor, for the timeout
        timeout_sec: number of seconds to wait for response to complete
        request: the request that triggered the response
        response: response to the request
        start_ms: Timestamp (in ms) when the request was made
        parser: The parser for the response
        max_response_size: The maximum size to read from the response, if None
            uses the default.

    Returns:
        The parsed response

    Raises:
        RequestSendFailed: if the body was too large, could not be parsed,
            or could not be read before the timeout.
    """

    if max_response_size is None:
        max_response_size = MAX_RESPONSE_SIZE

    try:
        # Fail fast if the server sent a content type the parser does not
        # expect.
        check_content_type_is(response.headers, parser.CONTENT_TYPE)

        d = read_body_with_max_size(response, parser, max_response_size)
        d = timeout_deferred(d, timeout=timeout_sec, reactor=reactor)

        length = await make_deferred_yieldable(d)

        value = parser.finish()
    except BodyExceededMaxSize as e:
        # The response was too big.
        logger.warning(
            "{%s} [%s] JSON response exceeded max size %i - %s %s",
            request.txn_id,
            request.destination,
            # Log the limit that was actually applied: this may be an
            # endpoint-specific override, not the module default.
            max_response_size,
            request.method,
            request.uri.decode("ascii"),
        )
        raise RequestSendFailed(e, can_retry=False) from e
    except ValueError as e:
        # The content was invalid.
        logger.warning(
            "{%s} [%s] Failed to parse response - %s %s",
            request.txn_id,
            request.destination,
            request.method,
            request.uri.decode("ascii"),
        )
        raise RequestSendFailed(e, can_retry=False) from e
    except defer.TimeoutError as e:
        logger.warning(
            "{%s} [%s] Timed out reading response - %s %s",
            request.txn_id,
            request.destination,
            request.method,
            request.uri.decode("ascii"),
        )
        raise RequestSendFailed(e, can_retry=True) from e
    except ResponseFailed as e:
        logger.warning(
            "{%s} [%s] Failed to read response - %s %s",
            request.txn_id,
            request.destination,
            request.method,
            request.uri.decode("ascii"),
        )
        raise RequestSendFailed(e, can_retry=True) from e
    except Exception as e:
        logger.warning(
            "{%s} [%s] Error reading response %s %s: %s",
            request.txn_id,
            request.destination,
            request.method,
            request.uri.decode("ascii"),
            e,
        )
        raise

    # `start_ms` is in milliseconds; convert before comparing with the
    # reactor's seconds-based clock.
    time_taken_secs = reactor.seconds() - start_ms / 1000

    logger.info(
        "{%s} [%s] Completed request: %d %s in %.2f secs, got %d bytes - %s %s",
        request.txn_id,
        request.destination,
        response.code,
        response.phrase.decode("ascii", errors="replace"),
        time_taken_secs,
        length,
        request.method,
        request.uri.decode("ascii"),
    )
    return value
|
2018-09-18 19:17:15 +02:00
|
|
|
|
|
|
|
|
2021-04-23 12:08:41 +02:00
|
|
|
class BinaryIOWrapper:
    """Adapts a text-mode file so that bytes may be written to it.

    Bytes are decoded with an incremental decoder, so multi-byte characters
    split across successive `write` calls are handled correctly.
    """

    def __init__(self, file: typing.TextIO, encoding="utf-8", errors="strict"):
        self.file = file
        self.decoder = codecs.getincrementaldecoder(encoding)(errors)

    def write(self, b: Union[bytes, bytearray]) -> int:
        # Decode whatever is complete so far and forward it; the decoder
        # buffers any trailing partial character internally.
        decoded = self.decoder.decode(b)
        self.file.write(decoded)
        return len(b)
|
|
|
|
|
|
|
|
|
2020-09-04 12:54:56 +02:00
|
|
|
class MatrixFederationHttpClient:
|
2014-11-20 19:00:10 +01:00
|
|
|
"""HTTP client used to talk to other homeservers over the federation
|
|
|
|
protocol. Send client certificates and signs requests.
|
2014-11-20 18:41:56 +01:00
|
|
|
|
|
|
|
Attributes:
|
|
|
|
agent (twisted.web.client.Agent): The twisted Agent used to send the
|
|
|
|
requests.
|
|
|
|
"""
|
|
|
|
|
2021-10-22 19:15:41 +02:00
|
|
|
def __init__(self, hs: "HomeServer", tls_client_options_factory):
|
2014-11-20 18:41:56 +01:00
|
|
|
self.hs = hs
|
2020-07-08 18:51:56 +02:00
|
|
|
self.signing_key = hs.signing_key
|
2014-11-20 18:41:56 +01:00
|
|
|
self.server_name = hs.hostname
|
2019-05-13 20:05:06 +02:00
|
|
|
|
2021-08-11 16:34:59 +02:00
|
|
|
self.reactor = hs.get_reactor()
|
2019-05-13 20:05:06 +02:00
|
|
|
|
2020-06-16 16:43:29 +02:00
|
|
|
user_agent = hs.version_string
|
2021-09-29 12:44:15 +02:00
|
|
|
if hs.config.server.user_agent_suffix:
|
|
|
|
user_agent = "%s %s" % (user_agent, hs.config.server.user_agent_suffix)
|
2020-06-16 16:43:29 +02:00
|
|
|
user_agent = user_agent.encode("ascii")
|
|
|
|
|
2021-03-08 14:25:43 +01:00
|
|
|
federation_agent = MatrixFederationAgent(
|
2020-12-02 17:09:24 +01:00
|
|
|
self.reactor,
|
|
|
|
tls_client_options_factory,
|
|
|
|
user_agent,
|
2021-09-29 12:44:15 +02:00
|
|
|
hs.config.server.federation_ip_range_whitelist,
|
|
|
|
hs.config.server.federation_ip_range_blacklist,
|
2020-06-16 16:43:29 +02:00
|
|
|
)
|
2019-05-13 20:05:06 +02:00
|
|
|
|
|
|
|
# Use a BlacklistingAgentWrapper to prevent circumventing the IP
|
|
|
|
# blacklist via IP literals in server names
|
|
|
|
self.agent = BlacklistingAgentWrapper(
|
2021-03-08 14:25:43 +01:00
|
|
|
federation_agent,
|
2021-09-29 12:44:15 +02:00
|
|
|
ip_blacklist=hs.config.server.federation_ip_range_blacklist,
|
2019-05-13 20:05:06 +02:00
|
|
|
)
|
|
|
|
|
2015-02-10 19:17:27 +01:00
|
|
|
self.clock = hs.get_clock()
|
2017-03-23 01:12:21 +01:00
|
|
|
self._store = hs.get_datastore()
|
2019-06-20 11:32:02 +02:00
|
|
|
self.version_string_bytes = hs.version_string.encode("ascii")
|
2018-09-13 16:15:51 +02:00
|
|
|
self.default_timeout = 60
|
2014-11-20 18:41:56 +01:00
|
|
|
|
2018-09-18 19:17:15 +02:00
|
|
|
def schedule(x):
|
2019-05-13 20:05:06 +02:00
|
|
|
self.reactor.callLater(_EPSILON, x)
|
2018-09-18 19:17:15 +02:00
|
|
|
|
|
|
|
self._cooperator = Cooperator(scheduler=schedule)
|
2015-06-01 11:51:50 +02:00
|
|
|
|
2020-07-30 14:01:33 +02:00
|
|
|
async def _send_request_with_optional_trailing_slash(
|
2020-11-25 13:07:21 +01:00
|
|
|
self,
|
|
|
|
request: MatrixFederationRequest,
|
|
|
|
try_trailing_slash_on_400: bool = False,
|
2021-04-13 11:41:34 +02:00
|
|
|
**send_request_args,
|
2020-11-25 13:07:21 +01:00
|
|
|
) -> IResponse:
|
2019-03-13 19:26:06 +01:00
|
|
|
"""Wrapper for _send_request which can optionally retry the request
|
|
|
|
upon receiving a combination of a 400 HTTP response code and a
|
2019-03-20 12:27:18 +01:00
|
|
|
'M_UNRECOGNIZED' errcode. This is a workaround for Synapse <= v0.99.3
|
2019-03-13 19:26:06 +01:00
|
|
|
due to #3622.
|
|
|
|
|
|
|
|
Args:
|
2020-11-25 13:07:21 +01:00
|
|
|
request: details of request to be sent
|
|
|
|
try_trailing_slash_on_400: Whether on receiving a 400
|
2019-03-13 19:26:06 +01:00
|
|
|
'M_UNRECOGNIZED' from the server to retry the request with a
|
|
|
|
trailing slash appended to the request path.
|
2020-11-25 13:07:21 +01:00
|
|
|
send_request_args: A dictionary of arguments to pass to `_send_request()`.
|
2019-03-13 19:26:06 +01:00
|
|
|
|
2019-03-13 22:08:10 +01:00
|
|
|
Raises:
|
|
|
|
HttpResponseException: If we get an HTTP response code >= 300
|
|
|
|
(except 429).
|
|
|
|
|
2019-03-13 19:26:06 +01:00
|
|
|
Returns:
|
2020-11-25 13:07:21 +01:00
|
|
|
Parsed JSON response body.
|
2019-03-13 19:26:06 +01:00
|
|
|
"""
|
2019-03-13 22:08:10 +01:00
|
|
|
try:
|
2020-07-30 14:01:33 +02:00
|
|
|
response = await self._send_request(request, **send_request_args)
|
2019-03-13 22:08:10 +01:00
|
|
|
except HttpResponseException as e:
|
2019-03-20 15:00:39 +01:00
|
|
|
# Received an HTTP error > 300. Check if it meets the requirements
|
|
|
|
# to retry with a trailing slash
|
2019-03-13 22:08:10 +01:00
|
|
|
if not try_trailing_slash_on_400:
|
2019-03-20 15:00:39 +01:00
|
|
|
raise
|
2019-03-13 19:26:06 +01:00
|
|
|
|
2019-03-20 15:00:39 +01:00
|
|
|
if e.code != 400 or e.to_synapse_error().errcode != "M_UNRECOGNIZED":
|
|
|
|
raise
|
2019-03-13 19:26:06 +01:00
|
|
|
|
2019-03-20 15:00:39 +01:00
|
|
|
# Retry with a trailing slash if we received a 400 with
|
|
|
|
# 'M_UNRECOGNIZED' which some endpoints can return when omitting a
|
|
|
|
# trailing slash on Synapse <= v0.99.3.
|
2019-03-26 12:35:29 +01:00
|
|
|
logger.info("Retrying request with trailing slash")
|
2020-08-11 19:10:07 +02:00
|
|
|
|
|
|
|
# Request is frozen so we create a new instance
|
|
|
|
request = attr.evolve(request, path=request.path + "/")
|
2019-03-13 22:08:10 +01:00
|
|
|
|
2020-07-30 14:01:33 +02:00
|
|
|
response = await self._send_request(request, **send_request_args)
|
2019-03-13 21:17:39 +01:00
|
|
|
|
2019-07-23 15:00:55 +02:00
|
|
|
return response
|
2019-03-13 19:26:06 +01:00
|
|
|
|
2020-07-30 14:01:33 +02:00
|
|
|
    async def _send_request(
        self,
        request: MatrixFederationRequest,
        retry_on_dns_fail: bool = True,
        timeout: Optional[int] = None,
        long_retries: bool = False,
        ignore_backoff: bool = False,
        backoff_on_404: bool = False,
    ) -> IResponse:
        """
        Sends a request to the given server.

        Args:
            request: details of request to be sent

            retry_on_dns_fail: true if the request should be retried on DNS failures

            timeout: number of milliseconds to wait for the response headers
                (including connecting to the server), *for each attempt*.
                60s by default.

            long_retries: whether to use the long retry algorithm.

                The regular retry algorithm makes 4 attempts, with intervals
                [0.5s, 1s, 2s].

                The long retry algorithm makes 11 attempts, with intervals
                [4s, 16s, 60s, 60s, ...]

                Both algorithms add -20%/+40% jitter to the retry intervals.

                Note that the above intervals are *in addition* to the time spent
                waiting for the request to complete (up to `timeout` ms).

                NB: the long retry algorithm takes over 20 minutes to complete, with
                a default timeout of 60s!

            ignore_backoff: true to ignore the historical backoff data
                and try the request anyway.

            backoff_on_404: Back off if we get a 404

        Returns:
            Resolves with the HTTP response object on success.

        Raises:
            HttpResponseException: If we get an HTTP response code >= 300
                (except 429).
            NotRetryingDestination: If we are not yet ready to retry this
                server.
            FederationDeniedError: If this destination is not on our
                federation whitelist
            RequestSendFailed: If there were problems connecting to the
                remote, due to e.g. DNS failures, connection timeouts etc.
        """
        # `timeout` is in milliseconds; convert to seconds for twisted.
        if timeout:
            _sec_timeout = timeout / 1000
        else:
            _sec_timeout = self.default_timeout

        # Refuse to talk to destinations outside the configured whitelist
        # (if one is configured at all).
        if (
            self.hs.config.federation.federation_domain_whitelist is not None
            and request.destination
            not in self.hs.config.federation.federation_domain_whitelist
        ):
            raise FederationDeniedError(request.destination)

        # May raise NotRetryingDestination if we are still backing off from
        # this server.
        limiter = await synapse.util.retryutils.get_retry_limiter(
            request.destination,
            self.clock,
            self._store,
            backoff_on_404=backoff_on_404,
            ignore_backoff=ignore_backoff,
        )

        method_bytes = request.method.encode("ascii")
        destination_bytes = request.destination.encode("ascii")
        path_bytes = request.path.encode("ascii")
        if request.query:
            query_bytes = encode_query_args(request.query)
        else:
            query_bytes = b""

        scope = start_active_span(
            "outgoing-federation-request",
            tags={
                tags.SPAN_KIND: tags.SPAN_KIND_RPC_CLIENT,
                tags.PEER_ADDRESS: request.destination,
                tags.HTTP_METHOD: request.method,
                tags.HTTP_URL: request.path,
            },
            finish_on_close=True,
        )

        # Inject the span into the headers
        headers_dict: Dict[bytes, List[bytes]] = {}
        opentracing.inject_header_dict(headers_dict, request.destination)

        headers_dict[b"User-Agent"] = [self.version_string_bytes]

        with limiter, scope:
            # XXX: Would be much nicer to retry only at the transaction-layer
            # (once we have reliable transactions in place)
            if long_retries:
                retries_left = MAX_LONG_RETRIES
            else:
                retries_left = MAX_SHORT_RETRIES

            url_bytes = request.uri
            url_str = url_bytes.decode("ascii")

            # The path+query (without scheme/authority) is what gets signed.
            url_to_sign_bytes = urllib.parse.urlunparse(
                (b"", b"", path_bytes, None, query_bytes, b"")
            )

            while True:
                try:
                    # Re-generate the JSON body (and hence the auth headers)
                    # on every attempt, since `json_callback` may produce
                    # fresh content.
                    json = request.get_json()
                    if json:
                        headers_dict[b"Content-Type"] = [b"application/json"]
                        auth_headers = self.build_auth_headers(
                            destination_bytes, method_bytes, url_to_sign_bytes, json
                        )
                        data = encode_canonical_json(json)
                        producer: Optional[IBodyProducer] = QuieterFileBodyProducer(
                            BytesIO(data), cooperator=self._cooperator
                        )
                    else:
                        producer = None
                        auth_headers = self.build_auth_headers(
                            destination_bytes, method_bytes, url_to_sign_bytes
                        )

                    headers_dict[b"Authorization"] = auth_headers

                    logger.debug(
                        "{%s} [%s] Sending request: %s %s; timeout %fs",
                        request.txn_id,
                        request.destination,
                        request.method,
                        url_str,
                        _sec_timeout,
                    )

                    outgoing_requests_counter.labels(request.method).inc()

                    try:
                        with Measure(self.clock, "outbound_request"):
                            # we don't want all the fancy cookie and redirect handling
                            # that treq.request gives: just use the raw Agent.

                            # To preserve the logging context, the timeout is treated
                            # in a similar way to `defer.gatherResults`:
                            # * Each logging context-preserving fork is wrapped in
                            #   `run_in_background`. In this case there is only one,
                            #   since the timeout fork is not logging-context aware.
                            # * The `Deferred` that joins the forks back together is
                            #   wrapped in `make_deferred_yieldable` to restore the
                            #   logging context regardless of the path taken.
                            request_deferred = run_in_background(
                                self.agent.request,
                                method_bytes,
                                url_bytes,
                                headers=Headers(headers_dict),
                                bodyProducer=producer,
                            )
                            request_deferred = timeout_deferred(
                                request_deferred,
                                timeout=_sec_timeout,
                                reactor=self.reactor,
                            )

                            response = await make_deferred_yieldable(request_deferred)
                    except DNSLookupError as e:
                        raise RequestSendFailed(e, can_retry=retry_on_dns_fail) from e
                    except Exception as e:
                        # Any other connection-level failure is considered
                        # transient and retryable.
                        raise RequestSendFailed(e, can_retry=True) from e

                    incoming_responses_counter.labels(
                        request.method, response.code
                    ).inc()

                    set_tag(tags.HTTP_STATUS_CODE, response.code)
                    response_phrase = response.phrase.decode("ascii", errors="replace")

                    if 200 <= response.code < 300:
                        logger.debug(
                            "{%s} [%s] Got response headers: %d %s",
                            request.txn_id,
                            request.destination,
                            response.code,
                            response_phrase,
                        )
                        # Success: fall through to the `break` below.
                        pass
                    else:
                        logger.info(
                            "{%s} [%s] Got response headers: %d %s",
                            request.txn_id,
                            request.destination,
                            response.code,
                            response_phrase,
                        )
                        # :'(
                        # Update transactions table?
                        d = treq.content(response)
                        d = timeout_deferred(
                            d, timeout=_sec_timeout, reactor=self.reactor
                        )

                        try:
                            body = await make_deferred_yieldable(d)
                        except Exception as e:
                            # Eh, we're already going to raise an exception so lets
                            # ignore if this fails.
                            logger.warning(
                                "{%s} [%s] Failed to get error response: %s %s: %s",
                                request.txn_id,
                                request.destination,
                                request.method,
                                url_str,
                                _flatten_response_never_received(e),
                            )
                            body = None

                        exc = HttpResponseException(
                            response.code, response_phrase, body
                        )

                        # Retry if the error is a 5xx or a 429 (Too Many
                        # Requests), otherwise just raise a standard
                        # `HttpResponseException`
                        if 500 <= response.code < 600 or response.code == 429:
                            raise RequestSendFailed(exc, can_retry=True) from exc
                        else:
                            raise exc

                    break
                except RequestSendFailed as e:
                    logger.info(
                        "{%s} [%s] Request failed: %s %s: %s",
                        request.txn_id,
                        request.destination,
                        request.method,
                        url_str,
                        _flatten_response_never_received(e.inner_exception),
                    )

                    if not e.can_retry:
                        raise

                    # Only retry while attempts remain and no explicit
                    # per-request timeout was given.
                    if retries_left and not timeout:
                        if long_retries:
                            # Exponential backoff capped at 60s, with jitter.
                            delay = 4 ** (MAX_LONG_RETRIES + 1 - retries_left)
                            delay = min(delay, 60)
                            delay *= random.uniform(0.8, 1.4)
                        else:
                            # Exponential backoff capped at 2s, with jitter.
                            delay = 0.5 * 2 ** (MAX_SHORT_RETRIES - retries_left)
                            delay = min(delay, 2)
                            delay *= random.uniform(0.8, 1.4)

                        logger.debug(
                            "{%s} [%s] Waiting %ss before re-sending...",
                            request.txn_id,
                            request.destination,
                            delay,
                        )

                        await self.clock.sleep(delay)
                        retries_left -= 1
                    else:
                        raise

                except Exception as e:
                    logger.warning(
                        "{%s} [%s] Request failed: %s %s: %s",
                        request.txn_id,
                        request.destination,
                        request.method,
                        url_str,
                        _flatten_response_never_received(e),
                    )
                    raise
        return response
|
2014-11-20 18:41:56 +01:00
|
|
|
|
2019-01-17 13:40:09 +01:00
|
|
|
def build_auth_headers(
    self,
    destination: Optional[bytes],
    method: bytes,
    url_bytes: bytes,
    content: Optional[JsonDict] = None,
    destination_is: Optional[bytes] = None,
) -> List[bytes]:
    """
    Builds the Authorization headers for a federation request

    Args:
        destination: The destination homeserver of the request.
            May be None if the destination is an identity server, in which case
            destination_is must be non-None.
        method: The HTTP method of the request
        url_bytes: The URI path of the request
        content: The body of the request
        destination_is: As 'destination', but if the destination is an
            identity server

    Returns:
        A list of headers to be added as "Authorization:" headers
    """
    # The JSON object that gets signed: method/uri/origin always, plus
    # whichever of destination / destination_is / content were supplied.
    request: JsonDict = {
        "method": method.decode("ascii"),
        "uri": url_bytes.decode("ascii"),
        "origin": self.server_name,
    }

    if destination is not None:
        request["destination"] = destination.decode("ascii")

    if destination_is is not None:
        request["destination_is"] = destination_is.decode("ascii")

    if content is not None:
        request["content"] = content

    # Sign with our server's key; sign_json adds a "signatures" entry
    # keyed by our server name.
    request = sign_json(request, self.server_name, self.signing_key)

    auth_headers = []

    # One X-Matrix Authorization header per signing key we signed with.
    for key, sig in request["signatures"][self.server_name].items():
        auth_headers.append(
            (
                'X-Matrix origin=%s,key="%s",sig="%s"'
                % (self.server_name, key, sig)
            ).encode("ascii")
        )
    return auth_headers
|
2014-11-20 18:41:56 +01:00
|
|
|
|
2021-05-20 17:11:48 +02:00
|
|
|
# Overload 1: no parser supplied -> the response is parsed as JSON.
@overload
async def put_json(
    self,
    destination: str,
    path: str,
    args: Optional[QueryArgs] = None,
    data: Optional[JsonDict] = None,
    json_data_callback: Optional[Callable[[], JsonDict]] = None,
    long_retries: bool = False,
    timeout: Optional[int] = None,
    ignore_backoff: bool = False,
    backoff_on_404: bool = False,
    try_trailing_slash_on_400: bool = False,
    parser: Literal[None] = None,
    max_response_size: Optional[int] = None,
) -> Union[JsonDict, list]:
    ...

# Overload 2: an explicit ByteParser[T] supplied -> returns whatever T
# that parser produces.
@overload
async def put_json(
    self,
    destination: str,
    path: str,
    args: Optional[QueryArgs] = None,
    data: Optional[JsonDict] = None,
    json_data_callback: Optional[Callable[[], JsonDict]] = None,
    long_retries: bool = False,
    timeout: Optional[int] = None,
    ignore_backoff: bool = False,
    backoff_on_404: bool = False,
    try_trailing_slash_on_400: bool = False,
    parser: Optional[ByteParser[T]] = None,
    max_response_size: Optional[int] = None,
) -> T:
    ...

async def put_json(
    self,
    destination: str,
    path: str,
    args: Optional[QueryArgs] = None,
    data: Optional[JsonDict] = None,
    json_data_callback: Optional[Callable[[], JsonDict]] = None,
    long_retries: bool = False,
    timeout: Optional[int] = None,
    ignore_backoff: bool = False,
    backoff_on_404: bool = False,
    try_trailing_slash_on_400: bool = False,
    parser: Optional[ByteParser] = None,
    max_response_size: Optional[int] = None,
):
    """Sends the specified json data using PUT

    Args:
        destination: The remote server to send the HTTP request to.
        path: The HTTP path.
        args: query params
        data: A dict containing the data that will be used as
            the request body. This will be encoded as JSON.
        json_data_callback: A callable returning the dict to
            use as the request body.

        long_retries: whether to use the long retry algorithm. See
            docs on _send_request for details.

        timeout: number of milliseconds to wait for the response.
            self._default_timeout (60s) by default.

            Note that we may make several attempts to send the request; this
            timeout applies to the time spent waiting for response headers for
            *each* attempt (including connection time) as well as the time spent
            reading the response body after a 200 response.

        ignore_backoff: true to ignore the historical backoff data
            and try the request anyway.
        backoff_on_404: True if we should count a 404 response as
            a failure of the server (and should therefore back off future
            requests).
        try_trailing_slash_on_400: True if on a 400 M_UNRECOGNIZED
            response we should try appending a trailing slash to the end
            of the request. Workaround for #3622 in Synapse <= v0.99.3. This
            will be attempted before backing off if backing off has been
            enabled.
        parser: The parser to use to decode the response. Defaults to
            parsing as JSON.
        max_response_size: The maximum size to read from the response, if None
            uses the default.

    Returns:
        Succeeds when we get a 2xx HTTP response. The
        result will be the decoded JSON body.

    Raises:
        HttpResponseException: If we get an HTTP response code >= 300
            (except 429).
        NotRetryingDestination: If we are not yet ready to retry this
            server.
        FederationDeniedError: If this destination is not on our
            federation whitelist
        RequestSendFailed: If there were problems connecting to the
            remote, due to e.g. DNS failures, connection timeouts etc.
    """
    request = MatrixFederationRequest(
        method="PUT",
        destination=destination,
        path=path,
        query=args,
        json_callback=json_data_callback,
        json=data,
    )

    # Record the start time so the response handler can log total elapsed time.
    start_ms = self.clock.time_msec()

    response = await self._send_request_with_optional_trailing_slash(
        request,
        try_trailing_slash_on_400,
        backoff_on_404=backoff_on_404,
        ignore_backoff=ignore_backoff,
        long_retries=long_retries,
        timeout=timeout,
    )

    # `timeout` is given in milliseconds; _handle_response wants seconds.
    if timeout is not None:
        _sec_timeout = timeout / 1000
    else:
        _sec_timeout = self.default_timeout

    if parser is None:
        parser = JsonParser()

    body = await _handle_response(
        self.reactor,
        _sec_timeout,
        request,
        response,
        start_ms,
        parser=parser,
        max_response_size=max_response_size,
    )

    return body
|
2015-01-29 14:44:52 +01:00
|
|
|
|
2020-07-30 14:01:33 +02:00
|
|
|
async def post_json(
    self,
    destination: str,
    path: str,
    data: Optional[JsonDict] = None,
    long_retries: bool = False,
    timeout: Optional[int] = None,
    ignore_backoff: bool = False,
    args: Optional[QueryArgs] = None,
) -> Union[JsonDict, list]:
    """Sends the specified json data using POST

    Args:
        destination: The remote server to send the HTTP request to.

        path: The HTTP path.

        data: A dict containing the data that will be used as
            the request body. This will be encoded as JSON.

        long_retries: whether to use the long retry algorithm. See
            docs on _send_request for details.

        timeout: number of milliseconds to wait for the response.
            self._default_timeout (60s) by default.

            Note that we may make several attempts to send the request; this
            timeout applies to the time spent waiting for response headers for
            *each* attempt (including connection time) as well as the time spent
            reading the response body after a 200 response.

        ignore_backoff: true to ignore the historical backoff data and
            try the request anyway.

        args: query params
    Returns:
        Succeeds when we get a 2xx HTTP response. The
        result will be the decoded JSON body.

    Raises:
        HttpResponseException: If we get an HTTP response code >= 300
            (except 429).
        NotRetryingDestination: If we are not yet ready to retry this
            server.
        FederationDeniedError: If this destination is not on our
            federation whitelist
        RequestSendFailed: If there were problems connecting to the
            remote, due to e.g. DNS failures, connection timeouts etc.
    """

    request = MatrixFederationRequest(
        method="POST", destination=destination, path=path, query=args, json=data
    )

    # Record the start time so the response handler can log total elapsed time.
    start_ms = self.clock.time_msec()

    response = await self._send_request(
        request,
        long_retries=long_retries,
        timeout=timeout,
        ignore_backoff=ignore_backoff,
    )

    # Use `is not None` (not truthiness) so an explicit timeout of 0 is
    # honoured rather than silently falling back to the default; this also
    # matches the equivalent logic in put_json/get_json/delete_json.
    # `timeout` is in milliseconds; _handle_response wants seconds.
    if timeout is not None:
        _sec_timeout = timeout / 1000
    else:
        _sec_timeout = self.default_timeout

    body = await _handle_response(
        self.reactor, _sec_timeout, request, response, start_ms, parser=JsonParser()
    )
    return body
|
2014-11-20 18:41:56 +01:00
|
|
|
|
2020-07-30 14:01:33 +02:00
|
|
|
async def get_json(
    self,
    destination: str,
    path: str,
    args: Optional[QueryArgs] = None,
    retry_on_dns_fail: bool = True,
    timeout: Optional[int] = None,
    ignore_backoff: bool = False,
    try_trailing_slash_on_400: bool = False,
) -> Union[JsonDict, list]:
    """GETs some json from the given host homeserver and path

    Args:
        destination: The remote server to send the HTTP request to.

        path: The HTTP path.

        args: A dictionary used to create query strings, defaults to
            None.

        timeout: number of milliseconds to wait for the response.
            self._default_timeout (60s) by default.

            Note that we may make several attempts to send the request; this
            timeout applies to the time spent waiting for response headers for
            *each* attempt (including connection time) as well as the time spent
            reading the response body after a 200 response.

        ignore_backoff: true to ignore the historical backoff data
            and try the request anyway.

        try_trailing_slash_on_400: True if on a 400 M_UNRECOGNIZED
            response we should try appending a trailing slash to the end of
            the request. Workaround for #3622 in Synapse <= v0.99.3.
    Returns:
        Succeeds when we get a 2xx HTTP response. The
        result will be the decoded JSON body.

    Raises:
        HttpResponseException: If we get an HTTP response code >= 300
            (except 429).
        NotRetryingDestination: If we are not yet ready to retry this
            server.
        FederationDeniedError: If this destination is not on our
            federation whitelist
        RequestSendFailed: If there were problems connecting to the
            remote, due to e.g. DNS failures, connection timeouts etc.
    """
    request = MatrixFederationRequest(
        method="GET", destination=destination, path=path, query=args
    )

    # Record the start time so the response handler can log total elapsed time.
    start_ms = self.clock.time_msec()

    response = await self._send_request_with_optional_trailing_slash(
        request,
        try_trailing_slash_on_400,
        backoff_on_404=False,
        ignore_backoff=ignore_backoff,
        retry_on_dns_fail=retry_on_dns_fail,
        timeout=timeout,
    )

    # `timeout` is given in milliseconds; _handle_response wants seconds.
    if timeout is not None:
        _sec_timeout = timeout / 1000
    else:
        _sec_timeout = self.default_timeout

    body = await _handle_response(
        self.reactor, _sec_timeout, request, response, start_ms, parser=JsonParser()
    )

    return body
|
2017-07-18 17:41:44 +02:00
|
|
|
|
2020-07-30 14:01:33 +02:00
|
|
|
async def delete_json(
    self,
    destination: str,
    path: str,
    long_retries: bool = False,
    timeout: Optional[int] = None,
    ignore_backoff: bool = False,
    args: Optional[QueryArgs] = None,
) -> Union[JsonDict, list]:
    """Send a DELETE request to the remote expecting some json response

    Args:
        destination: The remote server to send the HTTP request to.
        path: The HTTP path.

        long_retries: whether to use the long retry algorithm. See
            docs on _send_request for details.

        timeout: number of milliseconds to wait for the response.
            self._default_timeout (60s) by default.

            Note that we may make several attempts to send the request; this
            timeout applies to the time spent waiting for response headers for
            *each* attempt (including connection time) as well as the time spent
            reading the response body after a 200 response.

        ignore_backoff: true to ignore the historical backoff data and
            try the request anyway.

        args: query params
    Returns:
        Succeeds when we get a 2xx HTTP response. The
        result will be the decoded JSON body.

    Raises:
        HttpResponseException: If we get an HTTP response code >= 300
            (except 429).
        NotRetryingDestination: If we are not yet ready to retry this
            server.
        FederationDeniedError: If this destination is not on our
            federation whitelist
        RequestSendFailed: If there were problems connecting to the
            remote, due to e.g. DNS failures, connection timeouts etc.
    """
    # Build the outgoing request description and note when we started, so
    # the response handler can report total elapsed time.
    request = MatrixFederationRequest(
        method="DELETE", destination=destination, path=path, query=args
    )

    start_ms = self.clock.time_msec()

    response = await self._send_request(
        request,
        long_retries=long_retries,
        timeout=timeout,
        ignore_backoff=ignore_backoff,
    )

    # Convert the millisecond timeout to seconds for the body reader,
    # defaulting when no explicit timeout was supplied.
    _sec_timeout = self.default_timeout if timeout is None else timeout / 1000

    return await _handle_response(
        self.reactor, _sec_timeout, request, response, start_ms, parser=JsonParser()
    )
|
2014-11-20 18:41:56 +01:00
|
|
|
|
2020-07-30 14:01:33 +02:00
|
|
|
async def get_file(
    self,
    destination: str,
    path: str,
    output_stream,
    args: Optional[QueryArgs] = None,
    retry_on_dns_fail: bool = True,
    max_size: Optional[int] = None,
    ignore_backoff: bool = False,
) -> Tuple[int, Dict[bytes, List[bytes]]]:
    """GETs a file from a given homeserver
    Args:
        destination: The remote server to send the HTTP request to.
        path: The HTTP path to GET.
        output_stream: File to write the response body to.
        args: Optional dictionary used to create the query string.
        ignore_backoff: true to ignore the historical backoff data
            and try the request anyway.

    Returns:
        Resolves with an (int,dict) tuple of
        the file length and a dict of the response headers.

    Raises:
        HttpResponseException: If we get an HTTP response code >= 300
            (except 429).
        NotRetryingDestination: If we are not yet ready to retry this
            server.
        FederationDeniedError: If this destination is not on our
            federation whitelist
        RequestSendFailed: If there were problems connecting to the
            remote, due to e.g. DNS failures, connection timeouts etc.
    """
    request = MatrixFederationRequest(
        method="GET", destination=destination, path=path, query=args
    )

    response = await self._send_request(
        request, retry_on_dns_fail=retry_on_dns_fail, ignore_backoff=ignore_backoff
    )

    # Capture the headers before we start streaming the body.
    headers = dict(response.headers.getAllRawHeaders())

    try:
        # Stream the body into output_stream, bailing out if it exceeds
        # max_size; guard the whole read with the default timeout.
        d = read_body_with_max_size(response, output_stream, max_size)
        d.addTimeout(self.default_timeout, self.reactor)
        length = await make_deferred_yieldable(d)
    except BodyExceededMaxSize:
        # The remote sent more than we were prepared to accept: surface
        # this to the client as a 502 with a TOO_LARGE error code.
        msg = "Requested file is too large > %r bytes" % (max_size,)
        logger.warning(
            "{%s} [%s] %s",
            request.txn_id,
            request.destination,
            msg,
        )
        raise SynapseError(HTTPStatus.BAD_GATEWAY, msg, Codes.TOO_LARGE)
    except defer.TimeoutError as e:
        # The read timed out: treat as a retryable send failure.
        logger.warning(
            "{%s} [%s] Timed out reading response - %s %s",
            request.txn_id,
            request.destination,
            request.method,
            request.uri.decode("ascii"),
        )
        raise RequestSendFailed(e, can_retry=True) from e
    except ResponseFailed as e:
        # The connection dropped mid-body: also retryable.
        logger.warning(
            "{%s} [%s] Failed to read response - %s %s",
            request.txn_id,
            request.destination,
            request.method,
            request.uri.decode("ascii"),
        )
        raise RequestSendFailed(e, can_retry=True) from e
    except Exception as e:
        # Anything else is unexpected: log it and re-raise unchanged.
        logger.warning(
            "{%s} [%s] Error reading response: %s",
            request.txn_id,
            request.destination,
            e,
        )
        raise
    logger.info(
        "{%s} [%s] Completed: %d %s [%d bytes] %s %s",
        request.txn_id,
        request.destination,
        response.code,
        response.phrase.decode("ascii", errors="replace"),
        length,
        request.method,
        request.uri.decode("ascii"),
    )
    return length, headers
|
2014-12-04 15:22:31 +01:00
|
|
|
|
2014-11-20 18:41:56 +01:00
|
|
|
|
2015-02-18 11:50:10 +01:00
|
|
|
def _flatten_response_never_received(e):
|
|
|
|
if hasattr(e, "reasons"):
|
2017-09-28 14:44:47 +02:00
|
|
|
reasons = ", ".join(
|
2019-06-20 11:32:02 +02:00
|
|
|
_flatten_response_never_received(f.value) for f in e.reasons
|
2015-02-18 11:50:10 +01:00
|
|
|
)
|
2017-09-28 14:44:47 +02:00
|
|
|
|
|
|
|
return "%s:[%s]" % (type(e).__name__, reasons)
|
2015-02-18 11:50:10 +01:00
|
|
|
else:
|
2017-09-28 14:44:47 +02:00
|
|
|
return repr(e)
|
2016-11-30 16:03:00 +01:00
|
|
|
|
|
|
|
|
2021-05-20 17:11:48 +02:00
|
|
|
def check_content_type_is(headers: Headers, expected_content_type: str) -> None:
    """
    Check that a set of HTTP headers have a Content-Type header, and that it
    is the expected value.

    Args:
        headers: headers to check
        expected_content_type: the media type we expect the remote to have
            sent, e.g. "application/json"

    Raises:
        RequestSendFailed: if the Content-Type header is missing or doesn't match
    """
    content_type_headers = headers.getRawHeaders(b"Content-Type")
    if content_type_headers is None:
        raise RequestSendFailed(
            RuntimeError("No Content-Type header received from remote server"),
            can_retry=False,
        )

    c_type = content_type_headers[0].decode("ascii")  # only the first header
    # Strip any parameters (e.g. "; charset=utf-8") to get the bare media
    # type. We previously used cgi.parse_header for this, but the cgi module
    # is deprecated (PEP 594) and removed in Python 3.13; for extracting the
    # main value a simple split is equivalent.
    val = c_type.split(";", 1)[0].strip()
    if val != expected_content_type:
        raise RequestSendFailed(
            RuntimeError(
                f"Remote server sent Content-Type header of '{c_type}', not '{expected_content_type}'",
            ),
            can_retry=False,
        )
|