# -*- coding: utf-8 -*-
# Copyright 2014, 2015 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
|
|
|
|
|
|
|
|
|
2014-12-04 15:22:31 +01:00
|
|
|
from twisted.internet import defer, reactor, protocol
|
2014-11-20 18:41:56 +01:00
|
|
|
from twisted.internet.error import DNSLookupError
|
2015-06-01 11:51:50 +02:00
|
|
|
from twisted.web.client import readBody, HTTPConnectionPool, Agent
|
2014-11-20 18:41:56 +01:00
|
|
|
from twisted.web.http_headers import Headers
|
2014-12-04 15:22:31 +01:00
|
|
|
from twisted.web._newclient import ResponseDone
|
2014-11-20 18:41:56 +01:00
|
|
|
|
|
|
|
from synapse.http.endpoint import matrix_federation_endpoint
|
|
|
|
from synapse.util.async import sleep
|
2015-05-08 17:32:18 +02:00
|
|
|
from synapse.util.logcontext import preserve_context_over_fn
|
2015-02-24 20:51:21 +01:00
|
|
|
import synapse.metrics
|
2014-11-20 18:41:56 +01:00
|
|
|
|
2015-08-24 17:17:38 +02:00
|
|
|
from canonicaljson import encode_canonical_json
|
2014-11-20 18:41:56 +01:00
|
|
|
|
2015-02-04 17:28:12 +01:00
|
|
|
from synapse.api.errors import (
|
|
|
|
SynapseError, Codes, HttpResponseException,
|
|
|
|
)
|
2014-11-20 18:41:56 +01:00
|
|
|
|
2015-08-24 17:17:38 +02:00
|
|
|
from signedjson.sign import sign_json
|
2014-11-20 18:41:56 +01:00
|
|
|
|
2015-02-11 15:23:10 +01:00
|
|
|
import simplejson as json
|
2014-11-20 18:41:56 +01:00
|
|
|
import logging
|
2015-06-19 11:13:03 +02:00
|
|
|
import sys
|
2014-11-20 18:41:56 +01:00
|
|
|
import urllib
|
|
|
|
import urlparse
|
|
|
|
|
|
|
|
|
|
|
|
logger = logging.getLogger(__name__)

# Dedicated logger for outbound federation traffic, so request/response lines
# can be filtered or routed independently of this module's own log output.
outbound_logger = logging.getLogger("synapse.http.outbound")

metrics = synapse.metrics.get_metrics_for(__name__)

# Counts HTTP requests sent to remote homeservers, labelled by method.
outgoing_requests_counter = metrics.register_counter(
    "requests",
    labels=["method"],
)
# Counts HTTP responses received from remote homeservers, labelled by
# method and status code.
incoming_responses_counter = metrics.register_counter(
    "responses",
    labels=["method", "code"],
)
|
|
|
|
|
2014-11-20 18:41:56 +01:00
|
|
|
|
2015-06-01 11:51:50 +02:00
|
|
|
class MatrixFederationEndpointFactory(object):
    """Endpoint factory for twisted's ``Agent.usingEndpointFactory``.

    Maps each matrix:// URI to a federation endpoint for the destination
    homeserver, wiring in this server's TLS context factory.
    """

    def __init__(self, hs):
        self.tls_server_context_factory = hs.tls_server_context_factory

    def endpointForURI(self, uri):
        # The netloc of a matrix:// URI is the remote homeserver name.
        return matrix_federation_endpoint(
            reactor,
            uri.netloc,
            timeout=10,
            ssl_context_factory=self.tls_server_context_factory,
        )
|
2014-11-20 18:41:56 +01:00
|
|
|
|
|
|
|
|
|
|
|
class MatrixFederationHttpClient(object):
    """HTTP client used to talk to other homeservers over the federation
    protocol. Send client certificates and signs requests.

    Requests are signed with this server's signing key (see
    ``sign_request``) and retried with exponential backoff on failure
    (see ``_create_request``).

    Attributes:
        agent (twisted.web.client.Agent): The twisted Agent used to send the
            requests.
    """
|
|
|
|
|
|
|
|
def __init__(self, hs):
|
|
|
|
self.hs = hs
|
|
|
|
self.signing_key = hs.config.signing_key[0]
|
|
|
|
self.server_name = hs.hostname
|
2015-05-28 16:34:00 +02:00
|
|
|
pool = HTTPConnectionPool(reactor)
|
2015-05-28 16:45:46 +02:00
|
|
|
pool.maxPersistentPerHost = 10
|
2015-06-01 11:51:50 +02:00
|
|
|
self.agent = Agent.usingEndpointFactory(
|
|
|
|
reactor, MatrixFederationEndpointFactory(hs), pool=pool
|
|
|
|
)
|
2015-02-10 19:17:27 +01:00
|
|
|
self.clock = hs.get_clock()
|
2015-02-18 17:51:33 +01:00
|
|
|
self.version_string = hs.version_string
|
2015-06-19 11:13:03 +02:00
|
|
|
self._next_id = 1
|
2014-11-20 18:41:56 +01:00
|
|
|
|
2015-06-01 11:51:50 +02:00
|
|
|
def _create_url(self, destination, path_bytes, param_bytes, query_bytes):
|
|
|
|
return urlparse.urlunparse(
|
|
|
|
("matrix", destination, path_bytes, param_bytes, query_bytes, "")
|
|
|
|
)
|
|
|
|
|
2014-11-20 18:41:56 +01:00
|
|
|
@defer.inlineCallbacks
|
|
|
|
def _create_request(self, destination, method, path_bytes,
|
|
|
|
body_callback, headers_dict={}, param_bytes=b"",
|
2015-05-19 15:53:32 +02:00
|
|
|
query_bytes=b"", retry_on_dns_fail=True,
|
|
|
|
timeout=None):
|
2014-11-20 18:41:56 +01:00
|
|
|
""" Creates and sends a request to the given url
|
|
|
|
"""
|
2015-02-18 17:51:33 +01:00
|
|
|
headers_dict[b"User-Agent"] = [self.version_string]
|
2014-11-20 18:41:56 +01:00
|
|
|
headers_dict[b"Host"] = [destination]
|
|
|
|
|
2015-06-01 11:51:50 +02:00
|
|
|
url_bytes = self._create_url(
|
|
|
|
destination, path_bytes, param_bytes, query_bytes
|
2014-11-20 18:41:56 +01:00
|
|
|
)
|
|
|
|
|
2015-06-19 12:45:55 +02:00
|
|
|
txn_id = "%s-O-%s" % (method, self._next_id)
|
2015-06-19 11:13:03 +02:00
|
|
|
self._next_id = (self._next_id + 1) % (sys.maxint - 1)
|
2014-11-20 18:41:56 +01:00
|
|
|
|
2015-06-19 11:13:03 +02:00
|
|
|
outbound_logger.info(
|
|
|
|
"{%s} [%s] Sending request: %s %s",
|
|
|
|
txn_id, destination, method, url_bytes
|
2014-11-20 18:41:56 +01:00
|
|
|
)
|
|
|
|
|
2014-12-10 11:28:27 +01:00
|
|
|
# XXX: Would be much nicer to retry only at the transaction-layer
|
|
|
|
# (once we have reliable transactions in place)
|
2014-11-20 18:41:56 +01:00
|
|
|
retries_left = 5
|
|
|
|
|
2015-06-01 11:51:50 +02:00
|
|
|
http_url_bytes = urlparse.urlunparse(
|
|
|
|
("", "", path_bytes, param_bytes, query_bytes, "")
|
|
|
|
)
|
2014-11-20 18:41:56 +01:00
|
|
|
|
2015-06-19 11:13:03 +02:00
|
|
|
log_result = None
|
|
|
|
try:
|
|
|
|
while True:
|
|
|
|
producer = None
|
|
|
|
if body_callback:
|
2015-08-12 18:07:22 +02:00
|
|
|
producer = body_callback(method, http_url_bytes, headers_dict)
|
2015-06-19 11:13:03 +02:00
|
|
|
|
|
|
|
try:
|
2015-06-19 12:45:55 +02:00
|
|
|
def send_request():
|
2015-08-12 18:07:22 +02:00
|
|
|
request_deferred = preserve_context_over_fn(
|
|
|
|
self.agent.request,
|
2015-06-19 12:45:55 +02:00
|
|
|
method,
|
2015-08-12 18:07:22 +02:00
|
|
|
url_bytes,
|
2015-06-19 12:45:55 +02:00
|
|
|
Headers(headers_dict),
|
|
|
|
producer
|
|
|
|
)
|
|
|
|
|
|
|
|
return self.clock.time_bound_deferred(
|
|
|
|
request_deferred,
|
|
|
|
time_out=timeout/1000. if timeout else 60,
|
|
|
|
)
|
2015-06-19 11:13:03 +02:00
|
|
|
|
2015-06-19 12:45:55 +02:00
|
|
|
response = yield preserve_context_over_fn(
|
|
|
|
send_request,
|
2015-06-19 11:13:03 +02:00
|
|
|
)
|
2015-05-08 17:32:18 +02:00
|
|
|
|
2015-06-19 11:13:03 +02:00
|
|
|
log_result = "%d %s" % (response.code, response.phrase,)
|
|
|
|
break
|
|
|
|
except Exception as e:
|
|
|
|
if not retry_on_dns_fail and isinstance(e, DNSLookupError):
|
|
|
|
logger.warn(
|
|
|
|
"DNS Lookup failed to %s with %s",
|
|
|
|
destination,
|
|
|
|
e
|
|
|
|
)
|
|
|
|
log_result = "DNS Lookup failed to %s with %s" % (
|
|
|
|
destination, e
|
|
|
|
)
|
|
|
|
raise
|
2014-11-20 18:41:56 +01:00
|
|
|
|
2014-12-10 11:16:09 +01:00
|
|
|
logger.warn(
|
2015-06-19 11:13:03 +02:00
|
|
|
"{%s} Sending request failed to %s: %s %s: %s - %s",
|
|
|
|
txn_id,
|
2014-12-10 11:16:09 +01:00
|
|
|
destination,
|
2015-06-19 11:13:03 +02:00
|
|
|
method,
|
|
|
|
url_bytes,
|
|
|
|
type(e).__name__,
|
|
|
|
_flatten_response_never_received(e),
|
2014-12-10 11:16:09 +01:00
|
|
|
)
|
2014-11-20 18:41:56 +01:00
|
|
|
|
2015-06-19 11:13:03 +02:00
|
|
|
log_result = "%s - %s" % (
|
|
|
|
type(e).__name__, _flatten_response_never_received(e),
|
|
|
|
)
|
|
|
|
|
|
|
|
if retries_left and not timeout:
|
|
|
|
yield sleep(2 ** (5 - retries_left))
|
|
|
|
retries_left -= 1
|
|
|
|
else:
|
|
|
|
raise
|
|
|
|
finally:
|
|
|
|
outbound_logger.info(
|
|
|
|
"{%s} [%s] Result: %s",
|
|
|
|
txn_id,
|
|
|
|
destination,
|
|
|
|
log_result,
|
|
|
|
)
|
2014-12-10 11:16:09 +01:00
|
|
|
|
2014-11-20 18:41:56 +01:00
|
|
|
if 200 <= response.code < 300:
|
|
|
|
pass
|
|
|
|
else:
|
|
|
|
# :'(
|
|
|
|
# Update transactions table?
|
2015-06-19 12:45:55 +02:00
|
|
|
body = yield preserve_context_over_fn(readBody, response)
|
2015-02-04 17:28:12 +01:00
|
|
|
raise HttpResponseException(
|
2015-02-06 11:55:01 +01:00
|
|
|
response.code, response.phrase, body
|
2014-11-20 18:41:56 +01:00
|
|
|
)
|
|
|
|
|
|
|
|
defer.returnValue(response)
|
|
|
|
|
|
|
|
def sign_request(self, destination, method, url_bytes, headers_dict,
|
|
|
|
content=None):
|
|
|
|
request = {
|
|
|
|
"method": method,
|
|
|
|
"uri": url_bytes,
|
|
|
|
"origin": self.server_name,
|
|
|
|
"destination": destination,
|
|
|
|
}
|
|
|
|
|
|
|
|
if content is not None:
|
|
|
|
request["content"] = content
|
|
|
|
|
|
|
|
request = sign_json(request, self.server_name, self.signing_key)
|
|
|
|
|
|
|
|
auth_headers = []
|
|
|
|
|
|
|
|
for key, sig in request["signatures"][self.server_name].items():
|
|
|
|
auth_headers.append(bytes(
|
|
|
|
"X-Matrix origin=%s,key=\"%s\",sig=\"%s\"" % (
|
|
|
|
self.server_name, key, sig,
|
|
|
|
)
|
|
|
|
))
|
|
|
|
|
|
|
|
headers_dict[b"Authorization"] = auth_headers
|
|
|
|
|
|
|
|
@defer.inlineCallbacks
|
|
|
|
def put_json(self, destination, path, data={}, json_data_callback=None):
|
|
|
|
""" Sends the specifed json data using PUT
|
|
|
|
|
|
|
|
Args:
|
|
|
|
destination (str): The remote server to send the HTTP request
|
|
|
|
to.
|
|
|
|
path (str): The HTTP path.
|
|
|
|
data (dict): A dict containing the data that will be used as
|
|
|
|
the request body. This will be encoded as JSON.
|
|
|
|
json_data_callback (callable): A callable returning the dict to
|
|
|
|
use as the request body.
|
|
|
|
|
|
|
|
Returns:
|
|
|
|
Deferred: Succeeds when we get a 2xx HTTP response. The result
|
|
|
|
will be the decoded JSON body. On a 4xx or 5xx error response a
|
|
|
|
CodeMessageException is raised.
|
|
|
|
"""
|
|
|
|
|
|
|
|
if not json_data_callback:
|
|
|
|
def json_data_callback():
|
|
|
|
return data
|
|
|
|
|
|
|
|
def body_callback(method, url_bytes, headers_dict):
|
|
|
|
json_data = json_data_callback()
|
|
|
|
self.sign_request(
|
|
|
|
destination, method, url_bytes, headers_dict, json_data
|
|
|
|
)
|
|
|
|
producer = _JsonProducer(json_data)
|
|
|
|
return producer
|
|
|
|
|
|
|
|
response = yield self._create_request(
|
|
|
|
destination.encode("ascii"),
|
|
|
|
"PUT",
|
|
|
|
path.encode("ascii"),
|
|
|
|
body_callback=body_callback,
|
|
|
|
headers_dict={"Content-Type": ["application/json"]},
|
|
|
|
)
|
|
|
|
|
2015-02-04 17:28:12 +01:00
|
|
|
if 200 <= response.code < 300:
|
|
|
|
# We need to update the transactions table to say it was sent?
|
|
|
|
c_type = response.headers.getRawHeaders("Content-Type")
|
|
|
|
|
|
|
|
if "application/json" not in c_type:
|
|
|
|
raise RuntimeError(
|
|
|
|
"Content-Type not application/json"
|
|
|
|
)
|
|
|
|
|
2015-06-19 12:45:55 +02:00
|
|
|
body = yield preserve_context_over_fn(readBody, response)
|
2015-02-04 17:28:12 +01:00
|
|
|
defer.returnValue(json.loads(body))
|
2015-01-29 14:44:52 +01:00
|
|
|
|
|
|
|
@defer.inlineCallbacks
|
|
|
|
def post_json(self, destination, path, data={}):
|
|
|
|
""" Sends the specifed json data using POST
|
|
|
|
|
|
|
|
Args:
|
|
|
|
destination (str): The remote server to send the HTTP request
|
|
|
|
to.
|
|
|
|
path (str): The HTTP path.
|
|
|
|
data (dict): A dict containing the data that will be used as
|
|
|
|
the request body. This will be encoded as JSON.
|
|
|
|
|
|
|
|
Returns:
|
|
|
|
Deferred: Succeeds when we get a 2xx HTTP response. The result
|
|
|
|
will be the decoded JSON body. On a 4xx or 5xx error response a
|
|
|
|
CodeMessageException is raised.
|
|
|
|
"""
|
|
|
|
|
|
|
|
def body_callback(method, url_bytes, headers_dict):
|
|
|
|
self.sign_request(
|
|
|
|
destination, method, url_bytes, headers_dict, data
|
|
|
|
)
|
2015-01-29 14:58:22 +01:00
|
|
|
return _JsonProducer(data)
|
2015-01-29 14:44:52 +01:00
|
|
|
|
|
|
|
response = yield self._create_request(
|
|
|
|
destination.encode("ascii"),
|
|
|
|
"POST",
|
|
|
|
path.encode("ascii"),
|
|
|
|
body_callback=body_callback,
|
|
|
|
headers_dict={"Content-Type": ["application/json"]},
|
|
|
|
)
|
|
|
|
|
2015-02-04 17:28:12 +01:00
|
|
|
if 200 <= response.code < 300:
|
|
|
|
# We need to update the transactions table to say it was sent?
|
|
|
|
c_type = response.headers.getRawHeaders("Content-Type")
|
|
|
|
|
|
|
|
if "application/json" not in c_type:
|
|
|
|
raise RuntimeError(
|
|
|
|
"Content-Type not application/json"
|
|
|
|
)
|
|
|
|
|
2015-06-19 12:45:55 +02:00
|
|
|
body = yield preserve_context_over_fn(readBody, response)
|
2015-01-29 14:44:52 +01:00
|
|
|
|
2015-02-04 17:28:12 +01:00
|
|
|
defer.returnValue(json.loads(body))
|
2014-11-20 18:41:56 +01:00
|
|
|
|
|
|
|
@defer.inlineCallbacks
|
2015-05-19 15:53:32 +02:00
|
|
|
def get_json(self, destination, path, args={}, retry_on_dns_fail=True,
|
|
|
|
timeout=None):
|
2014-12-04 15:22:31 +01:00
|
|
|
""" GETs some json from the given host homeserver and path
|
2014-11-20 18:41:56 +01:00
|
|
|
|
|
|
|
Args:
|
|
|
|
destination (str): The remote server to send the HTTP request
|
|
|
|
to.
|
|
|
|
path (str): The HTTP path.
|
|
|
|
args (dict): A dictionary used to create query strings, defaults to
|
|
|
|
None.
|
2015-05-22 16:18:04 +02:00
|
|
|
timeout (int): How long to try (in ms) the destination for before
|
|
|
|
giving up. None indicates no timeout and that the request will
|
|
|
|
be retried.
|
2014-11-20 18:41:56 +01:00
|
|
|
Returns:
|
|
|
|
Deferred: Succeeds when we get *any* HTTP response.
|
|
|
|
|
|
|
|
The result of the deferred is a tuple of `(code, response)`,
|
|
|
|
where `response` is a dict representing the decoded JSON body.
|
|
|
|
"""
|
|
|
|
logger.debug("get_json args: %s", args)
|
|
|
|
|
|
|
|
encoded_args = {}
|
|
|
|
for k, vs in args.items():
|
|
|
|
if isinstance(vs, basestring):
|
|
|
|
vs = [vs]
|
|
|
|
encoded_args[k] = [v.encode("UTF-8") for v in vs]
|
|
|
|
|
|
|
|
query_bytes = urllib.urlencode(encoded_args, True)
|
|
|
|
logger.debug("Query bytes: %s Retry DNS: %s", args, retry_on_dns_fail)
|
|
|
|
|
|
|
|
def body_callback(method, url_bytes, headers_dict):
|
|
|
|
self.sign_request(destination, method, url_bytes, headers_dict)
|
|
|
|
return None
|
|
|
|
|
|
|
|
response = yield self._create_request(
|
|
|
|
destination.encode("ascii"),
|
|
|
|
"GET",
|
|
|
|
path.encode("ascii"),
|
|
|
|
query_bytes=query_bytes,
|
|
|
|
body_callback=body_callback,
|
2015-05-19 15:53:32 +02:00
|
|
|
retry_on_dns_fail=retry_on_dns_fail,
|
|
|
|
timeout=timeout,
|
2014-11-20 18:41:56 +01:00
|
|
|
)
|
|
|
|
|
2015-02-04 17:28:12 +01:00
|
|
|
if 200 <= response.code < 300:
|
|
|
|
# We need to update the transactions table to say it was sent?
|
|
|
|
c_type = response.headers.getRawHeaders("Content-Type")
|
|
|
|
|
|
|
|
if "application/json" not in c_type:
|
|
|
|
raise RuntimeError(
|
|
|
|
"Content-Type not application/json"
|
|
|
|
)
|
|
|
|
|
2015-06-19 12:45:55 +02:00
|
|
|
body = yield preserve_context_over_fn(readBody, response)
|
2014-11-20 18:41:56 +01:00
|
|
|
|
|
|
|
defer.returnValue(json.loads(body))
|
|
|
|
|
2014-12-04 15:22:31 +01:00
|
|
|
@defer.inlineCallbacks
|
|
|
|
def get_file(self, destination, path, output_stream, args={},
|
2014-12-11 15:19:32 +01:00
|
|
|
retry_on_dns_fail=True, max_size=None):
|
2014-12-04 15:22:31 +01:00
|
|
|
"""GETs a file from a given homeserver
|
|
|
|
Args:
|
|
|
|
destination (str): The remote server to send the HTTP request to.
|
|
|
|
path (str): The HTTP path to GET.
|
|
|
|
output_stream (file): File to write the response body to.
|
|
|
|
args (dict): Optional dictionary used to create the query string.
|
|
|
|
Returns:
|
|
|
|
A (int,dict) tuple of the file length and a dict of the response
|
|
|
|
headers.
|
|
|
|
"""
|
|
|
|
|
|
|
|
encoded_args = {}
|
|
|
|
for k, vs in args.items():
|
|
|
|
if isinstance(vs, basestring):
|
|
|
|
vs = [vs]
|
|
|
|
encoded_args[k] = [v.encode("UTF-8") for v in vs]
|
|
|
|
|
|
|
|
query_bytes = urllib.urlencode(encoded_args, True)
|
|
|
|
logger.debug("Query bytes: %s Retry DNS: %s", args, retry_on_dns_fail)
|
|
|
|
|
|
|
|
def body_callback(method, url_bytes, headers_dict):
|
|
|
|
self.sign_request(destination, method, url_bytes, headers_dict)
|
|
|
|
return None
|
|
|
|
|
|
|
|
response = yield self._create_request(
|
|
|
|
destination.encode("ascii"),
|
|
|
|
"GET",
|
|
|
|
path.encode("ascii"),
|
|
|
|
query_bytes=query_bytes,
|
|
|
|
body_callback=body_callback,
|
|
|
|
retry_on_dns_fail=retry_on_dns_fail
|
|
|
|
)
|
|
|
|
|
|
|
|
headers = dict(response.headers.getAllRawHeaders())
|
|
|
|
|
2014-12-11 15:19:32 +01:00
|
|
|
try:
|
2015-06-19 12:45:55 +02:00
|
|
|
length = yield preserve_context_over_fn(
|
|
|
|
_readBodyToFile,
|
|
|
|
response, output_stream, max_size
|
|
|
|
)
|
2014-12-11 15:19:32 +01:00
|
|
|
except:
|
|
|
|
logger.exception("Failed to download body")
|
|
|
|
raise
|
2014-12-04 15:22:31 +01:00
|
|
|
|
|
|
|
defer.returnValue((length, headers))
|
|
|
|
|
2014-11-20 18:41:56 +01:00
|
|
|
|
2014-12-04 15:22:31 +01:00
|
|
|
class _ReadBodyToFileProtocol(protocol.Protocol):
    """Streams a response body into a file-like object.

    Fires ``deferred`` with the total number of bytes written once the
    body has been received, or with an error if the download fails or
    grows beyond ``max_size``.
    """

    def __init__(self, stream, deferred, max_size):
        self.stream = stream
        self.deferred = deferred
        self.length = 0
        self.max_size = max_size

    def dataReceived(self, data):
        self.stream.write(data)
        self.length += len(data)

        if self.max_size is None or self.length < self.max_size:
            return

        # Too big: report the error now, and swap in a dummy deferred so
        # that the connectionLost below has nothing left to fire.
        self.deferred.errback(SynapseError(
            502,
            "Requested file is too large > %r bytes" % (self.max_size,),
            Codes.TOO_LARGE,
        ))
        self.deferred = defer.Deferred()
        self.transport.loseConnection()

    def connectionLost(self, reason):
        if not reason.check(ResponseDone):
            self.deferred.errback(reason)
        else:
            self.deferred.callback(self.length)
|
|
|
|
|
|
|
|
|
2014-12-11 15:19:32 +01:00
|
|
|
def _readBodyToFile(response, stream, max_size):
    """Stream ``response``'s body into ``stream``.

    Returns a Deferred which fires with the number of bytes written, or
    errs if the download fails or exceeds ``max_size``.
    """
    done = defer.Deferred()
    response.deliverBody(_ReadBodyToFileProtocol(stream, done, max_size))
    return done
|
|
|
|
|
|
|
|
|
2014-11-20 18:41:56 +01:00
|
|
|
class _JsonProducer(object):
    """ Used by the twisted http client to create the HTTP body from json
    """
    def __init__(self, jsn):
        self.reset(jsn)

    def reset(self, jsn):
        # Canonical encoding keeps the body byte-identical to what was
        # signed; ``length`` is part of the IBodyProducer interface.
        encoded = encode_canonical_json(jsn)
        self.body = encoded
        self.length = len(encoded)

    def startProducing(self, consumer):
        # The whole body is already in memory, so write it in one go.
        consumer.write(self.body)
        return defer.succeed(None)

    def pauseProducing(self):
        # Nothing to pause: production completed in startProducing.
        pass

    def stopProducing(self):
        # Nothing to stop: production completed in startProducing.
        pass
|
2015-02-18 11:50:10 +01:00
|
|
|
|
|
|
|
|
|
|
|
def _flatten_response_never_received(e):
|
|
|
|
if hasattr(e, "reasons"):
|
|
|
|
return ", ".join(
|
|
|
|
_flatten_response_never_received(f.value)
|
|
|
|
for f in e.reasons
|
|
|
|
)
|
|
|
|
else:
|
2015-02-18 11:51:32 +01:00
|
|
|
return "%s: %s" % (type(e).__name__, e.message,)
|