# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from twisted.internet import defer

from synapse.api.constants import Membership
from synapse.api.urls import FEDERATION_PREFIX as PREFIX
from synapse.util.logutils import log_function

import logging


logger = logging.getLogger(__name__)


class TransportLayerClient(object):
    """Sends federation HTTP requests to other servers"""

    def __init__(self, hs):
        self.server_name = hs.hostname
        self.client = hs.get_http_client()
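
    # Example (illustrative sketch; the destination, room and event ids below
    # are hypothetical): callers construct the client from a HomeServer
    # instance and get Deferreds back from each method:
    #
    #     transport = TransportLayerClient(hs)
    #     d = transport.get_room_state(
    #         "remote.example.org",
    #         "!room:remote.example.org",
    #         "$event:remote.example.org",
    #     )
    #     d.addCallback(lambda state: logger.info("state response: %r", state))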

    @log_function
    def get_room_state(self, destination, room_id, event_id):
        """ Requests all state for a given room from the given server at the
        given event.

        Args:
            destination (str): The host name of the remote home server we want
                to get the state from.
            room_id (str): The room we want the state of.
            event_id (str): The event we want the state at.

        Returns:
            Deferred: Results in a dict received from the remote homeserver.
        """
        logger.debug("get_room_state dest=%s, room=%s",
                     destination, room_id)

        path = PREFIX + "/state/%s/" % room_id
        return self.client.get_json(
            destination, path=path, args={"event_id": event_id},
        )

    @log_function
    def get_room_state_ids(self, destination, room_id, event_id):
        """ Requests the state event ids for a given room from the given
        server at the given event.

        Args:
            destination (str): The host name of the remote home server we want
                to get the state from.
            room_id (str): The room we want the state of.
            event_id (str): The event we want the state at.

        Returns:
            Deferred: Results in a dict received from the remote homeserver.
        """
        logger.debug("get_room_state_ids dest=%s, room=%s",
                     destination, room_id)

        path = PREFIX + "/state_ids/%s/" % room_id
        return self.client.get_json(
            destination, path=path, args={"event_id": event_id},
        )

    @log_function
    def get_event(self, destination, event_id, timeout=None):
        """ Requests the PDU with the given event id from the given server.

        Args:
            destination (str): The host name of the remote home server we want
                to get the event from.
            event_id (str): The id of the event being requested.
            timeout (int): How long to try (in ms) the destination for before
                giving up. None indicates no timeout.

        Returns:
            Deferred: Results in a dict received from the remote homeserver.
        """
        logger.debug("get_pdu dest=%s, event_id=%s",
                     destination, event_id)

        path = PREFIX + "/event/%s/" % (event_id, )
        return self.client.get_json(destination, path=path, timeout=timeout)

    @log_function
    def backfill(self, destination, room_id, event_tuples, limit):
        """ Requests `limit` previous PDUs in a given context before a list of
        PDUs.

        Args:
            destination (str)
            room_id (str)
            event_tuples (list)
            limit (int)

        Returns:
            Deferred: Results in a dict received from the remote homeserver.
        """
        logger.debug(
            "backfill dest=%s, room_id=%s, event_tuples=%s, limit=%s",
            destination, room_id, repr(event_tuples), str(limit)
        )

        if not event_tuples:
            # TODO: raise?
            return

        path = PREFIX + "/backfill/%s/" % (room_id,)

        args = {
            "v": event_tuples,
            "limit": [str(limit)],
        }

        return self.client.get_json(
            destination,
            path=path,
            args=args,
        )
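
    # Illustrative sketch (hypothetical values): `event_tuples` is sent as the
    # repeated "v" query parameter, so a call such as
    #
    #     transport.backfill(
    #         "remote.example.org",
    #         "!room:remote.example.org",
    #         ["$event_a:remote.example.org", "$event_b:remote.example.org"],
    #         limit=10,
    #     )
    #
    # performs a GET against PREFIX + "/backfill/!room:remote.example.org/"
    # with args v=<event_a>&v=<event_b>&limit=10.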

    @defer.inlineCallbacks
    @log_function
    def send_transaction(self, transaction, json_data_callback=None):
        """ Sends the given Transaction to its destination

        Args:
            transaction (Transaction)

        Returns:
            Deferred: Succeeds when we get a 2xx HTTP response. The result is
            the decoded JSON body.
        """
        logger.debug(
            "send_data dest=%s, txid=%s",
            transaction.destination, transaction.transaction_id
        )

        if transaction.destination == self.server_name:
            raise RuntimeError("Transport layer cannot send to itself!")

        # FIXME: This is only used by the tests. The actual json sent is
        # generated by the json_data_callback.
        json_data = transaction.get_dict()

        response = yield self.client.put_json(
            transaction.destination,
            path=PREFIX + "/send/%s/" % transaction.transaction_id,
            data=json_data,
            json_data_callback=json_data_callback,
            long_retries=True,
            backoff_on_404=True,  # If we get a 404 the other side has gone
        )

        logger.debug(
            "send_data dest=%s, txid=%s, got response: 200",
            transaction.destination, transaction.transaction_id,
        )

        defer.returnValue(response)
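
    # Illustrative sketch (hypothetical caller): send_transaction is an
    # inlineCallbacks method, so it is normally yielded from another
    # inlineCallbacks function:
    #
    #     @defer.inlineCallbacks
    #     def _send(transport, transaction):
    #         response = yield transport.send_transaction(transaction)
    #         logger.info("transaction %s accepted: %r",
    #                     transaction.transaction_id, response)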

    @defer.inlineCallbacks
    @log_function
    def make_query(self, destination, query_type, args, retry_on_dns_fail,
                   ignore_backoff=False):
        path = PREFIX + "/query/%s" % query_type

        content = yield self.client.get_json(
            destination=destination,
            path=path,
            args=args,
            retry_on_dns_fail=retry_on_dns_fail,
            timeout=10000,
            ignore_backoff=ignore_backoff,
        )

        defer.returnValue(content)

    @defer.inlineCallbacks
    @log_function
    def make_membership_event(self, destination, room_id, user_id, membership):
        """Asks a remote server to build and sign us a membership event

        Note that this does not append any events to any graphs.

        Args:
            destination (str): address of remote homeserver
            room_id (str): room to join/leave
            user_id (str): user to be joined/left
            membership (str): one of join/leave

        Returns:
            Deferred: Succeeds when we get a 2xx HTTP response. The result
            will be the decoded JSON body (ie, the new event).

            Fails with ``HTTPRequestException`` if we get an HTTP response
            code >= 300.

            Fails with ``NotRetryingDestination`` if we are not yet ready
            to retry this server.
        """
        valid_memberships = {Membership.JOIN, Membership.LEAVE}
        if membership not in valid_memberships:
            raise RuntimeError(
                "make_membership_event called with membership='%s', must be one of %s" %
                (membership, ",".join(valid_memberships))
            )
        path = PREFIX + "/make_%s/%s/%s" % (membership, room_id, user_id)

        ignore_backoff = False
        retry_on_dns_fail = False

        if membership == Membership.LEAVE:
            # we particularly want to do our best to send leave events. The
            # problem is that if it fails, we won't retry it later, so if the
            # remote server was just having a momentary blip, the room will be
            # out of sync.
            ignore_backoff = True
            retry_on_dns_fail = True

        content = yield self.client.get_json(
            destination=destination,
            path=path,
            retry_on_dns_fail=retry_on_dns_fail,
            timeout=20000,
            ignore_backoff=ignore_backoff,
        )

        defer.returnValue(content)
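
    # Illustrative sketch (hypothetical values; the signing step is elided):
    # joining a remote room is a two-step exchange - ask the remote server to
    # build a prototype membership event, then send the finished event back
    # with send_join:
    #
    #     @defer.inlineCallbacks
    #     def _join(transport, destination, room_id, user_id):
    #         ret = yield transport.make_membership_event(
    #             destination, room_id, user_id, Membership.JOIN,
    #         )
    #         # ... caller builds and signs the event from ret["event"],
    #         # producing `event_id` and `signed_event` ...
    #         yield transport.send_join(destination, room_id, event_id, signed_event)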

    @defer.inlineCallbacks
    @log_function
    def send_join(self, destination, room_id, event_id, content):
        path = PREFIX + "/send_join/%s/%s" % (room_id, event_id)

        response = yield self.client.put_json(
            destination=destination,
            path=path,
            data=content,
        )

        defer.returnValue(response)

    @defer.inlineCallbacks
    @log_function
    def send_leave(self, destination, room_id, event_id, content):
        path = PREFIX + "/send_leave/%s/%s" % (room_id, event_id)

        response = yield self.client.put_json(
            destination=destination,
            path=path,
            data=content,

            # we want to do our best to send this through. The problem is
            # that if it fails, we won't retry it later, so if the remote
            # server was just having a momentary blip, the room will be out of
            # sync.
            ignore_backoff=True,
        )

        defer.returnValue(response)

    @defer.inlineCallbacks
    @log_function
    def send_invite(self, destination, room_id, event_id, content):
        path = PREFIX + "/invite/%s/%s" % (room_id, event_id)

        response = yield self.client.put_json(
            destination=destination,
            path=path,
            data=content,
            ignore_backoff=True,
        )

        defer.returnValue(response)

    @defer.inlineCallbacks
    @log_function
    def get_public_rooms(self, remote_server, limit, since_token,
                         search_filter=None, include_all_networks=False,
                         third_party_instance_id=None):
        path = PREFIX + "/publicRooms"

        args = {
            "include_all_networks": "true" if include_all_networks else "false",
        }
        if third_party_instance_id:
            args["third_party_instance_id"] = (third_party_instance_id,)
        if limit:
            args["limit"] = [str(limit)]
        if since_token:
            args["since"] = [since_token]

        # TODO(erikj): Actually send the search_filter across federation.

        response = yield self.client.get_json(
            destination=remote_server,
            path=path,
            args=args,
            ignore_backoff=True,
        )

        defer.returnValue(response)

    @defer.inlineCallbacks
    @log_function
    def exchange_third_party_invite(self, destination, room_id, event_dict):
        path = PREFIX + "/exchange_third_party_invite/%s" % (room_id,)

        response = yield self.client.put_json(
            destination=destination,
            path=path,
            data=event_dict,
        )

        defer.returnValue(response)

    @defer.inlineCallbacks
    @log_function
    def get_event_auth(self, destination, room_id, event_id):
        path = PREFIX + "/event_auth/%s/%s" % (room_id, event_id)

        content = yield self.client.get_json(
            destination=destination,
            path=path,
        )

        defer.returnValue(content)

    @defer.inlineCallbacks
    @log_function
    def send_query_auth(self, destination, room_id, event_id, content):
        path = PREFIX + "/query_auth/%s/%s" % (room_id, event_id)

        content = yield self.client.post_json(
            destination=destination,
            path=path,
            data=content,
        )

        defer.returnValue(content)

    @defer.inlineCallbacks
    @log_function
    def query_client_keys(self, destination, query_content, timeout):
        """Query the device keys for a list of user ids hosted on a remote
        server.

        Request:
            {
              "device_keys": {
                "<user_id>": ["<device_id>"]
              }
            }

        Response:
            {
              "device_keys": {
                "<user_id>": {
                  "<device_id>": {...}
                }
              }
            }

        Args:
            destination (str): The server to query.
            query_content (dict): The user ids to query.
            timeout (int): How long to try (in ms) the destination for before
                giving up.
        Returns:
            A dict containing the device keys.
        """
        path = PREFIX + "/user/keys/query"

        content = yield self.client.post_json(
            destination=destination,
            path=path,
            data=query_content,
            timeout=timeout,
        )
        defer.returnValue(content)
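
    # Illustrative sketch (hypothetical values): the request body mirrors the
    # docstring above - a map from user id to the device ids to query, where
    # an empty list asks for all of that user's devices:
    #
    #     query_content = {
    #         "device_keys": {
    #             "@alice:remote.example.org": [],
    #         },
    #     }
    #     d = transport.query_client_keys(
    #         "remote.example.org", query_content, timeout=10000,
    #     )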

    @defer.inlineCallbacks
    @log_function
    def query_user_devices(self, destination, user_id, timeout):
        """Query the devices for a user id hosted on a remote server.

        Response:
            {
              "stream_id": "...",
              "devices": [ { ... } ]
            }

        Args:
            destination (str): The server to query.
            user_id (str): The user id of the devices to query.
            timeout (int): How long to try (in ms) the destination for before
                giving up.
        Returns:
            A dict containing the device list.
        """
        path = PREFIX + "/user/devices/" + user_id

        content = yield self.client.get_json(
            destination=destination,
            path=path,
            timeout=timeout,
        )
        defer.returnValue(content)

    @defer.inlineCallbacks
    @log_function
    def claim_client_keys(self, destination, query_content, timeout):
        """Claim one-time keys for a list of devices hosted on a remote server.

        Request:
            {
              "one_time_keys": {
                "<user_id>": {
                  "<device_id>": "<algorithm>"
                }
              }
            }

        Response:
            {
              "one_time_keys": {
                "<user_id>": {
                  "<device_id>": {
                    "<algorithm>:<key_id>": "<key_base64>"
                  }
                }
              }
            }

        Args:
            destination (str): The server to query.
            query_content (dict): The user ids to query.
            timeout (int): How long to try (in ms) the destination for before
                giving up.
        Returns:
            A dict containing the one-time keys.
        """
        path = PREFIX + "/user/keys/claim"

        content = yield self.client.post_json(
            destination=destination,
            path=path,
            data=query_content,
            timeout=timeout,
        )
        defer.returnValue(content)

    @defer.inlineCallbacks
    @log_function
    def get_missing_events(self, destination, room_id, earliest_events,
                           latest_events, limit, min_depth, timeout):
        path = PREFIX + "/get_missing_events/%s" % (room_id,)

        content = yield self.client.post_json(
            destination=destination,
            path=path,
            data={
                "limit": int(limit),
                "min_depth": int(min_depth),
                "earliest_events": earliest_events,
                "latest_events": latest_events,
            },
            timeout=timeout,
        )

        defer.returnValue(content)
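
    # Illustrative sketch (hypothetical values): get_missing_events asks the
    # remote server for events lying between the events we already have
    # (earliest_events) and the events we have just received (latest_events):
    #
    #     missing = yield transport.get_missing_events(
    #         destination="remote.example.org",
    #         room_id="!room:remote.example.org",
    #         earliest_events=["$have:remote.example.org"],
    #         latest_events=["$received:remote.example.org"],
    #         limit=10,
    #         min_depth=0,
    #         timeout=10000,
    #     )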