Add 'raw' query parameter to expose the event graph and signatures to savvy clients.
parent d44dd47fbf
commit 5720ab59e0
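For illustration only: a client opts into the untrimmed events by including a `raw` key in the query string of the event-stream or initial-sync request. As the servlet hunks below show, only the key's presence is tested, so the value does not matter. The endpoint paths in this example are assumptions for illustration and are not part of this change:

    GET /events?access_token=<token>&timeout=30000&raw=1
    GET /initialSync?access_token=<token>&raw=1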
@@ -89,7 +89,7 @@ def prune_event(event):
     return type(event)(allowed_fields)
 
 
-def serialize_event(hs, e):
+def serialize_event(hs, e, remove_data=True):
     # FIXME(erikj): To handle the case of presence events and the like
     if not isinstance(e, EventBase):
         return e
@@ -122,12 +122,13 @@ def serialize_event(hs, e):
         d["prev_content"] = e.unsigned["prev_content"]
         del d["unsigned"]["prev_content"]
 
-    del d["auth_events"]
-    del d["prev_events"]
-    del d["hashes"]
-    del d["signatures"]
-    d.pop("depth", None)
-    d.pop("unsigned", None)
-    d.pop("origin", None)
+    if remove_data:
+        del d["auth_events"]
+        del d["prev_events"]
+        del d["hashes"]
+        del d["signatures"]
+        d.pop("depth", None)
+        d.pop("unsigned", None)
+        d.pop("origin", None)
 
     return d
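The two hunks above are the core of the change: serialize_event now strips the event-graph and signature fields only when remove_data is true. A self-contained sketch that mirrors the trimming with a plain dict (the helper name `trim` and the sample event are made up for illustration; the real logic lives in serialize_event above, which uses del for the always-present fields and pop for the optional ones):

    # Sketch only: mirrors the serializer's trimming so it runs standalone.
    # Field names come directly from the hunk above.
    def trim(d, remove_data=True):
        d = dict(d)
        if remove_data:
            for key in ("auth_events", "prev_events", "hashes", "signatures",
                        "depth", "unsigned", "origin"):
                d.pop(key, None)
        return d

    event = {"type": "m.room.message", "depth": 5, "signatures": {"example.org": {}}}
    print(trim(event))                     # graph/signature fields stripped (default)
    print(trim(event, remove_data=False))  # everything kept, as with ?raw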
@@ -46,7 +46,8 @@ class EventStreamHandler(BaseHandler):
 
     @defer.inlineCallbacks
     @log_function
-    def get_stream(self, auth_user_id, pagin_config, timeout=0):
+    def get_stream(self, auth_user_id, pagin_config, timeout=0,
+                   trim_events=True):
         auth_user = self.hs.parse_userid(auth_user_id)
 
         try:
@@ -78,7 +79,9 @@ class EventStreamHandler(BaseHandler):
                 auth_user, room_ids, pagin_config, timeout
             )
 
-            chunks = [self.hs.serialize_event(e) for e in events]
+            chunks = [
+                self.hs.serialize_event(e, trim_events) for e in events
+            ]
 
             chunk = {
                 "chunk": chunks,
@@ -211,7 +211,7 @@ class MessageHandler(BaseHandler):
 
     @defer.inlineCallbacks
     def snapshot_all_rooms(self, user_id=None, pagin_config=None,
-                           feedback=False):
+                           feedback=False, trim_events=True):
         """Retrieve a snapshot of all rooms the user is invited or has joined.
 
         This snapshot may include messages for all rooms where the user is
@@ -280,7 +280,9 @@ class MessageHandler(BaseHandler):
                 end_token = now_token.copy_and_replace("room_key", token[1])
 
                 d["messages"] = {
-                    "chunk": [self.hs.serialize_event(m) for m in messages],
+                    "chunk": [
+                        self.hs.serialize_event(m, trim_events) for m in messages
+                    ],
                     "start": start_token.to_string(),
                     "end": end_token.to_string(),
                 }
@@ -44,8 +44,11 @@ class EventStreamRestServlet(RestServlet):
                 except ValueError:
                     raise SynapseError(400, "timeout must be in milliseconds.")
 
+            trim_events = "raw" not in request.args
+
             chunk = yield handler.get_stream(
-                auth_user.to_string(), pagin_config, timeout=timeout
+                auth_user.to_string(), pagin_config, timeout=timeout,
+                trim_events=trim_events
             )
         except:
             logger.exception("Event stream failed")
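One behavioural note from this servlet hunk (and the matching initial-sync hunk below): only the presence of the `raw` key is checked, never its value, so `?raw`, `?raw=0` and `?raw=false` all switch trimming off. A tiny standalone sketch, with plain dicts standing in for Twisted's request.args (a mapping of query keys to lists of values):

    # Sketch: the servlets use a membership test, not a value test.
    for args in ({}, {"raw": [b"1"]}, {"raw": [b"0"]}, {"raw": [b""]}):
        trim_events = "raw" not in args
        print(args, "-> trim_events =", trim_events)
    # Only the empty query string yields trim_events = True.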
@@ -27,12 +27,15 @@ class InitialSyncRestServlet(RestServlet):
     def on_GET(self, request):
         user = yield self.auth.get_user_by_req(request)
         with_feedback = "feedback" in request.args
+        trim_events = "raw" not in request.args
         pagination_config = PaginationConfig.from_request(request)
         handler = self.handlers.message_handler
         content = yield handler.snapshot_all_rooms(
             user_id=user.to_string(),
             pagin_config=pagination_config,
-            feedback=with_feedback)
+            feedback=with_feedback,
+            trim_events=trim_events
+        )
 
         defer.returnValue((200, content))
 
@@ -149,8 +149,8 @@ class BaseHomeServer(object):
         object."""
         return EventID.from_string(s)
 
-    def serialize_event(self, e):
-        return serialize_event(self, e)
+    def serialize_event(self, e, remove_data=True):
+        return serialize_event(self, e, remove_data)
 
     def get_ip_from_request(self, request):
         # May be an X-Forwarding-For header depending on config