Merge remote-tracking branch 'upstream/master' into feat-dockerfile

pull/2846/head
kaiyou 2018-03-17 16:02:08 +01:00
commit 757f1b5843
27 changed files with 46 additions and 39 deletions

View File

@@ -1,3 +1,12 @@
Changes in synapse v0.26.1 (2018-03-15)
=======================================

Bug fixes:

* Fix bug where an invalid event caused server to stop functioning correctly,
  due to parsing and serializing bugs in ujson library.

Changes in synapse v0.26.0 (2018-01-05)
=======================================
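
The fix described above is what the rest of this diff implements: every ujson import is replaced with simplejson, and event content is round-tripped through the encoder before it is persisted (see the MessageHandler hunk further down). A minimal, self-contained sketch of that round-trip guard, assuming already-unfrozen content; the helper name is illustrative and not part of this commit:

import logging

import simplejson

logger = logging.getLogger(__name__)


def ensure_json_round_trips(content):
    # Serialise the content and immediately parse it back; anything that
    # cannot survive a JSON round trip raises here rather than being
    # persisted in a broken form.
    try:
        dump = simplejson.dumps(content)
        simplejson.loads(dump)
    except Exception:
        logger.exception("Failed to encode content: %r", content)
        raise
    return dump

The actual handler performs the same check on unfreeze(event.content) before writing the event to the database.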

View File

@@ -16,4 +16,4 @@
""" This is a reference implementation of a Matrix home server.
"""
__version__ = "0.26.0"
__version__ = "0.26.1"

View File

@@ -17,7 +17,7 @@ from synapse.storage.presence import UserPresenceState
from synapse.types import UserID, RoomID
from twisted.internet import defer
import ujson as json
import simplejson as json
import jsonschema
from jsonschema import FormatChecker

View File

@@ -13,7 +13,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import ujson as json
import simplejson as json
import logging
from canonicaljson import encode_canonical_json

View File

@@ -35,7 +35,7 @@ from canonicaljson import encode_canonical_json
import logging
import random
import ujson
import simplejson
logger = logging.getLogger(__name__)
@@ -561,8 +561,8 @@ class MessageHandler(BaseHandler):
# Ensure that we can round trip before trying to persist in db
try:
dump = ujson.dumps(unfreeze(event.content))
ujson.loads(dump)
dump = simplejson.dumps(unfreeze(event.content))
simplejson.loads(dump)
except Exception:
logger.exception("Failed to encode content: %r", event.content)
raise

View File

@@ -36,7 +36,7 @@ from twisted.web.util import redirectTo
import collections
import logging
import urllib
import ujson
import simplejson
logger = logging.getLogger(__name__)
@@ -370,8 +370,7 @@ def respond_with_json(request, code, json_object, send_cors=False,
if canonical_json or synapse.events.USE_FROZEN_DICTS:
json_bytes = encode_canonical_json(json_object)
else:
# ujson doesn't like frozen_dicts.
json_bytes = ujson.dumps(json_object, ensure_ascii=False)
json_bytes = simplejson.dumps(json_object)
return respond_with_json_bytes(
request, code, json_bytes,
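
The comment removed in this hunk ("ujson doesn't like frozen_dicts") recorded why the frozen-dict case is routed through encode_canonical_json in the branch above; that routing is unchanged, only the fallback encoder moves from ujson to simplejson (and the ensure_ascii=False argument goes with it). A rough sketch of the same branching logic, with USE_FROZEN_DICTS as a plain module flag standing in for synapse.events.USE_FROZEN_DICTS and an illustrative function name:

import simplejson
from canonicaljson import encode_canonical_json

USE_FROZEN_DICTS = False  # stand-in for synapse.events.USE_FROZEN_DICTS


def encode_response_body(json_object, canonical_json=False):
    # Canonical encoding also copes with frozen dicts, so it is used whenever
    # canonical output is requested or frozen dicts may be in play.
    if canonical_json or USE_FROZEN_DICTS:
        return encode_canonical_json(json_object)  # bytes
    # Ordinary dicts/lists/strings go through simplejson.
    return simplejson.dumps(json_object)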

View File

@@ -19,7 +19,7 @@ allowed to be sent by which side.
"""
import logging
import ujson as json
import simplejson as json
logger = logging.getLogger(__name__)

View File

@@ -29,7 +29,7 @@ from synapse.http.servlet import (
import logging
import urllib
import ujson as json
import simplejson as json
logger = logging.getLogger(__name__)

View File

@@ -33,7 +33,7 @@ from ._base import set_timeline_upper_limit
import itertools
import logging
import ujson as json
import simplejson as json
logger = logging.getLogger(__name__)

View File

@@ -35,7 +35,7 @@ import os
import re
import fnmatch
import cgi
import ujson as json
import simplejson as json
import urlparse
import itertools
import datetime

View File

@@ -18,7 +18,7 @@ from twisted.internet import defer
from synapse.util.caches.descriptors import cached, cachedList, cachedInlineCallbacks
import ujson as json
import simplejson as json
import logging
logger = logging.getLogger(__name__)

View File

@@ -19,7 +19,7 @@ from . import engines
from twisted.internet import defer
import ujson as json
import simplejson as json
import logging
logger = logging.getLogger(__name__)

View File

@@ -14,7 +14,7 @@
# limitations under the License.
import logging
import ujson
import simplejson
from twisted.internet import defer
@@ -85,7 +85,7 @@ class DeviceInboxStore(BackgroundUpdateStore):
)
rows = []
for destination, edu in remote_messages_by_destination.items():
edu_json = ujson.dumps(edu)
edu_json = simplejson.dumps(edu)
rows.append((destination, stream_id, now_ms, edu_json))
txn.executemany(sql, rows)
@@ -177,7 +177,7 @@ class DeviceInboxStore(BackgroundUpdateStore):
" WHERE user_id = ?"
)
txn.execute(sql, (user_id,))
message_json = ujson.dumps(messages_by_device["*"])
message_json = simplejson.dumps(messages_by_device["*"])
for row in txn:
# Add the message for all devices for this user on this
# server.
@@ -199,7 +199,7 @@ class DeviceInboxStore(BackgroundUpdateStore):
# Only insert into the local inbox if the device exists on
# this server
device = row[0]
message_json = ujson.dumps(messages_by_device[device])
message_json = simplejson.dumps(messages_by_device[device])
messages_json_for_user[device] = message_json
if messages_json_for_user:
@@ -253,7 +253,7 @@ class DeviceInboxStore(BackgroundUpdateStore):
messages = []
for row in txn:
stream_pos = row[0]
messages.append(ujson.loads(row[1]))
messages.append(simplejson.loads(row[1]))
if len(messages) < limit:
stream_pos = current_stream_id
return (messages, stream_pos)
@@ -389,7 +389,7 @@ class DeviceInboxStore(BackgroundUpdateStore):
messages = []
for row in txn:
stream_pos = row[0]
messages.append(ujson.loads(row[1]))
messages.append(simplejson.loads(row[1]))
if len(messages) < limit:
stream_pos = current_stream_id
return (messages, stream_pos)

View File

@@ -13,7 +13,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import ujson as json
import simplejson as json
from twisted.internet import defer

View File

@@ -17,7 +17,7 @@ from twisted.internet import defer
from synapse.util.caches.descriptors import cached
from canonicaljson import encode_canonical_json
import ujson as json
import simplejson as json
from ._base import SQLBaseStore

View File

@@ -21,7 +21,7 @@ from synapse.types import RoomStreamToken
from .stream import lower_bound
import logging
import ujson as json
import simplejson as json
logger = logging.getLogger(__name__)

View File

@@ -38,7 +38,7 @@ from functools import wraps
import synapse.metrics
import logging
import ujson as json
import simplejson as json
# these are only included to make the type annotations work
from synapse.events import EventBase # noqa: F401
@@ -56,7 +56,6 @@ event_counter = metrics.register_counter(
def encode_json(json_object):
if USE_FROZEN_DICTS:
# ujson doesn't like frozen_dicts
return encode_canonical_json(json_object)
else:
return json.dumps(json_object, ensure_ascii=False)

View File

@@ -20,7 +20,7 @@ from synapse.util.caches.stream_change_cache import StreamChangeCache
from twisted.internet import defer
import logging
import ujson as json
import simplejson as json
logger = logging.getLogger(__name__)

View File

@@ -23,7 +23,7 @@ from .engines import PostgresEngine, Sqlite3Engine
import collections
import logging
import ujson as json
import simplejson as json
import re
logger = logging.getLogger(__name__)

View File

@@ -27,7 +27,7 @@ from synapse.api.constants import Membership, EventTypes
from synapse.types import get_domain_from_id
import logging
import ujson as json
import simplejson as json
logger = logging.getLogger(__name__)

View File

@@ -17,7 +17,7 @@ import logging
from synapse.storage.prepare_database import get_statements
from synapse.storage.engines import PostgresEngine, Sqlite3Engine
import ujson
import simplejson
logger = logging.getLogger(__name__)
@@ -66,7 +66,7 @@ def run_create(cur, database_engine, *args, **kwargs):
"max_stream_id_exclusive": max_stream_id + 1,
"rows_inserted": 0,
}
progress_json = ujson.dumps(progress)
progress_json = simplejson.dumps(progress)
sql = (
"INSERT into background_updates (update_name, progress_json)"

View File

@@ -16,7 +16,7 @@ import logging
from synapse.storage.prepare_database import get_statements
import ujson
import simplejson
logger = logging.getLogger(__name__)

View File

@@ -16,7 +16,7 @@ from synapse.storage.engines import PostgresEngine
from synapse.storage.prepare_database import get_statements
import logging
import ujson
import simplejson
logger = logging.getLogger(__name__)
@@ -49,7 +49,7 @@ def run_create(cur, database_engine, *args, **kwargs):
"rows_inserted": 0,
"have_added_indexes": False,
}
progress_json = ujson.dumps(progress)
progress_json = simplejson.dumps(progress)
sql = (
"INSERT into background_updates (update_name, progress_json)"

View File

@@ -15,7 +15,7 @@
from synapse.storage.prepare_database import get_statements
import logging
import ujson
import simplejson
logger = logging.getLogger(__name__)
@@ -44,7 +44,7 @@ def run_create(cur, database_engine, *args, **kwargs):
"max_stream_id_exclusive": max_stream_id + 1,
"rows_inserted": 0,
}
progress_json = ujson.dumps(progress)
progress_json = simplejson.dumps(progress)
sql = (
"INSERT into background_updates (update_name, progress_json)"

View File

@@ -21,7 +21,7 @@ from synapse.storage.engines import PostgresEngine, Sqlite3Engine
import logging
import re
import ujson as json
import simplejson as json
logger = logging.getLogger(__name__)

View File

@@ -17,7 +17,7 @@ from ._base import SQLBaseStore
from synapse.util.caches.descriptors import cached
from twisted.internet import defer
import ujson as json
import simplejson as json
import logging
logger = logging.getLogger(__name__)

View File

@@ -23,7 +23,7 @@ from canonicaljson import encode_canonical_json
from collections import namedtuple
import logging
import ujson as json
import simplejson as json
logger = logging.getLogger(__name__)