Merge remote-tracking branch 'origin/release-v1.7.0' into matrix-org-hotfixes
commit 96b17d4e4f

CHANGES.md (+23)
@@ -1,3 +1,26 @@
+Synapse 1.7.0 (2019-12-13)
+==========================
+
+This release changes the default settings so that only local authenticated users can query the server's room directory. See the [upgrade notes](UPGRADE.rst#upgrading-to-v170) for details.
+
+Support for SQLite versions before 3.11 is now deprecated. A future release will refuse to start if used with an SQLite version before 3.11.
+
+Administrators are reminded that SQLite should not be used for production instances. Instructions for migrating to Postgres are available [here](docs/postgres.md). A future release of synapse will, by default, disable federation for servers using SQLite.
+
+No significant changes since 1.7.0rc2.
+
+
+Synapse 1.7.0rc2 (2019-12-11)
+=============================
+
+Bugfixes
+--------
+
+- Fix incorrect error message for invalid requests when setting user's avatar URL. ([\#6497](https://github.com/matrix-org/synapse/issues/6497))
+- Fix support for SQLite 3.7. ([\#6499](https://github.com/matrix-org/synapse/issues/6499))
+- Fix regression where sending email push would not work when using a pusher worker. ([\#6507](https://github.com/matrix-org/synapse/issues/6507), [\#6509](https://github.com/matrix-org/synapse/issues/6509))
+
+
 Synapse 1.7.0rc1 (2019-12-09)
 =============================
 
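The SQLite deprecation above is easy to check on an existing deployment: Python's standard-library `sqlite3` module reports the version of the SQLite library it is linked against. A minimal, Synapse-independent sketch:

```python
import sqlite3

# Version of the SQLite library linked into this Python build, e.g. (3, 31, 1).
# Synapse 1.7.0 deprecates anything older than 3.11.
if sqlite3.sqlite_version_info < (3, 11):
    print(
        "SQLite %s is deprecated for Synapse; a future release will refuse to start."
        % sqlite3.sqlite_version
    )
else:
    print("SQLite %s meets the 3.11 threshold." % sqlite3.sqlite_version)
```

As the notes above say, SQLite is still discouraged for production regardless of version; the check only tells you whether the deprecation applies.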
@@ -1 +0,0 @@
-Fix pusher worker failing because it can't retrieve retention policies for rooms.
@@ -1,3 +1,9 @@
+matrix-synapse-py3 (1.7.0) stable; urgency=medium
+
+  * New synapse release 1.7.0.
+
+ -- Synapse Packaging team <packages@matrix.org>  Fri, 13 Dec 2019 10:19:38 +0000
+
 matrix-synapse-py3 (1.6.1) stable; urgency=medium
 
   * New synapse release 1.6.1.
@@ -36,7 +36,7 @@ try:
 except ImportError:
     pass
 
-__version__ = "1.7.0rc1"
+__version__ = "1.7.0"
 
 if bool(os.environ.get("SYNAPSE_TEST_PATCH_LOG_CONTEXTS", False)):
     # We import here so that we don't have to install a bunch of deps when
@@ -15,6 +15,7 @@
 
 """ This module contains REST servlets to do with profile: /profile/<paths> """
 
+from synapse.api.errors import Codes, SynapseError
 from synapse.http.servlet import RestServlet, parse_json_object_from_request
 from synapse.rest.client.v2_alpha._base import client_patterns
 from synapse.types import UserID
@@ -103,11 +104,15 @@ class ProfileAvatarURLRestServlet(RestServlet):
 
         content = parse_json_object_from_request(request)
         try:
-            new_name = content["avatar_url"]
-        except Exception:
-            return 400, "Unable to parse name"
+            new_avatar_url = content["avatar_url"]
+        except KeyError:
+            raise SynapseError(
+                400, "Missing key 'avatar_url'", errcode=Codes.MISSING_PARAM
+            )
 
-        await self.profile_handler.set_avatar_url(user, requester, new_name, is_admin)
+        await self.profile_handler.set_avatar_url(
+            user, requester, new_avatar_url, is_admin
+        )
 
         return 200, {}
 
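The servlet change above (apparently the avatar-URL handler in `synapse/rest/client/v2_alpha/profile.py`, judging from the imports in the previous hunk) narrows a blanket `except Exception` that returned a bare `400, "Unable to parse name"` into a `KeyError` handler that raises a structured `SynapseError` with the Matrix `M_MISSING_PARAM` error code. A standalone sketch of the resulting behaviour, using stand-in classes rather than the real Synapse ones:

```python
class FakeSynapseError(Exception):
    """Stand-in for synapse.api.errors.SynapseError (illustration only)."""

    def __init__(self, code, msg, errcode):
        super().__init__(msg)
        self.code = code
        self.msg = msg
        self.errcode = errcode

    def error_dict(self):
        # Matrix clients expect an error body of this shape.
        return {"errcode": self.errcode, "error": self.msg}


def extract_avatar_url(content):
    try:
        return content["avatar_url"]
    except KeyError:
        # Only the missing-key case becomes a client error; anything else
        # now propagates instead of being swallowed.
        raise FakeSynapseError(400, "Missing key 'avatar_url'", "M_MISSING_PARAM")


try:
    extract_avatar_url({"displayname": "Alice"})
except FakeSynapseError as e:
    print(e.code, e.error_dict())
    # 400 {'errcode': 'M_MISSING_PARAM', 'error': "Missing key 'avatar_url'"}
```

This is the change behind the changelog entry above about the incorrect error message when setting a user's avatar URL (#6497).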
@@ -1039,20 +1039,25 @@ class EventsStore(
             },
         )
 
-    @defer.inlineCallbacks
-    def _censor_redactions(self):
+    async def _censor_redactions(self):
         """Censors all redactions older than the configured period that haven't
         been censored yet.
 
         By censor we mean update the event_json table with the redacted event.
-
-        Returns:
-            Deferred
         """
 
         if self.hs.config.redaction_retention_period is None:
             return
 
+        if not (
+            await self.db.updates.has_completed_background_update(
+                "redactions_have_censored_ts_idx"
+            )
+        ):
+            # We don't want to run this until the appropriate index has been
+            # created.
+            return
+
         before_ts = self._clock.time_msec() - self.hs.config.redaction_retention_period
 
         # We fetch all redactions that:
@@ -1074,15 +1079,15 @@ class EventsStore(
             LIMIT ?
         """
 
-        rows = yield self.db.execute(
+        rows = await self.db.execute(
             "_censor_redactions_fetch", None, sql, before_ts, 100
         )
 
         updates = []
 
         for redaction_id, event_id in rows:
-            redaction_event = yield self.get_event(redaction_id, allow_none=True)
-            original_event = yield self.get_event(
+            redaction_event = await self.get_event(redaction_id, allow_none=True)
+            original_event = await self.get_event(
                 event_id, allow_rejected=True, allow_none=True
             )
 
@@ -1115,7 +1120,7 @@ class EventsStore(
                     updatevalues={"have_censored": True},
                 )
 
-        yield self.db.runInteraction("_update_censor_txn", _update_censor_txn)
+        await self.db.runInteraction("_update_censor_txn", _update_censor_txn)
 
     def _censor_event_txn(self, txn, event_id, pruned_json):
         """Censor an event by replacing its JSON in the event_json table with the
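The three hunks above convert `_censor_redactions` from Twisted's `@defer.inlineCallbacks`/`yield` style to a native `async def` coroutine, and drop the now-redundant `Returns: Deferred` from the docstring. The general shape of that conversion, sketched with toy functions (this assumes Twisted is installed; `FakeDb` and the function names are illustrative, not Synapse code):

```python
from twisted.internet import defer, task


class FakeDb:
    """Toy stand-in for a database layer whose methods return Deferreds."""

    def execute(self, sql):
        return defer.succeed([("row",)])


@defer.inlineCallbacks
def old_style(db):
    # Old style: a generator decorated with inlineCallbacks, yielding Deferreds.
    rows = yield db.execute("SELECT 1")
    return rows


async def new_style(db):
    # New style: a native coroutine awaiting the same Deferreds.
    rows = await db.execute("SELECT 1")
    return rows


def main(reactor):
    db = FakeDb()
    d_old = old_style(db)
    # Callers that still expect a Deferred wrap the coroutine explicitly.
    d_new = defer.ensureDeferred(new_style(db))
    return defer.gatherResults([d_old, d_new]).addCallback(print)


task.react(main)
```

The other change in these hunks, the `has_completed_background_update("redactions_have_censored_ts_idx")` guard, simply makes the censorship loop a no-op until the index registered in the next hunk has been built.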
@@ -90,6 +90,14 @@ class EventsBackgroundUpdatesStore(SQLBaseStore):
             "event_store_labels", self._event_store_labels
         )
 
+        self.db.updates.register_background_index_update(
+            "redactions_have_censored_ts_idx",
+            index_name="redactions_have_censored_ts",
+            table="redactions",
+            columns=["received_ts"],
+            where_clause="NOT have_censored",
+        )
+
     @defer.inlineCallbacks
     def _background_reindex_fields_sender(self, progress, batch_size):
         target_min_stream_id = progress["target_min_stream_id_inclusive"]
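The hunk above registers a background update that builds the `redactions_have_censored_ts` index on `redactions(received_ts)`, restricted to rows `WHERE NOT have_censored`, so it is created in the background rather than during the schema upgrade. The index itself is plain SQL; a minimal sketch against a toy table using the standard-library `sqlite3` module (partial indexes need SQLite 3.8.0 or newer, and the real table has more columns than shown here):

```python
import sqlite3

conn = sqlite3.connect(":memory:")
conn.executescript(
    """
    -- Toy stand-in for the real redactions table: only the columns the index uses.
    CREATE TABLE redactions (
        event_id TEXT NOT NULL,
        have_censored BOOL NOT NULL DEFAULT 0,
        received_ts BIGINT
    );

    -- Partial index: only rows that still need censoring are indexed, so it
    -- shrinks as rows are flagged have_censored over time.
    CREATE INDEX redactions_have_censored_ts
        ON redactions(received_ts)
        WHERE NOT have_censored;
    """
)

conn.executemany(
    "INSERT INTO redactions (event_id, have_censored, received_ts) VALUES (?, ?, ?)",
    [("$a", 0, 100), ("$b", 1, 200), ("$c", 0, 300)],
)

# Roughly the shape of the query _censor_redactions issues (the full SQL is not
# shown in the hunks above): uncensored redactions older than a cutoff.
rows = conn.execute(
    "SELECT event_id FROM redactions "
    "WHERE NOT have_censored AND received_ts < ? "
    "ORDER BY received_ts ASC LIMIT 100",
    (250,),
).fetchall()
print(rows)  # [('$a',)]
```

The SQL hunks further down make the same shift on the schema side: the inline `CREATE INDEX` statements are removed, a `background_updates` row (`'redactions_have_censored_ts_idx'`) is inserted instead, and the old `redactions_have_censored` index on `event_id` is dropped.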
@@ -46,6 +46,11 @@ RatelimitOverride = collections.namedtuple(
 
 
 class RoomWorkerStore(SQLBaseStore):
+    def __init__(self, database: Database, db_conn, hs):
+        super(RoomWorkerStore, self).__init__(database, db_conn, hs)
+
+        self.config = hs.config
+
     def get_room(self, room_id):
         """Retrieve a room.
 
@@ -14,4 +14,3 @@
  */
 
 ALTER TABLE redactions ADD COLUMN have_censored BOOL NOT NULL DEFAULT false;
-CREATE INDEX redactions_have_censored ON redactions(event_id) WHERE not have_censored;
@@ -14,7 +14,9 @@
  */
 
 ALTER TABLE redactions ADD COLUMN received_ts BIGINT;
-CREATE INDEX redactions_have_censored_ts ON redactions(received_ts) WHERE not have_censored;
 
 INSERT INTO background_updates (update_name, progress_json) VALUES
   ('redactions_received_ts', '{}');
+
+INSERT INTO background_updates (update_name, progress_json) VALUES
+  ('redactions_have_censored_ts_idx', '{}');
@@ -0,0 +1,16 @@
+/* Copyright 2019 The Matrix.org Foundation C.I.C.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+DROP INDEX IF EXISTS redactions_have_censored;