Stop writing to column `user_id` of tables `profiles` and `user_filters` (#15787)

parent 677272caed
commit f25b0f8808

@@ -0,0 +1 @@
+Stop writing to column `user_id` of tables `profiles` and `user_filters`.
@@ -98,6 +98,8 @@ UNIQUE_INDEX_BACKGROUND_UPDATES = {
     "event_push_summary": "event_push_summary_unique_index2",
     "receipts_linearized": "receipts_linearized_unique_index",
     "receipts_graph": "receipts_graph_unique_index",
+    "profiles": "profiles_full_user_id_key_idx",
+    "user_filters": "full_users_filters_unique_idx",
 }
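
These two entries register `profiles` and `user_filters` in the map of tables whose unique index is still being built by a named background update; until that update completes, upserts against the table cannot rely on a native ON CONFLICT clause. A minimal sketch of how such a map is typically consulted (the helper name and `has_completed_background_update` call are assumptions about Synapse internals, not lines from this diff):

async def can_native_upsert(store, table: str) -> bool:
    # Tables listed in UNIQUE_INDEX_BACKGROUND_UPDATES must fall back to an
    # emulated, retrying upsert until the named background update has built
    # the unique index that a native upsert would conflict against.
    update_name = UNIQUE_INDEX_BACKGROUND_UPDATES.get(table)
    if update_name is None:
        return True
    return await store.db_pool.updates.has_completed_background_update(update_name)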

@@ -15,7 +15,7 @@
 # limitations under the License.

 import logging
-from typing import TYPE_CHECKING, List, Optional, Tuple, cast
+from typing import TYPE_CHECKING, List, Optional, Tuple, Union, cast

 from synapse.api.constants import Direction
 from synapse.config.homeserver import HomeServerConfig

@@ -196,7 +196,7 @@ class DataStore(
             txn: LoggingTransaction,
         ) -> Tuple[List[JsonDict], int]:
             filters = []
-            args = [self.hs.config.server.server_name]
+            args: List[Union[str, int]] = []

             # Set ordering
             order_by_column = UserSortOrder(order_by).value
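
The server name only sat in `args` to feed the `?` placeholder of the old profiles join (rewritten in the following hunk), so the list now starts empty; the `Union[str, int]` annotation, enabled by the new `typing` import above, reflects that later code appends both string filters and integer pagination binds. An illustrative sketch of the kinds of values that end up in the list (`search_term`, `limit` and `start` are hypothetical names, not the exact code):

args: List[Union[str, int]] = []
args.append(f"@{search_term}%")  # a string bind for a hypothetical name filter
args.extend([limit, start])      # integer binds for pagination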

@@ -263,7 +263,7 @@ class DataStore(

         sql_base = f"""
                 FROM users as u
-                LEFT JOIN profiles AS p ON u.name = '@' || p.user_id || ':' || ?
+                LEFT JOIN profiles AS p ON u.name = p.full_user_id
                 LEFT JOIN erased_users AS eu ON u.name = eu.user_id
                 {where_clause}
                 """

@@ -188,14 +188,13 @@ class FilteringWorkerStore(SQLBaseStore):
             filter_id = max_id + 1

             sql = (
-                "INSERT INTO user_filters (full_user_id, user_id, filter_id, filter_json)"
-                "VALUES(?, ?, ?, ?)"
+                "INSERT INTO user_filters (full_user_id, filter_id, filter_json)"
+                "VALUES(?, ?, ?)"
             )
             txn.execute(
                 sql,
                 (
                     user_id.to_string(),
-                    user_id.localpart,
                     filter_id,
                     bytearray(def_json),
                 ),
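
The localpart is dropped from both the column list and the bound parameters, so the insert now binds exactly three values. Roughly what the executed statement looks like with concrete values (illustrative only; the MXID, filter id and JSON are made up):

txn.execute(
    "INSERT INTO user_filters (full_user_id, filter_id, filter_json)"
    "VALUES(?, ?, ?)",
    ("@alice:example.com", 7, bytearray(b'{"room": {}}')),
)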

@@ -173,10 +173,9 @@ class ProfileWorkerStore(SQLBaseStore):
         )

     async def create_profile(self, user_id: UserID) -> None:
-        user_localpart = user_id.localpart
         await self.db_pool.simple_insert(
             table="profiles",
-            values={"user_id": user_localpart, "full_user_id": user_id.to_string()},
+            values={"full_user_id": user_id.to_string()},
             desc="create_profile",
         )

@@ -191,13 +190,11 @@ class ProfileWorkerStore(SQLBaseStore):
             new_displayname: The new display name. If this is None, the user's display
                 name is removed.
         """
-        user_localpart = user_id.localpart
         await self.db_pool.simple_upsert(
             table="profiles",
-            keyvalues={"user_id": user_localpart},
+            keyvalues={"full_user_id": user_id.to_string()},
             values={
                 "displayname": new_displayname,
-                "full_user_id": user_id.to_string(),
             },
             desc="set_profile_displayname",
         )

@@ -213,11 +210,10 @@ class ProfileWorkerStore(SQLBaseStore):
             new_avatar_url: The new avatar URL. If this is None, the user's avatar is
                 removed.
         """
-        user_localpart = user_id.localpart
         await self.db_pool.simple_upsert(
             table="profiles",
-            keyvalues={"user_id": user_localpart},
-            values={"avatar_url": new_avatar_url, "full_user_id": user_id.to_string()},
+            keyvalues={"full_user_id": user_id.to_string()},
+            values={"avatar_url": new_avatar_url},
             desc="set_profile_avatar_url",
         )
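
All three profile writes now key rows solely on `full_user_id`, which the `profiles_full_user_id_key_idx` background update registered at the top of this diff makes safely unique. Once that index exists, `simple_upsert` can use a native upsert; a hedged sketch of approximately the SQL it reduces to for the displayname case (Postgres form, written with Synapse's `?` placeholder convention, not the exact generated statement):

UPSERT_SQL = (
    "INSERT INTO profiles (full_user_id, displayname) VALUES (?, ?) "
    "ON CONFLICT (full_user_id) DO UPDATE SET displayname = EXCLUDED.displayname"
)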

@@ -12,7 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

-SCHEMA_VERSION = 78  # remember to update the list below when updating
+SCHEMA_VERSION = 79  # remember to update the list below when updating
 """Represents the expectations made by the codebase about the database schema

 This should be incremented whenever the codebase changes its requirements on the

@@ -106,6 +106,9 @@ Changes in SCHEMA_VERSION = 77

 Changes in SCHEMA_VERSION = 78
     - Validate check (full_user_id IS NOT NULL) on tables profiles and user_filters
+
+Changes in SCHEMA_VERSION = 79
+    - We no longer write to column user_id of tables profiles and user_filters
 """

@@ -118,7 +121,9 @@ SCHEMA_COMPAT_VERSION = (
     #
     # insertions to the column `full_user_id` of tables profiles and user_filters can no
     # longer be null
-    76
+    #
+    # we no longer write to column `user_id` of tables profiles and user_filters
+    78
 )
 """Limit on how far the synapse codebase can be rolled back without breaking db compat

@@ -0,0 +1,50 @@
+from synapse.storage.database import LoggingTransaction
+from synapse.storage.engines import BaseDatabaseEngine, PostgresEngine
+
+
+def run_create(cur: LoggingTransaction, database_engine: BaseDatabaseEngine) -> None:
+    """
+    Update to drop the NOT NULL constraint on column user_id so that we can cease to
+    write to it without inserts to other columns triggering the constraint
+    """
+
+    if isinstance(database_engine, PostgresEngine):
+        drop_sql = """
+        ALTER TABLE profiles ALTER COLUMN user_id DROP NOT NULL
+        """
+        cur.execute(drop_sql)
+    else:
+        # irritatingly in SQLite we need to rewrite the table to drop the constraint.
+        cur.execute("DROP TABLE IF EXISTS temp_profiles")
+
+        create_sql = """
+        CREATE TABLE temp_profiles (
+            full_user_id text NOT NULL,
+            user_id text,
+            displayname text,
+            avatar_url text,
+            UNIQUE (full_user_id),
+            UNIQUE (user_id)
+        )
+        """
+        cur.execute(create_sql)
+
+        copy_sql = """
+        INSERT INTO temp_profiles (
+            user_id,
+            displayname,
+            avatar_url,
+            full_user_id)
+            SELECT user_id, displayname, avatar_url, full_user_id FROM profiles
+        """
+        cur.execute(copy_sql)
+
+        drop_sql = """
+        DROP TABLE profiles
+        """
+        cur.execute(drop_sql)
+
+        rename_sql = """
+        ALTER TABLE temp_profiles RENAME to profiles
+        """
+        cur.execute(rename_sql)

@@ -0,0 +1,54 @@
+from synapse.storage.database import LoggingTransaction
+from synapse.storage.engines import BaseDatabaseEngine, PostgresEngine
+
+
+def run_create(cur: LoggingTransaction, database_engine: BaseDatabaseEngine) -> None:
+    """
+    Update to drop the NOT NULL constraint on column user_id so that we can cease to
+    write to it without inserts to other columns triggering the constraint
+    """
+    if isinstance(database_engine, PostgresEngine):
+        drop_sql = """
+        ALTER TABLE user_filters ALTER COLUMN user_id DROP NOT NULL
+        """
+        cur.execute(drop_sql)
+
+    else:
+        # irritatingly in SQLite we need to rewrite the table to drop the constraint.
+        cur.execute("DROP TABLE IF EXISTS temp_user_filters")
+
+        create_sql = """
+        CREATE TABLE temp_user_filters (
+            full_user_id text NOT NULL,
+            user_id text,
+            filter_id bigint NOT NULL,
+            filter_json bytea NOT NULL
+        )
+        """
+        cur.execute(create_sql)
+
+        index_sql = """
+        CREATE UNIQUE INDEX IF NOT EXISTS user_filters_full_user_id_unique ON
+        temp_user_filters (full_user_id, filter_id)
+        """
+        cur.execute(index_sql)
+
+        copy_sql = """
+        INSERT INTO temp_user_filters (
+            user_id,
+            filter_id,
+            filter_json,
+            full_user_id)
+            SELECT user_id, filter_id, filter_json, full_user_id FROM user_filters
+        """
+        cur.execute(copy_sql)
+
+        drop_sql = """
+        DROP TABLE user_filters
+        """
+        cur.execute(drop_sql)
+
+        rename_sql = """
+        ALTER TABLE temp_user_filters RENAME to user_filters
+        """
+        cur.execute(rename_sql)
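
Unlike the profiles rewrite, the rebuilt `user_filters` table declares no inline UNIQUE constraints; the composite uniqueness over `(full_user_id, filter_id)` is restored as an explicit index before the copy, and SQLite carries that index across the final RENAME. An illustrative check that the index survived (a sketch for a test, not part of the migration):

cur.execute(
    "SELECT name FROM sqlite_master"
    " WHERE type = 'index' AND tbl_name = 'user_filters'"
)
# expect 'user_filters_full_user_id_unique' among the results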

@@ -15,8 +15,6 @@
 from twisted.test.proto_helpers import MemoryReactor

 from synapse.server import HomeServer
-from synapse.storage.database import LoggingTransaction
-from synapse.storage.engines import PostgresEngine
 from synapse.types import UserID
 from synapse.util import Clock

@@ -64,64 +62,3 @@ class ProfileStoreTestCase(unittest.HomeserverTestCase):
         self.assertIsNone(
             self.get_success(self.store.get_profile_avatar_url(self.u_frank))
         )
-
-    def test_profiles_bg_migration(self) -> None:
-        """
-        Test background job that copies entries from column user_id to full_user_id, adding
-        the hostname in the process.
-        """
-        updater = self.hs.get_datastores().main.db_pool.updates
-
-        # drop the constraint so we can insert nulls in full_user_id to populate the test
-        if isinstance(self.store.database_engine, PostgresEngine):
-
-            def f(txn: LoggingTransaction) -> None:
-                txn.execute(
-                    "ALTER TABLE profiles DROP CONSTRAINT full_user_id_not_null"
-                )
-
-            self.get_success(self.store.db_pool.runInteraction("", f))
-
-        for i in range(0, 70):
-            self.get_success(
-                self.store.db_pool.simple_insert(
-                    "profiles",
-                    {"user_id": f"hello{i:02}"},
-                )
-            )
-
-        # re-add the constraint so that when it's validated it actually exists
-        if isinstance(self.store.database_engine, PostgresEngine):
-
-            def f(txn: LoggingTransaction) -> None:
-                txn.execute(
-                    "ALTER TABLE profiles ADD CONSTRAINT full_user_id_not_null CHECK (full_user_id IS NOT NULL) NOT VALID"
-                )
-
-            self.get_success(self.store.db_pool.runInteraction("", f))
-
-        self.get_success(
-            self.store.db_pool.simple_insert(
-                "background_updates",
-                values={
-                    "update_name": "populate_full_user_id_profiles",
-                    "progress_json": "{}",
-                },
-            )
-        )
-
-        self.get_success(
-            updater.run_background_updates(False),
-        )
-
-        expected_values = []
-        for i in range(0, 70):
-            expected_values.append((f"@hello{i:02}:{self.hs.hostname}",))
-
-        res = self.get_success(
-            self.store.db_pool.execute(
-                "", None, "SELECT full_user_id from profiles ORDER BY full_user_id"
-            )
-        )
-        self.assertEqual(len(res), len(expected_values))
-        self.assertEqual(res, expected_values)

@@ -1,94 +0,0 @@
-# Copyright 2023 The Matrix.org Foundation C.I.C
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-
-from twisted.test.proto_helpers import MemoryReactor
-
-from synapse.server import HomeServer
-from synapse.storage.database import LoggingTransaction
-from synapse.storage.engines import PostgresEngine
-from synapse.util import Clock
-
-from tests import unittest
-
-
-class UserFiltersStoreTestCase(unittest.HomeserverTestCase):
-    """
-    Test background migration that copies entries from column user_id to full_user_id, adding
-    the hostname in the process.
-    """
-
-    def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None:
-        self.store = hs.get_datastores().main
-
-    def test_bg_migration(self) -> None:
-        updater = self.hs.get_datastores().main.db_pool.updates
-
-        # drop the constraint so we can insert nulls in full_user_id to populate the test
-        if isinstance(self.store.database_engine, PostgresEngine):
-
-            def f(txn: LoggingTransaction) -> None:
-                txn.execute(
-                    "ALTER TABLE user_filters DROP CONSTRAINT full_user_id_not_null"
-                )
-
-            self.get_success(self.store.db_pool.runInteraction("", f))
-
-        for i in range(0, 70):
-            self.get_success(
-                self.store.db_pool.simple_insert(
-                    "user_filters",
-                    {
-                        "user_id": f"hello{i:02}",
-                        "filter_id": i,
-                        "filter_json": bytearray(i),
-                    },
-                )
-            )
-
-        # re-add the constraint so that when it's validated it actually exists
-        if isinstance(self.store.database_engine, PostgresEngine):
-
-            def f(txn: LoggingTransaction) -> None:
-                txn.execute(
-                    "ALTER TABLE user_filters ADD CONSTRAINT full_user_id_not_null CHECK (full_user_id IS NOT NULL) NOT VALID"
-                )
-
-            self.get_success(self.store.db_pool.runInteraction("", f))
-
-        self.get_success(
-            self.store.db_pool.simple_insert(
-                "background_updates",
-                values={
-                    "update_name": "populate_full_user_id_user_filters",
-                    "progress_json": "{}",
-                },
-            )
-        )
-
-        self.get_success(
-            updater.run_background_updates(False),
-        )
-
-        expected_values = []
-        for i in range(0, 70):
-            expected_values.append((f"@hello{i:02}:{self.hs.hostname}",))
-
-        res = self.get_success(
-            self.store.db_pool.execute(
-                "", None, "SELECT full_user_id from user_filters ORDER BY full_user_id"
-            )
-        )
-        self.assertEqual(len(res), len(expected_values))
-        self.assertEqual(res, expected_values)