2020-04-22 14:07:41 +02:00
|
|
|
# Copyright 2020 The Matrix.org Foundation C.I.C.
|
|
|
|
#
|
|
|
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
|
|
# you may not use this file except in compliance with the License.
|
|
|
|
# You may obtain a copy of the License at
|
|
|
|
#
|
|
|
|
# http://www.apache.org/licenses/LICENSE-2.0
|
|
|
|
#
|
|
|
|
# Unless required by applicable law or agreed to in writing, software
|
|
|
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
|
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
|
|
# See the License for the specific language governing permissions and
|
|
|
|
# limitations under the License.
|
|
|
|
|
|
|
|
import logging
|
2020-07-27 19:54:43 +02:00
|
|
|
from inspect import isawaitable
|
2022-02-08 17:03:08 +01:00
|
|
|
from typing import TYPE_CHECKING, Any, Generic, Optional, Type, TypeVar, cast
|
2020-04-22 14:07:41 +02:00
|
|
|
|
2021-02-11 17:06:29 +01:00
|
|
|
import attr
|
2020-04-22 14:07:41 +02:00
|
|
|
import txredisapi
|
2021-03-12 17:37:57 +01:00
|
|
|
from zope.interface import implementer
|
2020-04-22 14:07:41 +02:00
|
|
|
|
2021-03-11 19:35:09 +01:00
|
|
|
from twisted.internet.address import IPv4Address, IPv6Address
|
|
|
|
from twisted.internet.interfaces import IAddress, IConnector
|
|
|
|
from twisted.python.failure import Failure
|
|
|
|
|
2020-07-22 01:40:42 +02:00
|
|
|
from synapse.logging.context import PreserveLoggingContext, make_deferred_yieldable
|
|
|
|
from synapse.metrics.background_process_metrics import (
|
|
|
|
BackgroundProcessLoggingContext,
|
|
|
|
run_as_background_process,
|
2021-01-26 11:54:54 +01:00
|
|
|
wrap_as_background_process,
|
2020-07-22 01:40:42 +02:00
|
|
|
)
|
2020-04-22 14:07:41 +02:00
|
|
|
from synapse.replication.tcp.commands import (
|
|
|
|
Command,
|
|
|
|
ReplicateCommand,
|
|
|
|
parse_command_from_line,
|
|
|
|
)
|
2020-04-22 17:26:19 +02:00
|
|
|
from synapse.replication.tcp.protocol import (
|
2021-03-12 17:37:57 +01:00
|
|
|
IReplicationConnection,
|
2020-04-22 17:26:19 +02:00
|
|
|
tcp_inbound_commands_counter,
|
|
|
|
tcp_outbound_commands_counter,
|
|
|
|
)
|
2020-04-22 14:07:41 +02:00
|
|
|
|
|
|
|
if TYPE_CHECKING:
|
|
|
|
from synapse.replication.tcp.handler import ReplicationCommandHandler
|
|
|
|
from synapse.server import HomeServer
|
|
|
|
|
|
|
|
logger = logging.getLogger(__name__)
|
|
|
|
|
2021-02-11 17:06:29 +01:00
|
|
|
T = TypeVar("T")
|
|
|
|
V = TypeVar("V")
|
|
|
|
|
|
|
|
|
|
|
|
@attr.s
class ConstantProperty(Generic[T, V]):
    """A descriptor that returns the given constant, ignoring attempts to set
    it.

    Used below to pin `SynapseRedisFactory.continueTrying` to a fixed value
    even when other code tries to assign to the attribute.
    """

    # The fixed value returned by every read of the attribute.
    constant: V = attr.ib()

    def __get__(self, obj: Optional[T], objtype: Optional[Type[T]] = None) -> V:
        return self.constant

    def __set__(self, obj: Optional[T], value: V) -> None:
        # Deliberately a no-op: assignments to the attribute are silently
        # ignored, keeping the constant in place.
        pass
|
|
|
|
|
2020-04-22 14:07:41 +02:00
|
|
|
|
2021-03-12 17:37:57 +01:00
|
|
|
@implementer(IReplicationConnection)
class RedisSubscriber(txredisapi.SubscriberProtocol):
    """Connection to redis subscribed to replication stream.

    This class fulfils two functions:

    (a) it implements the twisted Protocol API, where it handles the SUBSCRIBEd redis
    connection, parsing *incoming* messages into replication commands, and passing them
    to `ReplicationCommandHandler`

    (b) it implements the IReplicationConnection API, where it sends *outgoing* commands
    onto outbound_redis_connection.

    Due to the vagaries of `txredisapi` we don't want to have a custom
    constructor, so instead we expect the defined attributes below to be set
    immediately after initialisation.

    Attributes:
        synapse_handler: The command handler to handle incoming commands.
        synapse_stream_name: The *redis* stream name to subscribe to and publish
            from (not anything to do with Synapse replication streams).
        synapse_outbound_redis_connection: The connection to redis to use to send
            commands.
    """

    synapse_handler: "ReplicationCommandHandler"
    synapse_stream_name: str
    synapse_outbound_redis_connection: txredisapi.ConnectionHandler

    def __init__(self, *args: Any, **kwargs: Any):
        super().__init__(*args, **kwargs)

        # a logcontext which we use for processing incoming commands. We declare it as a
        # background process so that the CPU stats get reported to prometheus.
        with PreserveLoggingContext():
            # thanks to `PreserveLoggingContext()`, the new logcontext is guaranteed to
            # capture the sentinel context as its containing context and won't prevent
            # GC of / unintentionally reactivate what would be the current context.
            self._logging_context = BackgroundProcessLoggingContext(
                "replication_command_handler"
            )

    def connectionMade(self) -> None:
        """Called by twisted when the TCP connection is established; kicks off
        the SUBSCRIBE/REPLICATE handshake as a background process."""
        logger.info("Connected to redis")
        super().connectionMade()
        run_as_background_process("subscribe-replication", self._send_subscribe)

    async def _send_subscribe(self) -> None:
        """Subscribe to our redis stream, then announce ourselves to the other
        end with a REPLICATE command and send out our current positions."""
        # it's important to make sure that we only send the REPLICATE command once we
        # have successfully subscribed to the stream - otherwise we might miss the
        # POSITION response sent back by the other end.
        logger.info("Sending redis SUBSCRIBE for %s", self.synapse_stream_name)
        await make_deferred_yieldable(self.subscribe(self.synapse_stream_name))
        logger.info(
            "Successfully subscribed to redis stream, sending REPLICATE command"
        )
        # Register with the handler before sending REPLICATE, so that it can
        # route responses back to this connection.
        self.synapse_handler.new_connection(self)
        await self._async_send_command(ReplicateCommand())
        logger.info("REPLICATE successfully sent")

        # We send out our positions when there is a new connection in case the
        # other side missed updates. We do this for Redis connections as the
        # other side won't know we've connected and so won't issue a REPLICATE.
        self.synapse_handler.send_positions_to_connection(self)

    def messageReceived(self, pattern: str, channel: str, message: str) -> None:
        """Received a message from redis."""
        # Enter our long-lived background-process logcontext while handling the
        # message so CPU usage is attributed correctly.
        with PreserveLoggingContext(self._logging_context):
            self._parse_and_dispatch_message(message)

    def _parse_and_dispatch_message(self, message: str) -> None:
        """Parse a raw line from redis into a Command and dispatch it.

        Parse failures are logged and swallowed rather than propagated, so one
        bad line doesn't take down the subscription.
        """
        if message.strip() == "":
            # Ignore blank lines
            return

        try:
            cmd = parse_command_from_line(message)
        except Exception:
            logger.exception(
                "Failed to parse replication line: %r",
                message,
            )
            return

        # We use "redis" as the name here as we don't have 1:1 connections to
        # remote instances.
        tcp_inbound_commands_counter.labels(cmd.NAME, "redis").inc()

        self.handle_command(cmd)

    def handle_command(self, cmd: Command) -> None:
        """Handle a command we have received over the replication stream.

        Delegates to `self.synapse_handler.on_<COMMAND>` (which can optionally
        return an Awaitable).

        Args:
            cmd: received command
        """

        cmd_func = getattr(self.synapse_handler, "on_%s" % (cmd.NAME,), None)
        if not cmd_func:
            logger.warning("Unhandled command: %r", cmd)
            return

        # NOTE: `self` is passed as the connection argument to the handler.
        res = cmd_func(self, cmd)

        # the handler might be a coroutine: fire it off as a background process
        # if so.

        if isawaitable(res):
            run_as_background_process(
                "replication-" + cmd.get_logcontext_id(), lambda: res
            )

    def connectionLost(self, reason: Failure) -> None:  # type: ignore[override]
        """Called by twisted when the connection drops; deregisters from the
        handler and finishes our logging context."""
        logger.info("Lost connection to redis")
        super().connectionLost(reason)
        self.synapse_handler.lost_connection(self)

        # mark the logging context as finished by triggering `__exit__()`
        with PreserveLoggingContext():
            with self._logging_context:
                pass
            # the sentinel context is now active, which may not be correct.
            # PreserveLoggingContext() will restore the correct logging context.

    def send_command(self, cmd: Command) -> None:
        """Send a command if connection has been established.

        Fire-and-forget: the actual publish happens in a background process.

        Args:
            cmd: The command to send
        """
        run_as_background_process(
            "send-cmd", self._async_send_command, cmd, bg_start_span=False
        )

    async def _async_send_command(self, cmd: Command) -> None:
        """Encode a replication command and send it over our outbound connection"""
        string = "%s %s" % (cmd.NAME, cmd.to_line())
        # Commands are line-delimited, so an embedded newline would corrupt the
        # stream; fail loudly instead.
        if "\n" in string:
            raise Exception("Unexpected newline in command: %r", string)

        encoded_string = string.encode("utf-8")

        # We use "redis" as the name here as we don't have 1:1 connections to
        # remote instances.
        tcp_outbound_commands_counter.labels(cmd.NAME, "redis").inc()

        await make_deferred_yieldable(
            self.synapse_outbound_redis_connection.publish(
                self.synapse_stream_name, encoded_string
            )
        )
|
|
|
|
|
|
|
|
|
|
|
|
class SynapseRedisFactory(txredisapi.RedisFactory):
    """A subclass of RedisFactory that periodically sends pings to ensure that
    we detect dead connections.
    """

    # We want to *always* retry connecting, txredisapi will stop if there is a
    # failure during certain operations, e.g. during AUTH.
    continueTrying = cast(bool, ConstantProperty(True))

    def __init__(
        self,
        hs: "HomeServer",
        uuid: str,
        dbid: Optional[int],
        poolsize: int,
        isLazy: bool = False,
        handler: Type = txredisapi.ConnectionHandler,
        charset: str = "utf-8",
        password: Optional[str] = None,
        replyTimeout: int = 30,
        convertNumbers: Optional[int] = True,
    ):
        """Forward the connection parameters to the txredisapi base factory and
        schedule a periodic ping of every pooled connection.

        Args:
            hs: the HomeServer, used only for its clock here.
            Remaining arguments are passed straight through to
            `txredisapi.RedisFactory` unchanged.
        """
        super().__init__(
            uuid=uuid,
            dbid=dbid,
            poolsize=poolsize,
            isLazy=isLazy,
            handler=handler,
            charset=charset,
            password=password,
            replyTimeout=replyTimeout,
            convertNumbers=convertNumbers,
        )

        # Ping every 30 seconds so that dead connections are noticed promptly
        # (see replyTimeout above for how long a ping may take to fail).
        hs.get_clock().looping_call(self._send_ping, 30 * 1000)

    @wrap_as_background_process("redis_ping")
    async def _send_ping(self) -> None:
        """Ping each pooled connection; failures are logged, not raised."""
        # `self.pool` is maintained by the txredisapi base class — assumed to
        # iterate the live connections; confirm against txredisapi if changed.
        for connection in self.pool:
            try:
                await make_deferred_yieldable(connection.ping())
            except Exception:
                logger.warning("Failed to send ping to a redis connection")

    # ReconnectingClientFactory has some logging (if you enable `self.noisy`), but
    # it's rubbish. We add our own here.

    def startedConnecting(self, connector: IConnector) -> None:
        logger.info(
            "Connecting to redis server %s", format_address(connector.getDestination())
        )
        super().startedConnecting(connector)

    def clientConnectionFailed(self, connector: IConnector, reason: Failure) -> None:
        logger.info(
            "Connection to redis server %s failed: %s",
            format_address(connector.getDestination()),
            reason.value,
        )
        super().clientConnectionFailed(connector, reason)

    def clientConnectionLost(self, connector: IConnector, reason: Failure) -> None:
        logger.info(
            "Connection to redis server %s lost: %s",
            format_address(connector.getDestination()),
            reason.value,
        )
        super().clientConnectionLost(connector, reason)
|
|
|
|
|
|
|
|
|
|
|
|
def format_address(address: IAddress) -> str:
    """Render an address for log messages: "host:port" for IPv4/IPv6
    addresses, otherwise the address's own string form."""
    if not isinstance(address, (IPv4Address, IPv6Address)):
        return str(address)
    return f"{address.host}:{address.port}"
|
|
|
|
|
2021-01-26 11:54:54 +01:00
|
|
|
|
|
|
|
class RedisDirectTcpReplicationClientFactory(SynapseRedisFactory):
    """This is a reconnecting factory that connects to redis and immediately
    subscribes to a stream.

    Args:
        hs
        outbound_redis_connection: A connection to redis that will be used to
            send outbound commands (this is separate to the redis connection
            used to subscribe).
    """

    # Cap the reconnect backoff at 5 seconds.
    maxDelay = 5
    protocol = RedisSubscriber

    def __init__(
        self, hs: "HomeServer", outbound_redis_connection: txredisapi.ConnectionHandler
    ):

        super().__init__(
            hs,
            uuid="subscriber",
            dbid=None,
            poolsize=1,
            replyTimeout=30,
            password=hs.config.redis.redis_password,
        )

        # State handed to each protocol instance in `buildProtocol`.
        self.synapse_handler = hs.get_replication_command_handler()
        self.synapse_stream_name = hs.hostname
        self.synapse_outbound_redis_connection = outbound_redis_connection

    def buildProtocol(self, addr: IAddress) -> RedisSubscriber:
        proto = cast(RedisSubscriber, super().buildProtocol(addr))

        # We do this here rather than add to the constructor of `RedisSubscriber`
        # as to do so would involve overriding `buildProtocol` entirely, however
        # the base method does some other things than just instantiating the
        # protocol.
        proto.synapse_handler = self.synapse_handler
        proto.synapse_outbound_redis_connection = self.synapse_outbound_redis_connection
        proto.synapse_stream_name = self.synapse_stream_name

        return proto
|
2020-10-02 10:57:12 +02:00
|
|
|
|
|
|
|
|
|
|
|
def lazyConnection(
    hs: "HomeServer",
    host: str = "localhost",
    port: int = 6379,
    dbid: Optional[int] = None,
    reconnect: bool = True,
    password: Optional[str] = None,
    replyTimeout: int = 30,
) -> txredisapi.ConnectionHandler:
    """Creates a connection to Redis that is lazily set up and reconnects if the
    connection is lost.
    """

    factory = SynapseRedisFactory(
        hs,
        uuid=f"{host}:{port}",
        dbid=dbid,
        poolsize=1,
        isLazy=True,
        handler=txredisapi.ConnectionHandler,
        password=password,
        replyTimeout=replyTimeout,
    )
    # NOTE(review): `continueTrying` is a ConstantProperty on
    # SynapseRedisFactory, so this assignment is silently ignored — confirm
    # that `reconnect=False` being a no-op is intended.
    factory.continueTrying = reconnect

    hs.get_reactor().connectTCP(
        host,
        port,
        factory,
        timeout=30,
        bindAddress=None,
    )

    return factory.handler
|