# synapse/util/file_consumer.py
# Copyright 2018 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import queue
from typing import BinaryIO, Optional, Union, cast
from twisted.internet import threads
from twisted.internet.defer import Deferred
from twisted.internet.interfaces import IPullProducer, IPushProducer
from synapse.logging.context import make_deferred_yieldable, run_in_background
from synapse.types import ISynapseReactor
class BackgroundFileConsumer:
    """A consumer that writes to a file like object. Supports both push
    and pull producers

    Args:
        file_obj: The file like object to write to. Closed when
            finished.
        reactor: the Twisted reactor to use
    """

    # For PushProducers pause if we have this many unwritten slices
    _PAUSE_ON_QUEUE_SIZE = 5
    # And resume once the size of the queue is less than this
    _RESUME_ON_QUEUE_SIZE = 2

    def __init__(self, file_obj: BinaryIO, reactor: ISynapseReactor) -> None:
        self._file_obj: BinaryIO = file_obj

        self._reactor: ISynapseReactor = reactor

        # Producer we're registered with
        self._producer: Optional[Union[IPushProducer, IPullProducer]] = None

        # True if PushProducer, false if PullProducer
        self.streaming = False

        # For PushProducers, indicates whether we've paused the producer and
        # need to call resumeProducing before we get more data.
        self._paused_producer = False

        # Queue of slices of bytes to be written. When producer calls
        # unregister a final None is sent.
        self._bytes_queue: queue.Queue[Optional[bytes]] = queue.Queue()

        # Deferred that is resolved when finished writing
        self._finished_deferred: Optional[Deferred[None]] = None

        # If the _writer thread throws an exception it gets stored here.
        self._write_exception: Optional[Exception] = None

    def registerProducer(
        self, producer: Union[IPushProducer, IPullProducer], streaming: bool
    ) -> None:
        """Part of IConsumer interface

        Args:
            producer: the producer whose data will be passed to `write`
            streaming: True if push based producer, False if pull
                based.

        Raises:
            Exception: if a producer is already registered.
        """
        if self._producer:
            raise Exception("registerProducer called twice")

        self._producer = producer
        self.streaming = streaming
        # Kick off the writer thread. It runs until the producer is
        # unregistered (or an error occurs) and then closes the file.
        self._finished_deferred = run_in_background(
            threads.deferToThreadPool,
            self._reactor,
            self._reactor.getThreadPool(),
            self._writer,
        )
        if not streaming:
            # Pull producers only produce when explicitly asked.
            self._producer.resumeProducing()

    def unregisterProducer(self) -> None:
        """Part of IConsumer interface"""
        self._producer = None
        assert self._finished_deferred is not None
        if not self._finished_deferred.called:
            # Wake the writer thread so it notices the producer has gone
            # away, drains the queue and finishes.
            self._bytes_queue.put_nowait(None)

    def write(self, write_bytes: bytes) -> None:
        """Part of IConsumer interface

        Args:
            write_bytes: the bytes to queue for writing to the file.

        Raises:
            Exception: if the writer thread previously failed, or if the
                consumer has already finished.
        """
        # Re-raise (on the reactor thread) any error the writer thread hit.
        if self._write_exception:
            raise self._write_exception

        assert self._finished_deferred is not None
        if self._finished_deferred.called:
            raise Exception("consumer has closed")

        self._bytes_queue.put_nowait(write_bytes)

        # If this is a PushProducer and the queue is getting behind
        # then we pause the producer.
        if self.streaming and self._bytes_queue.qsize() >= self._PAUSE_ON_QUEUE_SIZE:
            self._paused_producer = True
            assert self._producer is not None
            # cast safe because `streaming` means this is an IPushProducer
            cast(IPushProducer, self._producer).pauseProducing()

    def _writer(self) -> None:
        """This is run in a background thread to write to the file."""
        try:
            # Keep going while we have a producer, then drain what's queued.
            while self._producer or not self._bytes_queue.empty():
                # If we've paused the producer check if we should resume the
                # producer.
                if self._producer and self._paused_producer:
                    if self._bytes_queue.qsize() <= self._RESUME_ON_QUEUE_SIZE:
                        self._reactor.callFromThread(self._resume_paused_producer)

                # Blocks until something is queued. (Named `chunk` rather
                # than `bytes` to avoid shadowing the builtin.)
                chunk = self._bytes_queue.get()

                # If we get a None (or empty list) then that's a signal used
                # to indicate we should check if we should stop.
                if chunk:
                    self._file_obj.write(chunk)

                # If its a pull producer then we need to explicitly ask for
                # more stuff.
                if not self.streaming and self._producer:
                    self._reactor.callFromThread(self._producer.resumeProducing)
        except Exception as e:
            # Stash the error so the next `write` call re-raises it on the
            # reactor thread.
            self._write_exception = e
            raise
        finally:
            self._file_obj.close()

    def wait(self) -> "Deferred[None]":
        """Returns a deferred that resolves when finished writing to file"""
        assert self._finished_deferred is not None
        return make_deferred_yieldable(self._finished_deferred)

    def _resume_paused_producer(self) -> None:
        """Gets called if we should resume producing after being paused"""
        if self._paused_producer and self._producer:
            self._paused_producer = False
            self._producer.resumeProducing()