Compare commits

...

8 Commits

Author SHA1 Message Date
Andrew Morgan ae6070bc12 Move event_cache_size to CacheConfig 2020-05-01 19:45:14 +01:00
Andrew Morgan fe890508cc Fix and clarify config comments 2020-05-01 19:43:31 +01:00
Andrew Morgan 485f17737b fix tests 2020-05-01 18:22:15 +01:00
Andrew Morgan c7f3bf66c2 Environment takes precedence over config values 2020-05-01 18:06:24 +01:00
Andrew Morgan 424215a0c3 Fix cache: config block parsing 2020-05-01 15:36:02 +01:00
Andrew Morgan 83cf583adf Allow creating an LruCache that's not affected by cache factor 2020-05-01 15:35:46 +01:00
Andrew Morgan fa56e16c85 sample config 2020-05-01 15:20:13 +01:00
Andrew Morgan 9e3a9ba4f5 Make config follow the guidelines 2020-05-01 15:12:12 +01:00
8 changed files with 110 additions and 55 deletions

View File

@@ -634,10 +634,6 @@ database:
args:
database: DATADIR/homeserver.db
# Number of events to cache in memory.
#
#event_cache_size: 10K
## Logging ##
@@ -1975,16 +1971,28 @@ opentracing:
# false
# Cache configuration
## Cache Configuration ##
# Caching can be configured through the following options.
#
# 'global_factor' controls the global cache factor. This overrides the
# "SYNAPSE_CACHE_FACTOR" environment variable.
#
# 'per_cache_factors' is a dictionary of cache name to cache factor for that
# individual cache.
#
#caches:
# global_factor: 0.5
# per_cache_factors:
# get_users_who_share_room_with_user: 2
# The number of events to cache in memory. Not affected by the
# caches.global_factor.
#
#event_cache_size: 10K
caches:
# Controls the global cache factor. This can be overridden by the
# "SYNAPSE_CACHE_FACTOR" environment variable.
#
# global_factor: 0.5
# A dictionary of cache name to cache factor for that individual
# cache. This can be overridden by environment variables
# comprised of "SYNAPSE_CACHE_FACTOR_" + the name of the cache in
# capital letters and underscores.
#
# Ex. SYNAPSE_CACHE_FACTOR_GET_USERS_WHO_SHARE_ROOM_WITH_USER=2
#
# per_cache_factors:
# get_users_who_share_room_with_user: 2

View File

@@ -70,19 +70,31 @@ class CacheConfig(Config):
def generate_config_section(self, **kwargs):
return """\
# Cache configuration
## Cache Configuration ##
# Caching can be configured through the following options.
#
# 'global_factor' controls the global cache factor. This overrides the
# "SYNAPSE_CACHE_FACTOR" environment variable.
#
# 'per_cache_factors' is a dictionary of cache name to cache factor for that
# individual cache.
#
#caches:
# global_factor: 0.5
# per_cache_factors:
# get_users_who_share_room_with_user: 2
# The number of events to cache in memory. Not affected by the
# caches.global_factor.
#
#event_cache_size: 10K
caches:
# Controls the global cache factor. This can be overridden by the
# "SYNAPSE_CACHE_FACTOR" environment variable.
#
# global_factor: 0.5
# A dictionary of cache name to cache factor for that individual
# cache. This can be overridden by environment variables
# comprised of "SYNAPSE_CACHE_FACTOR_" + the name of the cache in
# capital letters and underscores.
#
# Ex. SYNAPSE_CACHE_FACTOR_GET_USERS_WHO_SHARE_ROOM_WITH_USER=2
#
# per_cache_factors:
# get_users_who_share_room_with_user: 2
"""
def read_config(self, config, **kwargs):
@@ -91,7 +103,7 @@ class CacheConfig(Config):
)
self.cache_factors = {} # type: Dict[str, float]
cache_config = config.get("caches", {})
cache_config = config.get("caches") or {}
self.global_factor = cache_config.get(
"global_factor", properties.default_factor_size
)
@@ -101,17 +113,19 @@ class CacheConfig(Config):
# Set the global one so that it's reflected in new caches
properties.default_factor_size = self.global_factor
# Load cache factors from the environment, but override them with the
# ones in the config file if they exist
individual_factors = {
key[len(_CACHE_PREFIX) + 1 :].lower(): float(val)
for key, val in self._environ.items()
if key.startswith(_CACHE_PREFIX + "_")
}
individual_factors_config = cache_config.get("per_cache_factors", {}) or {}
if not isinstance(individual_factors_config, dict):
# Load cache factors from the config
individual_factors = cache_config.get("per_cache_factors", {}) or {}
if not isinstance(individual_factors, dict):
raise ConfigError("caches.per_cache_factors must be a dictionary")
individual_factors.update(individual_factors_config)
# Override factors from environment if necessary
individual_factors.update(
{
key[len(_CACHE_PREFIX) + 1 :].lower(): float(val)
for key, val in self._environ.items()
if key.startswith(_CACHE_PREFIX + "_")
}
)
for cache, factor in individual_factors.items():
if not isinstance(factor, (int, float)):

View File

@@ -68,10 +68,6 @@ database:
name: sqlite3
args:
database: %(database_path)s
# Number of events to cache in memory.
#
#event_cache_size: 10K
"""

View File

@@ -75,7 +75,10 @@ class EventsWorkerStore(SQLBaseStore):
super(EventsWorkerStore, self).__init__(database, db_conn, hs)
self._get_event_cache = Cache(
"*getEvent*", keylen=3, max_entries=hs.config.caches.event_cache_size
"*getEvent*",
keylen=3,
max_entries=hs.config.caches.event_cache_size,
apply_cache_factor_from_config=False,
)
self._event_fetch_lock = threading.Condition()

View File

@@ -88,7 +88,29 @@ class Cache(object):
"_pending_deferred_cache",
)
def __init__(self, name, max_entries=1000, keylen=1, tree=False, iterable=False):
def __init__(
self,
name: str,
max_entries: int = 1000,
keylen: int = 1,
tree: bool = False,
iterable: bool = False,
apply_cache_factor_from_config: bool = True,
):
"""
Args:
name: The name of the cache
max_entries: Maximum amount of entries that the cache will hold
keylen: The length of the tuple used as the cache key
tree: Use a TreeCache instead of a dict as the underlying cache type
iterable: If True, count each item in the cached object as an entry,
rather than each cached object
apply_cache_factor_from_config: Whether cache factors specified in the
config file affect `max_entries`
Returns:
Cache
"""
cache_type = TreeCache if tree else dict
self._pending_deferred_cache = cache_type()
@@ -98,6 +120,7 @@ class Cache(object):
cache_type=cache_type,
size_callback=(lambda d: len(d)) if iterable else None,
evicted_callback=self._on_evicted,
apply_cache_factor_from_config=apply_cache_factor_from_config,
)
self.name = name

View File

@@ -52,11 +52,11 @@ class ExpiringCache(object):
an item on access. Defaults to False.
iterable (bool): If true, the size is calculated by summing the
sizes of all entries, rather than the number of entries.
"""
self._cache_name = cache_name
self._original_max_size = max_len
self._max_size = int(max_len * cache_config.properties.default_factor_size)
self._clock = clock

View File

@@ -15,6 +15,7 @@
import threading
from functools import wraps
from typing import Callable, Optional, Type, Union
from synapse.config import cache as cache_config
from synapse.util.caches.treecache import TreeCache
@@ -52,17 +53,18 @@ class LruCache(object):
def __init__(
self,
max_size,
keylen=1,
cache_type=dict,
size_callback=None,
evicted_callback=None,
max_size: int,
keylen: int = 1,
cache_type: Type[Union[dict, TreeCache]] = dict,
size_callback: Optional[Callable] = None,
evicted_callback: Optional[Callable] = None,
apply_cache_factor_from_config: bool = True,
):
"""
Args:
max_size (int):
max_size: The maximum amount of entries the cache can hold
keylen (int):
keylen: The length of the tuple used as the cache key
cache_type (type):
type of underlying cache to be used. Typically one of dict
@@ -73,13 +75,22 @@ class LruCache(object):
evicted_callback (func(int)|None):
if not None, called on eviction with the size of the evicted
entry
apply_cache_factor_from_config (bool): If true, `max_size` will be
multiplied by a cache factor derived from the homeserver config
"""
cache = cache_type()
self.cache = cache # Used for introspection.
# Save the original max size, and apply the default size factor.
self._original_max_size = max_size
self.max_size = int(max_size * cache_config.properties.default_factor_size)
# We previously didn't apply the cache factor here, and as such some caches were
# not affected by the global cache factor. Add an option here to disable applying
# the cache factor when a cache is created
if apply_cache_factor_from_config:
self.max_size = int(max_size * cache_config.properties.default_factor_size)
else:
self.max_size = int(max_size)
list_root = _Node(None, None, None, None)
list_root.next_node = list_root

View File

@@ -49,8 +49,8 @@ class CacheConfigTests(TestCase):
def test_config_overrides_environ(self):
"""
Individual cache factors defined in config will take precedence over
ones in the environment.
Individual cache factors defined in the environment will take precedence
over those in the config.
"""
config = {"caches": {"per_cache_factors": {"foo": 2, "bar": 3}}}
t = TestConfig()
@@ -62,7 +62,7 @@ class CacheConfigTests(TestCase):
self.assertEqual(
dict(t.caches.cache_factors),
{"foo": 2.0, "bar": 3.0, "something_or_other": 2.0},
{"foo": 1.0, "bar": 3.0, "something_or_other": 2.0},
)
def test_individual_instantiated_before_config_load(self):