Environment takes precedence over config values

pull/6391/head
Andrew Morgan 2020-05-01 18:03:45 +01:00
parent 424215a0c3
commit c7f3bf66c2
3 changed files with 43 additions and 25 deletions

View File

@@ -103,17 +103,11 @@ class CacheConfig(Config):
# Set the global one so that it's reflected in new caches
properties.default_factor_size = self.global_factor
# Load cache factors from the environment, but override them with the
# ones in the config file if they exist
individual_factors = {
key[len(_CACHE_PREFIX) + 1 :].lower(): float(val)
for key, val in self._environ.items()
if key.startswith(_CACHE_PREFIX + "_")
}
individual_factors_config = cache_config.get("per_cache_factors", {}) or {}
if not isinstance(individual_factors_config, dict):
# Load cache factors from the config, but override them with the
# environment if they exist
individual_factors = cache_config.get("per_cache_factors", {}) or {}
if not isinstance(individual_factors, dict):
raise ConfigError("caches.per_cache_factors must be a dictionary")
individual_factors.update(individual_factors_config)
for cache, factor in individual_factors.items():
if not isinstance(factor, (int, float)):
@@ -122,6 +116,15 @@ class CacheConfig(Config):
)
self.cache_factors[cache.lower()] = factor
# Override with environment
individual_factors.update(
{
key[len(_CACHE_PREFIX) + 1 :].lower(): float(val)
for key, val in self._environ.items()
if key.startswith(_CACHE_PREFIX + "_")
}
)
# Resize all caches (if necessary) with the new factors we've loaded
self.resize_all_caches()

View File

@@ -90,13 +90,27 @@ class Cache(object):
def __init__(
self,
name,
max_entries=1000,
keylen=1,
tree=False,
iterable=False,
apply_cache_factor_from_config=True,
name: str,
max_entries: int = 1000,
keylen: int = 1,
tree: bool = False,
iterable: bool = False,
apply_cache_factor_from_config: bool = True,
):
"""
Args:
name: The name of the cache
max_entries: Maximum amount of entries that the cache will hold
keylen: The length of the tuple used as the cache key
tree: Use a TreeCache instead of a dict as the underlying cache type
iterable: If True, count each item in the cached object as an entry,
rather than each cached object
apply_cache_factor_from_config: Whether cache factors specified in the
config file affect `max_entries`
Returns:
Cache
"""
cache_type = TreeCache if tree else dict
self._pending_deferred_cache = cache_type()

View File

@@ -15,6 +15,7 @@
import threading
from functools import wraps
from typing import Callable, Optional, Type, Union
from synapse.config import cache as cache_config
from synapse.util.caches.treecache import TreeCache
@@ -52,18 +53,18 @@ class LruCache(object):
def __init__(
self,
max_size,
keylen=1,
cache_type=dict,
size_callback=None,
evicted_callback=None,
apply_cache_factor_from_config=True,
max_size: int,
keylen: int = 1,
cache_type: Type[Union[dict, TreeCache]] = dict,
size_callback: Optional[Callable] = None,
evicted_callback: Optional[Callable] = None,
apply_cache_factor_from_config: bool = True,
):
"""
Args:
max_size (int):
max_size: The maximum amount of entries the cache can hold
keylen (int):
keylen: The length of the tuple used as the cache key
cache_type (type):
type of underlying cache to be used. Typically one of dict
@@ -76,7 +77,7 @@ class LruCache(object):
entry
apply_cache_factor_from_config (bool): If true, `max_size` will be
multiplied by a cache factor derived from the homeserver config.
multiplied by a cache factor derived from the homeserver config
"""
cache = cache_type()
self.cache = cache # Used for introspection.