Merge branch 'rav/fix_expiring_cache_len' into erikj/destination_retry_cache

pull/3933/head
Richard van der Hoff 2018-09-26 12:55:53 +01:00
commit 5b4028fa78
6 changed files with 519 additions and 266 deletions

changelog.d/3911.misc (new file)

@@ -0,0 +1 @@
+Fix the docker image building on python 3

changelog.d/3956.bugfix (new file)

@@ -0,0 +1 @@
+Fix exceptions from metrics handler

File diff suppressed because it is too large.

docker/Dockerfile

@@ -1,4 +1,5 @@
-FROM docker.io/python:2-alpine3.8
+ARG PYTHON_VERSION=2
+FROM docker.io/python:${PYTHON_VERSION}-alpine3.8
 COPY . /synapse
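With the base image parametrised this way, the Python 3 variant can be built by overriding the build argument, e.g. docker build --build-arg PYTHON_VERSION=3 . from the directory containing this Dockerfile; when the argument is not supplied, ARG's default keeps the image on Python 2. (The build command shown is generic Docker usage, not part of the diff.)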

docker/start.py

@@ -5,6 +5,7 @@ import os
 import sys
 import subprocess
 import glob
+import codecs

 # Utility functions
 convert = lambda src, dst, environ: open(dst, "w").write(jinja2.Template(open(src).read()).render(**environ))

@@ -23,7 +24,7 @@ def generate_secrets(environ, secrets):
             with open(filename) as handle: value = handle.read()
         else:
             print("Generating a random secret for {}".format(name))
-            value = os.urandom(32).encode("hex")
+            value = codecs.encode(os.urandom(32), "hex").decode()
             with open(filename, "w") as handle: handle.write(value)
         environ[secret] = value
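The replacement matters because the "hex" codec is only reachable via bytes.encode() on Python 2; on Python 3, bytes has no encode() method, so generating a secret would raise AttributeError. codecs.encode() behaves the same on both interpreters. A minimal standalone sketch of the difference (illustrative, not part of the diff):

    import codecs
    import os

    raw = os.urandom(32)  # 32 random bytes on both Python 2 and 3
    # Python 2 only: raw.encode("hex") returns a hex string.
    # Python 3: bytes has no encode(), so the same call raises AttributeError.
    value = codecs.encode(raw, "hex").decode()  # hex text on both versions
    print(len(value))  # 64 hex characters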

synapse/util/caches/__init__.py

@@ -13,6 +13,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

+import logging
 import os

 import six
@@ -20,6 +21,8 @@ from six.moves import intern

 from prometheus_client.core import REGISTRY, Gauge, GaugeMetricFamily

+logger = logging.getLogger(__name__)
+
 CACHE_SIZE_FACTOR = float(os.environ.get("SYNAPSE_CACHE_FACTOR", 0.5))
@@ -76,16 +79,20 @@ def register_cache(cache_type, cache_name, cache):
            return []

        def collect(self):
-            if cache_type == "response_cache":
-                response_cache_size.labels(cache_name).set(len(cache))
-                response_cache_hits.labels(cache_name).set(self.hits)
-                response_cache_evicted.labels(cache_name).set(self.evicted_size)
-                response_cache_total.labels(cache_name).set(self.hits + self.misses)
-            else:
-                cache_size.labels(cache_name).set(len(cache))
-                cache_hits.labels(cache_name).set(self.hits)
-                cache_evicted.labels(cache_name).set(self.evicted_size)
-                cache_total.labels(cache_name).set(self.hits + self.misses)
+            try:
+                if cache_type == "response_cache":
+                    response_cache_size.labels(cache_name).set(len(cache))
+                    response_cache_hits.labels(cache_name).set(self.hits)
+                    response_cache_evicted.labels(cache_name).set(self.evicted_size)
+                    response_cache_total.labels(cache_name).set(self.hits + self.misses)
+                else:
+                    cache_size.labels(cache_name).set(len(cache))
+                    cache_hits.labels(cache_name).set(self.hits)
+                    cache_evicted.labels(cache_name).set(self.evicted_size)
+                    cache_total.labels(cache_name).set(self.hits + self.misses)
+            except Exception as e:
+                logger.warn("Error calculating metrics for %s: %s", cache_name, e)
+                raise

            yield GaugeMetricFamily("__unused", "")
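For context, these counters feed a prometheus_client collector, and an exception raised inside collect() (for instance from len(cache)) surfaces as a failed /metrics scrape without naming the cache responsible. The added try/except logs the offending cache name and re-raises rather than swallowing the error. A rough standalone sketch of the same guard, using illustrative names rather than Synapse's actual registration code:

    import logging

    from prometheus_client.core import REGISTRY, GaugeMetricFamily

    logger = logging.getLogger(__name__)

    class CacheSizeCollector(object):
        """Illustrative collector reporting one cache's size, guarding len()."""

        def __init__(self, cache_name, cache):
            self.cache_name = cache_name
            self.cache = cache

        def collect(self):
            try:
                size = len(self.cache)  # may raise if the cache misbehaves
            except Exception as e:
                # Identify the culprit in the logs, then re-raise so the
                # scrape still reports an error instead of bad data.
                logger.warning("Error calculating metrics for %s: %s",
                               self.cache_name, e)
                raise
            gauge = GaugeMetricFamily(
                "illustrative_cache_size", "Number of entries in the cache",
                labels=["name"],
            )
            gauge.add_metric([self.cache_name], size)
            yield gauge

    REGISTRY.register(CacheSizeCollector("my_cache", {}))

Re-raising keeps the failure visible to the scraper; only the logging is new, so a misbehaving cache can be identified from the logs instead of an anonymous metrics error.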