2015-02-24 17:58:26 +01:00
|
|
|
# -*- coding: utf-8 -*-
|
2016-01-07 05:26:29 +01:00
|
|
|
# Copyright 2015, 2016 OpenMarket Ltd
|
2015-02-24 17:58:26 +01:00
|
|
|
#
|
|
|
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
|
|
# you may not use this file except in compliance with the License.
|
|
|
|
# You may obtain a copy of the License at
|
|
|
|
#
|
|
|
|
# http://www.apache.org/licenses/LICENSE-2.0
|
|
|
|
#
|
|
|
|
# Unless required by applicable law or agreed to in writing, software
|
|
|
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
|
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
|
|
# See the License for the specific language governing permissions and
|
|
|
|
# limitations under the License.
|
|
|
|
|
|
|
|
|
2015-03-04 20:22:14 +01:00
|
|
|
from itertools import chain
|
2018-01-18 12:30:49 +01:00
|
|
|
import logging
|
2018-05-02 17:52:42 +02:00
|
|
|
import re
|
2018-01-18 12:30:49 +01:00
|
|
|
|
|
|
|
# Module-level logger, named after this module per the usual convention.
logger = logging.getLogger(__name__)
|
2015-03-04 20:22:14 +01:00
|
|
|
|
|
|
|
|
2018-01-15 17:58:41 +01:00
|
|
|
def flatten(items):
    """Flatten a list of lists

    Args:
        items: iterable[iterable[X]]

    Returns:
        list[X]: flattened list
    """
    flattened = []
    for sub in items:
        flattened.extend(sub)
    return flattened
|
2015-03-04 20:22:14 +01:00
|
|
|
|
|
|
|
|
2015-03-04 16:47:23 +01:00
|
|
|
class BaseMetric(object):
    """Base class for metrics which report a single value per label set
    """

    def __init__(self, name, labels=None, alternative_names=None):
        """
        Args:
            name (str): principal name for this metric
            labels (list(str)|None): names of the labels which will be
                reported for this metric
            alternative_names (iterable(str)|None): list of alternative names
                for this metric. This can be useful to provide a migration
                path when renaming metrics.
        """
        # None-sentinel defaults instead of mutable default arguments
        # (a shared [] default is a classic Python pitfall).
        self.name = name
        self._names = [name] + list(alternative_names or [])
        # OK not to clone as we never write it
        self.labels = labels if labels is not None else []

    def dimension(self):
        """Returns the number of labels for this metric (int)."""
        return len(self.labels)

    def is_scalar(self):
        """Returns True if this metric has no labels."""
        return not len(self.labels)

    def _render_labelvalue(self, value):
        # label values are quoted and escaped per the prometheus text format
        return '"%s"' % (_escape_label_value(value),)

    def _render_key(self, values):
        """Render the '{label="value",...}' suffix for one set of label values.

        Returns the empty string for scalar (label-less) metrics.
        """
        if self.is_scalar():
            return ""
        return "{%s}" % (
            ",".join(["%s=%s" % (k, self._render_labelvalue(v))
                      for k, v in zip(self.labels, values)])
        )

    def _render_for_labels(self, label_values, value):
        """Render this metric for a single set of labels

        Args:
            label_values (list[object]): values for each of the labels,
                (which get stringified).
            value: value of the metric at with these labels

        Returns:
            iterable[str]: rendered metric, one entry per exported name
        """
        rendered_labels = self._render_key(label_values)
        return (
            "%s%s %.12g" % (name, rendered_labels, value)
            for name in self._names
        )

    def render(self):
        """Render this metric

        Each metric is rendered as:

            name{label1="val1",label2="val2"} value

        https://prometheus.io/docs/instrumenting/exposition_formats/#text-format-details

        Returns:
            iterable[str]: rendered metrics
        """
        raise NotImplementedError()
|
|
|
|
|
2015-03-04 16:47:23 +01:00
|
|
|
|
|
|
|
class CounterMetric(BaseMetric):
    """The simplest kind of metric; one that stores a monotonically-increasing
    value that counts events or running totals.

    Example use cases for Counters:
    - Number of requests processed
    - Number of items that were inserted into a queue
    - Total amount of data that a system has processed
    Counters can only go up (and be reset when the process restarts).
    """

    def __init__(self, *args, **kwargs):
        super(CounterMetric, self).__init__(*args, **kwargs)

        # dict[list[str]]: value for each set of label values. the keys are the
        # label values, in the same order as the labels in self.labels.
        #
        # (if the metric is a scalar, the (single) key is the empty tuple).
        self.counts = {}

        # Scalar metrics are never empty
        if self.is_scalar():
            self.counts[()] = 0.

    def inc_by(self, incr, *values):
        """Add `incr` to the count for the given label values."""
        if len(values) != self.dimension():
            raise ValueError(
                "Expected as many values to inc() as labels (%d)" % (self.dimension())
            )

        # TODO: should assert that the tag values are all strings

        # missing keys start from zero
        self.counts[values] = self.counts.get(values, 0) + incr

    def inc(self, *values):
        """Increment the count for the given label values by one."""
        self.inc_by(1, *values)

    def render(self):
        """Render one line per label set (times one per exported name)."""
        rendered = []
        for key in sorted(self.counts):
            rendered.extend(self._render_for_labels(key, self.counts[key]))
        return rendered
|
2016-06-02 12:29:44 +02:00
|
|
|
|
2015-03-04 16:47:23 +01:00
|
|
|
|
2018-04-11 12:07:33 +02:00
|
|
|
class GaugeMetric(BaseMetric):
    """A metric that can go up or down
    """

    def __init__(self, *args, **kwargs):
        super(GaugeMetric, self).__init__(*args, **kwargs)

        # dict[list[str]]: value for each set of label values. the keys are the
        # label values, in the same order as the labels in self.labels.
        #
        # (if the metric is a scalar, the (single) key is the empty tuple).
        #
        # NOTE(review): "guages" is a long-standing typo for "gauges"; kept
        # as-is because the attribute may be read by external code.
        self.guages = {}

    def set(self, v, *values):
        """Set the gauge for the given label values to v.

        Raises:
            ValueError: if the number of label values does not match the
                number of labels declared for this metric.
        """
        if len(values) != self.dimension():
            # bugfix: this message previously said "inc()" (copy-pasted from
            # CounterMetric), which was misleading for a setter.
            raise ValueError(
                "Expected as many values to set() as labels (%d)" % (self.dimension())
            )

        # TODO: should assert that the tag values are all strings

        self.guages[values] = v

    def render(self):
        """Render one line per known label set (times one per exported name)."""
        return flatten(
            self._render_for_labels(k, self.guages[k])
            for k in sorted(self.guages.keys())
        )
|
|
|
|
|
|
|
|
|
2015-03-04 17:46:44 +01:00
|
|
|
class CallbackMetric(BaseMetric):
    """A metric that returns the numeric value returned by a callback whenever
    it is rendered. Typically this is used to implement gauges that yield the
    size or other state of some in-memory object by actively querying it."""

    def __init__(self, name, callback, labels=None):
        """
        Args:
            name (str): principal name for this metric
            callback (callable): called with no arguments at render time;
                returns the scalar value (for scalar metrics) or a dict of
                label-values-tuple -> value (for labelled metrics)
            labels (list(str)|None): names of the labels reported for this
                metric
        """
        # None-sentinel rather than a mutable [] default argument
        super(CallbackMetric, self).__init__(
            name, labels=labels if labels is not None else [],
        )

        self.callback = callback

    def render(self):
        try:
            value = self.callback()
        except Exception:
            # bugfix: this previously used self.name, which BaseMetric does
            # not set (it only sets self._names), so the AttributeError
            # masked the real rendering failure.
            logger.exception("Failed to render %s", self._names[0])
            return ["# FAILED to render " + self._names[0]]

        if self.is_scalar():
            return list(self._render_for_labels([], value))

        return flatten(
            self._render_for_labels(k, value[k])
            for k in sorted(value.keys())
        )
|
2015-03-04 18:53:51 +01:00
|
|
|
|
2015-03-04 20:22:14 +01:00
|
|
|
|
2015-03-10 16:54:16 +01:00
|
|
|
class DistributionMetric(object):
    """A combination of an event counter and an accumulator, which counts
    both the number of events and accumulates the total value. Typically this
    could be used to keep track of method-running times, or other distributions
    of values that occur in discrete occurances.

    TODO(paul): Try to export some heatmap-style stats?
    """

    def __init__(self, name, *args, **kwargs):
        # Exported as two counters: <name>:count and <name>:total.
        # NOTE(review): extra positional args are silently ignored, as in the
        # original implementation.
        self.counts = CounterMetric(name + ":count", **kwargs)
        self.totals = CounterMetric(name + ":total", **kwargs)

    def inc_by(self, inc, *values):
        """Record one occurrence of size `inc` against the given labels."""
        self.counts.inc(*values)
        self.totals.inc_by(inc, *values)

    def render(self):
        """Concatenation of the count rendering and the total rendering."""
        rendered = self.counts.render()
        rendered = rendered + self.totals.render()
        return rendered
|
2015-03-04 20:22:14 +01:00
|
|
|
|
|
|
|
|
2015-03-04 18:34:23 +01:00
|
|
|
class CacheMetric(object):
    """Tracks hit/miss counts, total evicted size and current size of a cache.

    The current size is obtained by calling size_callback at render time.
    """

    __slots__ = (
        "name", "cache_name", "hits", "misses", "evicted_size", "size_callback",
    )

    def __init__(self, name, size_callback, cache_name):
        self.name = name
        self.cache_name = cache_name

        self.hits = 0
        self.misses = 0
        self.evicted_size = 0

        self.size_callback = size_callback

    def inc_hits(self):
        """Record a single cache hit."""
        self.hits += 1

    def inc_misses(self):
        """Record a single cache miss."""
        self.misses += 1

    def inc_evictions(self, size=1):
        """Record an eviction of `size` entries (defaults to one)."""
        self.evicted_size += size

    def render(self):
        """Render the hits/total/size/evicted_size series for this cache."""
        current_size = self.size_callback()
        hit_count = self.hits
        lookup_count = self.misses + self.hits

        # all four series share the same label set
        label = '{name="%s"}' % (self.cache_name,)
        return [
            "%s:hits%s %d" % (self.name, label, hit_count),
            "%s:total%s %d" % (self.name, label, lookup_count),
            "%s:size%s %d" % (self.name, label, current_size),
            "%s:evicted_size%s %d" % (self.name, label, self.evicted_size),
        ]
|
2016-07-20 16:47:28 +02:00
|
|
|
|
|
|
|
|
|
|
|
class MemoryUsageMetric(object):
    """Keeps track of the current memory usage, using psutil.

    The class will keep the current min/max/sum/counts of rss over the last
    WINDOW_SIZE_SEC, by polling UPDATE_HZ times per second
    """

    UPDATE_HZ = 2  # number of times to get memory per second
    WINDOW_SIZE_SEC = 30  # the size of the window in seconds

    def __init__(self, hs, psutil):
        clock = hs.get_clock()
        self.memory_snapshots = []

        self.process = psutil.Process()

        clock.looping_call(self._update_curr_values, 1000 / self.UPDATE_HZ)

    def _update_curr_values(self):
        """Take an RSS snapshot and drop entries older than the window."""
        window = self.UPDATE_HZ * self.WINDOW_SIZE_SEC
        self.memory_snapshots.append(self.process.memory_info().rss)
        # trim in place so other references to the list stay valid
        del self.memory_snapshots[:-window]

    def render(self):
        snapshots = self.memory_snapshots
        if not snapshots:
            return []

        stats = {
            "max": max(snapshots),
            "min": min(snapshots),
            "total": sum(snapshots),
            "count": len(snapshots),
        }
        return [
            "process_psutil_rss:%s %d" % (key, stats[key])
            for key in ("max", "min", "total", "count")
        ]
|
2018-05-02 17:52:42 +02:00
|
|
|
|
|
|
|
|
2018-05-02 18:27:27 +02:00
|
|
|
def _escape_character(m):
|
2018-05-02 17:52:42 +02:00
|
|
|
"""Replaces a single character with its escape sequence.
|
2018-05-02 18:27:27 +02:00
|
|
|
|
|
|
|
Args:
|
|
|
|
m (re.MatchObject): A match object whose first group is the single
|
|
|
|
character to replace
|
|
|
|
|
|
|
|
Returns:
|
|
|
|
str
|
2018-05-02 17:52:42 +02:00
|
|
|
"""
|
2018-05-02 18:27:27 +02:00
|
|
|
c = m.group(1)
|
2018-05-02 17:52:42 +02:00
|
|
|
if c == "\\":
|
|
|
|
return "\\\\"
|
|
|
|
elif c == "\"":
|
|
|
|
return "\\\""
|
|
|
|
elif c == "\n":
|
|
|
|
return "\\n"
|
|
|
|
return c
|
|
|
|
|
|
|
|
|
|
|
|
def _escape_label_value(value):
    """Takes a label value and escapes quotes, newlines and backslashes
    """
    # stringify first: label values may be arbitrary objects
    text = str(value)
    return re.sub(r"([\n\"\\])", _escape_character, text)
|