2015-12-09 16:51:34 +01:00
|
|
|
# -*- coding: utf-8 -*-
|
2016-01-07 05:26:29 +01:00
|
|
|
# Copyright 2015, 2016 OpenMarket Ltd
|
2017-10-10 12:21:41 +02:00
|
|
|
# Copyright 2017 New Vector Ltd
|
2015-12-09 16:51:34 +01:00
|
|
|
#
|
|
|
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
|
|
# you may not use this file except in compliance with the License.
|
|
|
|
# You may obtain a copy of the License at
|
|
|
|
#
|
|
|
|
# http://www.apache.org/licenses/LICENSE-2.0
|
|
|
|
#
|
|
|
|
# Unless required by applicable law or agreed to in writing, software
|
|
|
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
|
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
|
|
# See the License for the specific language governing permissions and
|
|
|
|
# limitations under the License.
|
|
|
|
|
|
|
|
import logging
|
|
|
|
import re
|
|
|
|
|
2016-01-18 15:09:47 +01:00
|
|
|
from synapse.types import UserID
|
2017-03-29 16:53:14 +02:00
|
|
|
from synapse.util.caches import CACHE_SIZE_FACTOR, register_cache
|
2016-01-19 17:01:05 +01:00
|
|
|
from synapse.util.caches.lrucache import LruCache
|
2016-01-18 15:09:47 +01:00
|
|
|
|
2018-04-15 21:43:35 +02:00
|
|
|
from six import string_types
|
|
|
|
|
logger = logging.getLogger(__name__)


# Matches a character-class range (e.g. "[abc]", "[a-z]", "[!a-z]") *after*
# the glob has been passed through re.escape(), so the brackets and the
# negating "!" appear backslash-escaped.  Group 1 captures the (escaped)
# "!" if present, group 2 the class contents.
GLOB_REGEX = re.compile(r'\\\[(\\\!|)(.*)\\\]')
# Quick test for whether a pattern contains any glob metacharacters at all;
# used to take a fast path for plain literal patterns.
IS_GLOB = re.compile(r'[\?\*\[\]]')
# Parses the 'is' field of a room_member_count condition, e.g. "2", ">=10":
# group 1 is the comparison operator (possibly empty), group 2 the number.
INEQUALITY_EXPR = re.compile("^([=<>]*)([0-9]*)$")
def _room_member_count(ev, condition, room_member_count):
    """Test a 'room_member_count' push rule condition.

    Args:
        ev: the event being evaluated (unused here).
        condition (dict): the condition dict; its 'is' field holds an
            (in)equality spec such as "2" or ">=10".
        room_member_count (int): the number of members in the room.

    Returns:
        bool: whether the member count satisfies the condition.
    """
    return _test_ineq_condition(condition, room_member_count)
2017-10-05 14:08:02 +02:00
|
|
|
|
2017-10-10 16:23:00 +02:00
|
|
|
def _sender_notification_permission(ev, condition, sender_power_level, power_levels):
|
|
|
|
notif_level_key = condition.get('key')
|
|
|
|
if notif_level_key is None:
|
|
|
|
return False
|
|
|
|
|
|
|
|
notif_levels = power_levels.get('notifications', {})
|
|
|
|
room_notif_level = notif_levels.get(notif_level_key, 50)
|
|
|
|
|
2017-10-10 16:53:34 +02:00
|
|
|
return sender_power_level >= room_notif_level
|
2017-10-05 13:39:18 +02:00
|
|
|
|
2017-10-05 14:08:02 +02:00
|
|
|
|
def _test_ineq_condition(condition, number):
    """Test `number` against the (in)equality in a condition's 'is' field.

    The 'is' field is of the form "<op><digits>", e.g. "2", "==2", ">=10".
    An empty operator means equality.

    Args:
        condition (dict): the condition dict, expected to carry an 'is' key.
        number (int): the value to compare.

    Returns:
        bool: the result of the comparison; False for a missing or
            unparseable 'is' field or an unknown operator.
    """
    if 'is' not in condition:
        return False

    m = INEQUALITY_EXPR.match(condition['is'])
    if not m:
        return False

    op = m.group(1)
    rhs_str = m.group(2)
    if not rhs_str.isdigit():
        return False
    rhs = int(rhs_str)

    # Dispatch on the operator; anything unrecognised fails the condition.
    comparators = {
        '': lambda lhs, r: lhs == r,
        '==': lambda lhs, r: lhs == r,
        '<': lambda lhs, r: lhs < r,
        '>': lambda lhs, r: lhs > r,
        '>=': lambda lhs, r: lhs >= r,
        '<=': lambda lhs, r: lhs <= r,
    }
    compare = comparators.get(op)
    if compare is None:
        return False
    return compare(number, rhs)
def tweaks_for_actions(actions):
    """Collect the 'set_tweak' entries from a list of push rule actions.

    Args:
        actions (list): push rule actions.  Dict entries of the form
            {"set_tweak": name, "value": value} are collected; string
            actions and incomplete dicts are ignored.

    Returns:
        dict: mapping of tweak name to value (later entries win).
    """
    return {
        action['set_tweak']: action['value']
        for action in actions
        if isinstance(action, dict) and 'set_tweak' in action and 'value' in action
    }
class PushRuleEvaluatorForEvent(object):
    """Evaluates push rule conditions against a single event.

    Args:
        event: the event to evaluate conditions against.
        room_member_count (int): number of members in the event's room.
        sender_power_level (int): power level of the event's sender.
        power_levels (dict): content of the room's power-levels event.
    """
    def __init__(self, event, room_member_count, sender_power_level, power_levels):
        self._event = event
        self._room_member_count = room_member_count
        self._sender_power_level = sender_power_level
        self._power_levels = power_levels

        # Maps strings of e.g. 'content.body' -> event["content"]["body"]
        self._value_cache = _flatten_dict(event)

    def matches(self, condition, user_id, display_name):
        """Test a single push rule condition against the event.

        Args:
            condition (dict): the condition, with a 'kind' field.
            user_id (str): the user for whom rules are being evaluated.
            display_name (str): that user's display name in the room.

        Returns:
            bool: whether the condition matches.  Unknown condition kinds
                match unconditionally, so new kinds of condition do not
                silently disable rules on older servers.
        """
        if condition['kind'] == 'event_match':
            return self._event_match(condition, user_id)
        elif condition['kind'] == 'contains_display_name':
            return self._contains_display_name(display_name)
        elif condition['kind'] == 'room_member_count':
            return _room_member_count(
                self._event, condition, self._room_member_count
            )
        elif condition['kind'] == 'sender_notification_permission':
            return _sender_notification_permission(
                self._event, condition, self._sender_power_level, self._power_levels,
            )
        else:
            return True

    def _event_match(self, condition, user_id):
        """Test an 'event_match' condition: glob-match a pattern against
        one (flattened) field of the event.
        """
        pattern = condition.get('pattern', None)

        if not pattern:
            # 'pattern_type' lets the pattern be derived from the user
            # instead of being given literally.
            pattern_type = condition.get('pattern_type', None)
            if pattern_type == "user_id":
                pattern = user_id
            elif pattern_type == "user_localpart":
                pattern = UserID.from_string(user_id).localpart

        if not pattern:
            # logger.warn is deprecated; use warning.
            logger.warning("event_match condition with no pattern")
            return False

        # XXX: optimisation: cache our pattern regexps
        if condition['key'] == 'content.body':
            # content.body is special-cased to match on word boundaries.
            body = self._event["content"].get("body", None)
            if not body:
                return False

            return _glob_matches(pattern, body, word_boundary=True)
        else:
            haystack = self._get_value(condition['key'])
            if haystack is None:
                return False

            return _glob_matches(pattern, haystack)

    def _contains_display_name(self, display_name):
        """Test whether the user's display name occurs, as a whole word,
        in the event body.
        """
        if not display_name:
            return False

        body = self._event["content"].get("body", None)
        if not body:
            return False

        return _glob_matches(display_name, body, word_boundary=True)

    def _get_value(self, dotted_key):
        # Look up a flattened event field, e.g. 'content.body'.
        return self._value_cache.get(dotted_key, None)
# Caches (glob, word_boundary) -> regex for push. See _glob_matches
regex_cache = LruCache(50000 * CACHE_SIZE_FACTOR)
# Expose the cache for metrics under the name "regex_push_cache".
register_cache("regex_push_cache", regex_cache)
def _glob_matches(glob, value, word_boundary=False):
    """Tests if value matches glob.

    Args:
        glob (string)
        value (string): String to test against glob.
        word_boundary (bool): Whether to match against word boundaries or entire
            string. Defaults to False.

    Returns:
        bool
    """
    try:
        r = regex_cache.get((glob, word_boundary), None)
        if not r:
            r = _glob_to_re(glob, word_boundary)
            regex_cache[(glob, word_boundary)] = r
        # re.search returns a Match object or None; coerce to the
        # documented bool return type.
        return bool(r.search(value))
    except re.error:
        # logger.warn is deprecated; use warning.
        logger.warning("Failed to parse glob to regex: %r", glob)
        return False
def _glob_to_re(glob, word_boundary):
    """Generates regex for a given glob.

    Args:
        glob (string)
        word_boundary (bool): Whether to match against word boundaries or entire
            string. Defaults to False.

    Returns:
        regex object
    """
    # Escape everything first; for genuine globs we then translate the
    # (now-escaped) metacharacters back into regex constructs.
    pattern = re.escape(glob)

    if IS_GLOB.search(glob):
        pattern = pattern.replace(r'\*', '.*?')
        pattern = pattern.replace(r'\?', '.')

        # handle [abc], [a-z] and [!a-z] style ranges.
        def _char_class(m):
            negate = '^' if m.group(1) else ''
            contents = m.group(2).replace(r'\\\-', '-')
            return '[%s%s]' % (negate, contents)

        pattern = GLOB_REGEX.sub(_char_class, pattern)

    # Either require whole-word matching, or anchor to the whole string.
    if word_boundary:
        pattern = _re_word_boundary(pattern)
    else:
        pattern = "^" + pattern + "$"

    return re.compile(pattern, flags=re.IGNORECASE)
|
2017-10-05 12:43:10 +02:00
|
|
|
|
2017-10-05 12:33:30 +02:00
|
|
|
def _re_word_boundary(r):
|
|
|
|
"""
|
|
|
|
Adds word boundary characters to the start and end of an
|
|
|
|
expression to require that the match occur as a whole word,
|
|
|
|
but do so respecting the fact that strings starting or ending
|
|
|
|
with non-word characters will change word boundaries.
|
|
|
|
"""
|
2017-10-05 12:57:43 +02:00
|
|
|
# we can't use \b as it chokes on unicode. however \W seems to be okay
|
|
|
|
# as shorthand for [^0-9A-Za-z_].
|
|
|
|
return r"(^|\W)%s(\W|$)" % (r,)
|
2017-10-05 12:33:30 +02:00
|
|
|
|
2017-03-29 16:53:14 +02:00
|
|
|
|
def _flatten_dict(d, prefix=None, result=None):
    """Recursively flatten a nested mapping into dotted keys.

    e.g. {"content": {"body": "Hi"}} -> {"content.body": "hi"}.
    String values are lower-cased (matching is case-insensitive);
    non-string, non-mapping values are dropped.

    Args:
        d (dict): the (possibly nested) mapping to flatten.
        prefix (list|None): key path accumulated so far; internal use.
        result (dict|None): accumulator dict; internal use.

    Returns:
        dict: mapping of dotted key path to lower-cased string value.
    """
    # Avoid the mutable-default-argument pitfall: fresh list/dict per call.
    if prefix is None:
        prefix = []
    if result is None:
        result = {}
    for key, value in d.items():
        if isinstance(value, string_types):
            result[".".join(prefix + [key])] = value.lower()
        elif hasattr(value, "items"):
            _flatten_dict(value, prefix=(prefix + [key]), result=result)

    return result