2014-11-19 19:20:59 +01:00
|
|
|
# -*- coding: utf-8 -*-
|
2015-01-28 14:17:55 +01:00
|
|
|
# Copyright 2015 OpenMarket Ltd
|
2014-11-19 19:20:59 +01:00
|
|
|
#
|
|
|
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
|
|
# you may not use this file except in compliance with the License.
|
|
|
|
# You may obtain a copy of the License at
|
|
|
|
#
|
|
|
|
# http://www.apache.org/licenses/LICENSE-2.0
|
|
|
|
#
|
|
|
|
# Unless required by applicable law or agreed to in writing, software
|
|
|
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
|
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
|
|
# See the License for the specific language governing permissions and
|
|
|
|
# limitations under the License.
|
|
|
|
|
|
|
|
from twisted.internet import defer
|
|
|
|
|
|
|
|
from synapse.streams.config import PaginationConfig
|
2015-01-29 19:38:22 +01:00
|
|
|
from synapse.types import StreamToken, UserID
|
2014-11-19 19:20:59 +01:00
|
|
|
|
|
|
|
import synapse.util.async
|
2015-01-29 19:38:22 +01:00
|
|
|
import baserules
|
2014-11-19 19:20:59 +01:00
|
|
|
|
|
|
|
import logging
|
2015-02-11 15:23:10 +01:00
|
|
|
import simplejson as json
|
2015-01-30 15:46:03 +01:00
|
|
|
import re
|
2014-11-19 19:20:59 +01:00
|
|
|
|
|
|
|
# Module-level logger, named after this module per stdlib convention.
logger = logging.getLogger(__name__)
|
|
|
|
|
2014-12-03 14:37:02 +01:00
|
|
|
|
2014-11-19 19:20:59 +01:00
|
|
|
class Pusher(object):
    """Base class for a single push notification channel for one user/device.

    Subclasses implement dispatch_push() / reset_badge_count() for a concrete
    push gateway; start() runs the delivery loop.
    """

    # Initial delay between delivery retries, in milliseconds.
    INITIAL_BACKOFF = 1000
    # Cap on the exponential retry backoff: 1 hour, in milliseconds.
    MAX_BACKOFF = 60 * 60 * 1000
    # Give up retrying a notification after this long: 24 hours, in ms.
    GIVE_UP_AFTER = 24 * 60 * 60 * 1000
    # Actions used when no rule matches (or a matched rule is unusable).
    DEFAULT_ACTIONS = ['notify']

    # Parses the 'is' field of 'room_member_count' conditions, e.g. ">=2":
    # group 1 is the comparison operator (may be empty), group 2 the number.
    INEQUALITY_EXPR = re.compile("^([=<>]*)([0-9]*)$")
|
|
|
|
|
2015-02-03 17:51:07 +01:00
|
|
|
    def __init__(self, _hs, profile_tag, user_name, app_id,
                 app_display_name, device_display_name, pushkey, pushkey_ts,
                 data, last_token, last_success, failing_since):
        """
        Args:
            _hs: the HomeServer, used to obtain the datastore, clock and
                event stream handler.
            profile_tag: tag matched against 'device' push rule conditions.
            user_name: the Matrix user ID this pusher notifies for.
            app_id: identifier of the application this pushkey belongs to.
            app_display_name: human-readable app name.
            device_display_name: human-readable device name.
            pushkey: the opaque push key for this device.
            pushkey_ts: timestamp associated with the pushkey.
            data: opaque per-pusher configuration data.
            last_token: event-stream token we have processed up to
                (may be falsy on first-time setup; see start()).
            last_success: timestamp of the last successful delivery.
            failing_since: timestamp since which deliveries have been
                failing, or None.
        """
        self.hs = _hs
        self.evStreamHandler = self.hs.get_handlers().event_stream_handler
        self.store = self.hs.get_datastore()
        self.clock = self.hs.get_clock()
        self.profile_tag = profile_tag
        self.user_name = user_name
        self.app_id = app_id
        self.app_display_name = app_display_name
        self.device_display_name = device_display_name
        self.pushkey = pushkey
        self.pushkey_ts = pushkey_ts
        self.data = data
        self.last_token = last_token
        self.last_success = last_success  # not actually used
        self.backoff_delay = Pusher.INITIAL_BACKOFF
        self.failing_since = failing_since
        # Cleared by stop(); the start() loop exits once this is False.
        self.alive = True

        # The last value of last_active_time that we saw
        self.last_last_active_time = 0
        # Whether a notification has been sent since the badge was last reset.
        self.has_unread = True
|
2015-01-28 12:55:49 +01:00
|
|
|
|
2015-01-23 18:07:06 +01:00
|
|
|
    @defer.inlineCallbacks
    def _actions_for_event(self, ev):
        """Work out which push-rule actions apply to the event *ev*.

        Loads the user's stored push rules (merged with the base rules),
        evaluates each rule's conditions against the event, and returns
        (via deferred) the actions of the first fully-matching rule, or
        Pusher.DEFAULT_ACTIONS if none match.

        This should take into account notification settings that the user
        has configured both globally and per-room when we have the ability
        to do such things.
        """
        if ev['user_id'] == self.user_name:
            # let's assume you probably know about messages you sent yourself
            defer.returnValue(['dont_notify'])

        if ev['type'] == 'm.room.member':
            # membership changes about other people aren't worth notifying for
            if ev['state_key'] != self.user_name:
                defer.returnValue(['dont_notify'])

        rawrules = yield self.store.get_push_rules_for_user_name(self.user_name)

        # conditions/actions are stored JSON-encoded; decode in place
        for r in rawrules:
            r['conditions'] = json.loads(r['conditions'])
            r['actions'] = json.loads(r['actions'])

        user = UserID.from_string(self.user_name)

        rules = baserules.list_with_base_rules(rawrules, user)

        # get *our* member event for display name matching
        member_events_for_room = yield self.store.get_current_state(
            room_id=ev['room_id'],
            event_type='m.room.member',
            state_key=None
        )

        my_display_name = None
        room_member_count = 0
        for mev in member_events_for_room:
            if mev.content['membership'] != 'join':
                continue

            # This loop does two things:
            # 1) Find our current display name
            if mev.state_key == self.user_name and 'displayname' in mev.content:
                my_display_name = mev.content['displayname']

            # and 2) Get the number of people in that room
            room_member_count += 1

        for r in rules:
            matches = True

            conditions = r['conditions']
            actions = r['actions']

            # every condition must hold for the rule to match
            for c in conditions:
                matches &= self._event_fulfills_condition(
                    ev, c, display_name=my_display_name,
                    room_member_count=room_member_count
                )
            # ignore rules with no actions (we have an explicit 'dont_notify'
            # action for rules that should suppress notification)
            if len(actions) == 0:
                logger.warn(
                    "Ignoring rule id %s with no actions for user %s" %
                    (r['rule_id'], r['user_name'])
                )
                continue
            if matches:
                defer.returnValue(actions)

        defer.returnValue(Pusher.DEFAULT_ACTIONS)
|
|
|
|
|
2015-02-08 01:34:11 +01:00
|
|
|
@staticmethod
|
|
|
|
def _glob_to_regexp(glob):
|
|
|
|
r = re.escape(glob)
|
2015-02-08 03:37:35 +01:00
|
|
|
r = re.sub(r'\\\*', r'.*?', r)
|
2015-02-08 01:34:11 +01:00
|
|
|
r = re.sub(r'\\\?', r'.', r)
|
|
|
|
|
|
|
|
# handle [abc], [a-z] and [!a-z] style ranges.
|
|
|
|
r = re.sub(r'\\\[(\\\!|)(.*)\\\]',
|
|
|
|
lambda x: ('[%s%s]' % (x.group(1) and '^' or '',
|
|
|
|
re.sub(r'\\\-', '-', x.group(2)))), r)
|
|
|
|
return r
|
2015-02-10 17:30:48 +01:00
|
|
|
|
2015-01-30 15:46:03 +01:00
|
|
|
def _event_fulfills_condition(self, ev, condition, display_name, room_member_count):
|
2015-01-23 18:07:06 +01:00
|
|
|
if condition['kind'] == 'event_match':
|
|
|
|
if 'pattern' not in condition:
|
|
|
|
logger.warn("event_match condition with no pattern")
|
|
|
|
return False
|
2015-02-08 01:34:11 +01:00
|
|
|
# XXX: optimisation: cache our pattern regexps
|
2015-02-09 17:44:47 +01:00
|
|
|
if condition['key'] == 'content.body':
|
|
|
|
r = r'\b%s\b' % self._glob_to_regexp(condition['pattern'])
|
|
|
|
else:
|
|
|
|
r = r'^%s$' % self._glob_to_regexp(condition['pattern'])
|
2015-01-23 18:07:06 +01:00
|
|
|
val = _value_for_dotted_key(condition['key'], ev)
|
2015-01-29 19:38:22 +01:00
|
|
|
if val is None:
|
|
|
|
return False
|
2015-02-09 18:01:40 +01:00
|
|
|
return re.search(r, val, flags=re.IGNORECASE) is not None
|
2015-02-08 01:34:11 +01:00
|
|
|
|
2015-01-23 18:07:06 +01:00
|
|
|
elif condition['kind'] == 'device':
|
2015-02-03 17:51:07 +01:00
|
|
|
if 'profile_tag' not in condition:
|
2015-01-23 18:07:06 +01:00
|
|
|
return True
|
2015-02-03 17:51:07 +01:00
|
|
|
return condition['profile_tag'] == self.profile_tag
|
2015-02-08 01:34:11 +01:00
|
|
|
|
2015-01-29 19:38:22 +01:00
|
|
|
elif condition['kind'] == 'contains_display_name':
|
|
|
|
# This is special because display names can be different
|
|
|
|
# between rooms and so you can't really hard code it in a rule.
|
|
|
|
# Optimisation: we should cache these names and update them from
|
|
|
|
# the event stream.
|
|
|
|
if 'content' not in ev or 'body' not in ev['content']:
|
|
|
|
return False
|
2015-01-30 15:46:03 +01:00
|
|
|
if not display_name:
|
|
|
|
return False
|
2015-02-10 17:30:48 +01:00
|
|
|
return re.search(
|
|
|
|
"\b%s\b" % re.escape(display_name), ev['content']['body'],
|
|
|
|
flags=re.IGNORECASE
|
|
|
|
) is not None
|
2015-02-08 01:34:11 +01:00
|
|
|
|
2015-01-30 15:46:03 +01:00
|
|
|
elif condition['kind'] == 'room_member_count':
|
|
|
|
if 'is' not in condition:
|
|
|
|
return False
|
|
|
|
m = Pusher.INEQUALITY_EXPR.match(condition['is'])
|
|
|
|
if not m:
|
|
|
|
return False
|
|
|
|
ineq = m.group(1)
|
|
|
|
rhs = m.group(2)
|
|
|
|
if not rhs.isdigit():
|
|
|
|
return False
|
|
|
|
rhs = int(rhs)
|
|
|
|
|
|
|
|
if ineq == '' or ineq == '==':
|
|
|
|
return room_member_count == rhs
|
|
|
|
elif ineq == '<':
|
|
|
|
return room_member_count < rhs
|
|
|
|
elif ineq == '>':
|
|
|
|
return room_member_count > rhs
|
|
|
|
elif ineq == '>=':
|
|
|
|
return room_member_count >= rhs
|
|
|
|
elif ineq == '<=':
|
|
|
|
return room_member_count <= rhs
|
|
|
|
else:
|
|
|
|
return False
|
2015-01-23 18:07:06 +01:00
|
|
|
else:
|
|
|
|
return True
|
2015-01-13 14:14:41 +01:00
|
|
|
|
2015-01-15 17:56:18 +01:00
|
|
|
    @defer.inlineCallbacks
    def get_context_for_event(self, ev):
        """Build display context for the room *ev* was sent in.

        Returns (via deferred): a dict with 'aliases' (list of room
        aliases), optionally 'name' (the room name, if set) and optionally
        'sender_display_name' (the sender's display name, if they are a
        joined member with one).
        """
        # (name, aliases) tuple from the store
        name_aliases = yield self.store.get_room_name_and_aliases(
            ev['room_id']
        )

        ctx = {'aliases': name_aliases[1]}
        if name_aliases[0] is not None:
            ctx['name'] = name_aliases[0]

        # Look up the *sender's* member event to find their display name.
        their_member_events_for_room = yield self.store.get_current_state(
            room_id=ev['room_id'],
            event_type='m.room.member',
            state_key=ev['user_id']
        )
        for mev in their_member_events_for_room:
            if mev.content['membership'] == 'join' and 'displayname' in mev.content:
                dn = mev.content['displayname']
                if dn is not None:
                    ctx['sender_display_name'] = dn

        defer.returnValue(ctx)
|
|
|
|
|
2014-11-19 19:20:59 +01:00
|
|
|
    @defer.inlineCallbacks
    def start(self):
        """Main delivery loop.

        Streams events for this user one at a time, decides the actions for
        each via _actions_for_event(), dispatches pushes, and persists the
        last processed stream token.  Failed deliveries are retried with
        exponential backoff (INITIAL_BACKOFF doubling up to MAX_BACKOFF) and
        abandoned after GIVE_UP_AFTER.  Runs until stop() clears self.alive.
        """
        if not self.last_token:
            # First-time setup: get a token to start from (we can't
            # just start from no token, ie. 'now'
            # because we need the result to be reproduceable in case
            # we fail to dispatch the push)
            config = PaginationConfig(from_token=None, limit='1')
            chunk = yield self.evStreamHandler.get_stream(
                self.user_name, config, timeout=0)
            self.last_token = chunk['end']
            self.store.update_pusher_last_token(
                self.app_id, self.pushkey, self.last_token)
            logger.info("Pusher %s for user %s starting from token %s",
                        self.pushkey, self.user_name, self.last_token)

        while self.alive:
            from_tok = StreamToken.from_string(self.last_token)
            config = PaginationConfig(from_token=from_tok, limit='1')
            # timeout is ~100 years: effectively block until an event arrives
            chunk = yield self.evStreamHandler.get_stream(
                self.user_name, config,
                timeout=100*365*24*60*60*1000, affect_presence=False
            )

            # limiting to 1 may get 1 event plus 1 presence event, so
            # pick out the actual event
            single_event = None
            for c in chunk['chunk']:
                if 'event_id' in c:  # Hmmm...
                    single_event = c
                    break
            if not single_event:
                # nothing pushable in this chunk: just advance the token
                self.last_token = chunk['end']
                continue

            if not self.alive:
                continue

            processed = False
            actions = yield self._actions_for_event(single_event)
            tweaks = _tweaks_for_actions(actions)

            if len(actions) == 0:
                logger.warn("Empty actions! Using default action.")
                actions = Pusher.DEFAULT_ACTIONS
            if 'notify' not in actions and 'dont_notify' not in actions:
                logger.warn("Neither notify nor dont_notify in actions: adding default")
                actions.extend(Pusher.DEFAULT_ACTIONS)
            if 'dont_notify' in actions:
                logger.debug(
                    "%s for %s: dont_notify",
                    single_event['event_id'], self.user_name
                )
                processed = True
            else:
                # dispatch_push returns a list/tuple of rejected pushkeys on
                # success, or False if delivery failed and should be retried
                rejected = yield self.dispatch_push(single_event, tweaks)
                self.has_unread = True
                if isinstance(rejected, list) or isinstance(rejected, tuple):
                    processed = True
                    for pk in rejected:
                        if pk != self.pushkey:
                            # for sanity, we only remove the pushkey if it
                            # was the one we actually sent...
                            logger.warn(
                                ("Ignoring rejected pushkey %s because we"
                                 " didn't send it"), pk
                            )
                        else:
                            logger.info(
                                "Pushkey %s was rejected: removing",
                                pk
                            )
                            yield self.hs.get_pusherpool().remove_pusher(
                                self.app_id, pk
                            )

            if not self.alive:
                continue

            if processed:
                # success: reset backoff, persist progress, clear failure state
                self.backoff_delay = Pusher.INITIAL_BACKOFF
                self.last_token = chunk['end']
                self.store.update_pusher_last_token_and_success(
                    self.app_id,
                    self.pushkey,
                    self.last_token,
                    self.clock.time_msec()
                )
                if self.failing_since:
                    self.failing_since = None
                    self.store.update_pusher_failing_since(
                        self.app_id,
                        self.pushkey,
                        self.failing_since)
            else:
                if not self.failing_since:
                    self.failing_since = self.clock.time_msec()
                    self.store.update_pusher_failing_since(
                        self.app_id,
                        self.pushkey,
                        self.failing_since
                    )

                if (self.failing_since and
                        self.failing_since <
                        self.clock.time_msec() - Pusher.GIVE_UP_AFTER):
                    # we really only give up so that if the URL gets
                    # fixed, we don't suddenly deliver a load
                    # of old notifications.
                    logger.warn("Giving up on a notification to user %s, "
                                "pushkey %s",
                                self.user_name, self.pushkey)
                    self.backoff_delay = Pusher.INITIAL_BACKOFF
                    self.last_token = chunk['end']
                    self.store.update_pusher_last_token(
                        self.app_id,
                        self.pushkey,
                        self.last_token
                    )

                    self.failing_since = None
                    self.store.update_pusher_failing_since(
                        self.app_id,
                        self.pushkey,
                        self.failing_since
                    )
                else:
                    # still within the retry window: back off and try again
                    logger.warn("Failed to dispatch push for user %s "
                                "(failing for %dms)."
                                "Trying again in %dms",
                                self.user_name,
                                self.clock.time_msec() - self.failing_since,
                                self.backoff_delay)
                    yield synapse.util.async.sleep(self.backoff_delay / 1000.0)
                    self.backoff_delay *= 2
                    if self.backoff_delay > Pusher.MAX_BACKOFF:
                        self.backoff_delay = Pusher.MAX_BACKOFF
|
2014-11-19 19:20:59 +01:00
|
|
|
|
2014-12-18 15:49:22 +01:00
|
|
|
    def stop(self):
        # Signal the start() loop to exit; it checks self.alive between
        # steps, so shutdown takes effect at the next iteration boundary.
        self.alive = False
|
|
|
|
|
2015-01-23 18:07:06 +01:00
|
|
|
    def dispatch_push(self, p, tweaks):
        """
        Overridden by implementing classes to actually deliver the notification
        Args:
            p: The event to notify for as a single event from the event stream
            tweaks: dict of tweaks for this notification, as extracted from
                the matched rule's actions by _tweaks_for_actions
        Returns: If the notification was delivered, an array containing any
            pushkeys that were rejected by the push gateway.
            False if the notification could not be delivered (ie.
            should be retried).
        """
        # Base implementation does nothing; subclasses must override.
        pass
|
|
|
|
|
2015-01-28 12:55:49 +01:00
|
|
|
    def reset_badge_count(self):
        # Overridden by implementing classes to clear the unread badge on
        # the device; the base implementation is a no-op.
        pass
|
|
|
|
|
|
|
|
def presence_changed(self, state):
|
|
|
|
"""
|
|
|
|
We clear badge counts whenever a user's last_active time is bumped
|
|
|
|
This is by no means perfect but I think it's the best we can do
|
|
|
|
without read receipts.
|
|
|
|
"""
|
|
|
|
if 'last_active' in state.state:
|
|
|
|
last_active = state.state['last_active']
|
|
|
|
if last_active > self.last_last_active_time:
|
|
|
|
self.last_last_active_time = last_active
|
2015-01-29 17:10:01 +01:00
|
|
|
if self.has_unread:
|
|
|
|
logger.info("Resetting badge count for %s", self.user_name)
|
|
|
|
self.reset_badge_count()
|
|
|
|
self.has_unread = False
|
|
|
|
|
2015-01-28 12:55:49 +01:00
|
|
|
|
2015-01-23 18:07:06 +01:00
|
|
|
def _value_for_dotted_key(dotted_key, event):
|
|
|
|
parts = dotted_key.split(".")
|
|
|
|
val = event
|
|
|
|
while len(parts) > 0:
|
|
|
|
if parts[0] not in val:
|
|
|
|
return None
|
|
|
|
val = val[parts[0]]
|
|
|
|
parts = parts[1:]
|
|
|
|
return val
|
|
|
|
|
2015-01-29 17:10:35 +01:00
|
|
|
|
2015-01-23 18:07:06 +01:00
|
|
|
def _tweaks_for_actions(actions):
|
|
|
|
tweaks = {}
|
|
|
|
for a in actions:
|
|
|
|
if not isinstance(a, dict):
|
|
|
|
continue
|
2015-02-03 17:06:31 +01:00
|
|
|
if 'set_tweak' in a and 'value' in a:
|
|
|
|
tweaks[a['set_tweak']] = a['value']
|
2015-01-23 18:07:06 +01:00
|
|
|
return tweaks
|
|
|
|
|
2015-01-29 17:10:35 +01:00
|
|
|
|
2014-11-19 19:20:59 +01:00
|
|
|
class PusherConfigException(Exception):
    """Raised when a pusher is configured with missing or invalid settings."""

    def __init__(self, msg):
        super(PusherConfigException, self).__init__(msg)
|