Started integration of Logging

pull/23/head
Sami Mokaddem 2017-12-04 16:44:44 +01:00
parent 4c04bd02c6
commit 1386e58cf8
7 changed files with 73 additions and 22 deletions
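The commit repeats the same two-line setup in every touched module: logging.basicConfig(...) pointed at logs/logs.log, followed by a module-level logger from logging.getLogger(__name__). A minimal sketch of how that pattern behaves, using the same path and level as in the diff (it assumes the logs/ directory already exists; basicConfig does not create it):

    # Minimal sketch of the per-module logging setup added in this commit.
    import logging

    # Only the first basicConfig() call that finds an unconfigured root logger
    # takes effect; later calls in other modules are silently ignored, so the
    # first imported module decides the file, mode and level. filemode='w'
    # truncates logs/logs.log on every restart.
    logging.basicConfig(filename='logs/logs.log', filemode='w', level=logging.INFO)

    # getLogger(__name__) gives each module its own named logger, so a record
    # can be traced back to the helper that emitted it.
    logger = logging.getLogger(__name__)

    logger.info('written to logs/logs.log: INFO >= configured level')
    logger.debug('filtered out by default: DEBUG < INFO')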

@ -6,11 +6,14 @@ import os
import configparser
import json
import datetime
import logging
import redis
import util
import users_helper
KEYDAY = "CONTRIB_DAY" # To be used by other module
logging.basicConfig(filename='logs/logs.log', filemode='w', level=logging.INFO)
logger = logging.getLogger(__name__)
class Contributor_helper:
def __init__(self, serv_redis_db, cfg):
@ -95,10 +98,12 @@ class Contributor_helper:
today_str = util.getDateStrFormat(date)
keyname = "{}:{}:{}".format(self.keyCateg, today_str, categ)
self.serv_redis_db.zincrby(keyname, org, count)
logger.debug('Added to redis: keyname={}, org={}, count={}'.format(keyname, org, count))
def publish_log(self, zmq_name, name, content, channel=""):
to_send = { 'name': name, 'log': json.dumps(content), 'zmqName': zmq_name }
self.serv_log.publish(channel, json.dumps(to_send))
logger.debug('Published: {}'.format(json.dumps(to_send)))
''' HANDLER '''
#pntMultiplier if one contribution rewards more than others. (e.g. sighting may give more points than editing)
@ -111,7 +116,7 @@ class Contributor_helper:
nowSec = int(time.time())
pnts_to_add = self.default_pnts_per_contribution
# if there is a contribution, there is a login (even if ti comes from the API)
# if there is a contribution, there is a login (even if it comes from the API)
self.users_helper.add_user_login(nowSec, org)
# is a valid contribution
@ -133,6 +138,7 @@ class Contributor_helper:
keyname = "{}:{}".format(self.keyLastContrib, util.getDateStrFormat(now))
self.serv_redis_db.zadd(keyname, nowSec, org)
logger.debug('Added to redis: keyname={}, nowSec={}, org={}'.format(keyname, nowSec, org))
self.serv_redis_db.expire(keyname, util.ONE_DAY*7) #expire after 7 day
awards_given = self.updateOrgContributionRank(org, pnts_to_add, action, contribType, eventTime=datetime.datetime.now(), isLabeled=isLabeled, categ=util.noSpaceLower(categ))
@ -141,6 +147,7 @@ class Contributor_helper:
# update awards given
keyname = "{}:{}".format(self.keyLastAward, util.getDateStrFormat(now))
self.serv_redis_db.zadd(keyname, nowSec, json.dumps({'org': org, 'award': award, 'epoch': nowSec }))
logger.debug('Added to redis: keyname={}, nowSec={}, content={}'.format(keyname, nowSec, json.dumps({'org': org, 'award': award, 'epoch': nowSec })))
self.serv_redis_db.expire(keyname, util.ONE_DAY*7) #expire after 7 day
# publish
self.publish_log(zmq_name, 'CONTRIBUTION', {'org': org, 'award': award, 'epoch': nowSec }, channel=self.CHANNEL_LASTAWARDS)
@ -214,14 +221,17 @@ class Contributor_helper:
if contribType == 'Attribute':
attributeWeekCount = self.serv_redis_db.incrby(keyname.format(org=orgName, orgCateg='ATTR_WEEK_COUNT'), 1)
self.serv_redis_db.expire(keyname.format(org=orgName, orgCateg='ATTR_WEEK_COUNT'), util.ONE_DAY*7)
logger.debug('Incrby: keyname={}'.format(keyname.format(org=orgName, orgCateg='ATTR_WEEK_COUNT')))
if contribType == 'Proposal':
proposalWeekCount = self.serv_redis_db.incrby(keyname.format(org=orgName, orgCateg='PROP_WEEK_COUNT'), 1)
logger.debug('Incrby: keyname={}'.format(keyname.format(org=orgName, orgCateg='PROP_WEEK_COUNT')))
self.serv_redis_db.expire(keyname.format(org=orgName, orgCateg='PROP_WEEK_COUNT'), util.ONE_DAY*7)
addContributionToCateg(datetime.datetime.now(), 'proposal')
if contribType == 'Sighting':
sightingWeekCount = self.serv_redis_db.incrby(keyname.format(org=orgName, orgCateg='SIGHT_WEEK_COUNT'), 1)
logger.debug('Incrby: keyname={}'.format(keyname.format(org=orgName, orgCateg='SIGHT_WEEK_COUNT')))
self.serv_redis_db.expire(keyname.format(org=orgName, orgCateg='SIGHT_WEEK_COUNT'), util.ONE_DAY*7)
self.addContributionToCateg(datetime.datetime.now(), 'sighting', orgName)
@ -230,9 +240,11 @@ class Contributor_helper:
if contribType == 'Event':
eventWeekCount = self.serv_redis_db.incrby(keyname.format(org=orgName, orgCateg='EVENT_WEEK_COUNT'), 1)
logger.debug('Incrby: keyname={}'.format(keyname.format(org=orgName, orgCateg='EVENT_WEEK_COUNT')))
self.serv_redis_db.expire(keyname.format(org=orgName, orgCateg='EVENT_WEEK_COUNT'), util.ONE_DAY*7)
eventMonthCount = self.serv_redis_db.incrby(keyname.format(org=orgName, orgCateg='EVENT_MONTH_COUNT'), 1)
logger.debug('Incrby: keyname={}'.format(keyname.format(org=orgName, orgCateg='EVENT_MONTH_COUNT')))
self.serv_redis_db.expire(keyname.format(org=orgName, orgCateg='EVENT_MONTH_COUNT'), util.ONE_DAY*7)
# getRequirement parameters
@ -275,6 +287,7 @@ class Contributor_helper:
for rankReq, ttl in contrib:
self.serv_redis_db.set(keyname.format(org=orgName, orgCateg='CONTRIB_REQ_'+str(rankReq)), 1)
logger.debug('Set: keyname={}'.format(keyname.format(org=orgName, orgCateg='CONTRIB_REQ_'+str(rankReq))))
self.serv_redis_db.expire(keyname.format(org=orgName, orgCateg='CONTRIB_REQ_'+str(rankReq)), ttl)
ContributionStatus = self.getCurrentContributionStatus(orgName)
@ -322,10 +335,12 @@ class Contributor_helper:
def giveBadgeToOrg(self, org, badgeNum):
keyname = '{mainKey}:{org}:{orgCateg}'
self.serv_redis_db.set(keyname.format(mainKey=self.keyContribReq, org=org, orgCateg='BADGE_'+str(badgeNum)), 1)
logger.debug('Giving badge {} to org {}'.format(badgeNum, org))
def removeBadgeFromOrg(self, org, badgeNum):
keyname = '{mainKey}:{org}:{orgCateg}'
self.serv_redis_db.delete(keyname.format(mainKey=self.keyContribReq, org=org, orgCateg='BADGE_'+str(badgeNum)))
logger.debug('Removing badge {} from org {}'.format(badgeNum, org))
''' TROPHIES '''
def getOrgTrophies(self, org):
@ -360,7 +375,6 @@ class Contributor_helper:
def posToRankMapping(self, pos, totNum):
mapping = self.trophyMapping
mapping_num = [math.ceil(float(float(totNum*i)/float(100))) for i in mapping]
# print(pos, totNum)
if pos == 0: #first
position = 1
else:
@ -377,10 +391,12 @@ class Contributor_helper:
def giveTrophyPointsToOrg(self, org, categ, points):
keyname = '{mainKey}:{orgCateg}'
self.serv_redis_db.zincrby(keyname.format(mainKey=self.keyTrophy, orgCateg=categ), org, points)
logger.debug('Giving {} trophy points to {} in {}'.format(points, org, categ))
def removeTrophyPointsFromOrg(self, org, categ, points):
keyname = '{mainKey}:{orgCateg}'
self.serv_redis_db.zincrby(keyname.format(mainKey=self.keyTrophy, orgCateg=categ), org, -points)
logger.debug('Removing {} trophy points from {} in {}'.format(points, org, categ))
''' AWARDS HELPER '''
def getLastAwardsFromRedis(self):

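publish_log() above serialises each entry to JSON and pushes it onto a Redis pub/sub channel, in addition to the new logger.debug trace. A hypothetical consumer for such a channel could look like the sketch below; host, port, db and the channel name are placeholders, not values read from config.cfg:

    # Hedged sketch of a subscriber for the entries published by publish_log();
    # connection details and channel name are assumptions.
    import json
    import redis

    r = redis.StrictRedis(host='localhost', port=6379, db=1)
    pubsub = r.pubsub(ignore_subscribe_messages=True)
    pubsub.subscribe('lastAwards')                 # placeholder channel name

    for message in pubsub.listen():
        entry = json.loads(message['data'].decode('utf8'))
        # 'log' is itself JSON-encoded by publish_log(), so decode it again
        print(entry['zmqName'], entry['name'], json.loads(entry['log']))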

@ -2,6 +2,7 @@ import math, random
import os
import json
import datetime, time
import logging
import json
import redis
from collections import OrderedDict
@ -12,6 +13,9 @@ from phonenumbers import geocoder
import util
logging.basicConfig(filename='logs/logs.log', filemode='w', level=logging.INFO)
logger = logging.getLogger(__name__)
class Geo_helper:
def __init__(self, serv_redis_db, cfg):
self.serv_redis_db = serv_redis_db
@ -102,21 +106,22 @@ class Geo_helper:
"regionCode": rep['full_rep'].country.iso_code,
}
self.serv_coord.publish(self.CHANNELDISP, json.dumps(to_send))
logger.info('Published: {}'.format(json.dumps(to_send)))
except ValueError:
print("can't resolve ip")
logger.warning("can't resolve ip")
except geoip2.errors.AddressNotFoundError:
print("Address not in Database")
logger.warning("Address not in Database")
def getCoordFromPhoneAndPublish(self, phoneNumber, categ):
try:
rep = phonenumbers.parse(phoneNumber, None)
if not (phonenumbers.is_valid_number(rep) or phonenumbers.is_possible_number(rep)):
print("Phone number not valid")
logger.warning("Phone number not valid")
return
country_name = geocoder.country_name_for_number(rep, "en")
country_code = self.country_to_iso[country_name]
if country_code is None:
print("Non matching ISO_CODE")
logger.warning("Non matching ISO_CODE")
return
coord = self.country_code_to_coord[country_code.lower()] # countrycode is in upper case
coord_dic = {'lat': coord['lat'], 'lon': coord['long']}
@ -141,8 +146,9 @@ class Geo_helper:
"regionCode": country_code,
}
self.serv_coord.publish(self.CHANNELDISP, json.dumps(to_send))
logger.info('Published: {}'.format(json.dumps(to_send)))
except phonenumbers.NumberParseException:
print("Can't resolve phone number country")
logger.warning("Can't resolve phone number country")
''' UTIL '''
def push_to_redis_geo(self, keyCateg, lon, lat, content):
@ -150,12 +156,13 @@ class Geo_helper:
today_str = util.getDateStrFormat(now)
keyname = "{}:{}".format(keyCateg, today_str)
self.serv_redis_db.geoadd(keyname, lon, lat, content)
logger.debug('Added to redis: keyname={}, lon={}, lat={}, content={}'.format(keyname, lon, lat, content))
def push_to_redis_zset(self, keyCateg, toAdd, endSubkey="", count=1):
now = datetime.datetime.now()
today_str = util.getDateStrFormat(now)
keyname = "{}:{}{}".format(keyCateg, today_str, endSubkey)
self.serv_redis_db.zincrby(keyname, toAdd, count)
logger.debug('Added to redis: keyname={}, toAdd={}, count={}'.format(keyname, toAdd, count))
def ip_to_coord(self, ip):
resp = self.reader.city(ip)

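The new 'Added to redis' traces in push_to_redis_geo() and push_to_redis_zset() are emitted at DEBUG, one level below the INFO threshold set by basicConfig, so they stay silent by default. If they are needed, the level of this module's logger can be lowered without touching the other helpers; the sketch assumes the module is imported as geo_helper, as in the server code further down:

    # Lower only geo_helper's logger to DEBUG; the file handler created by
    # basicConfig has no level of its own, so these records still reach
    # logs/logs.log while the other modules keep the INFO threshold.
    import logging

    logging.getLogger('geo_helper').setLevel(logging.DEBUG)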

@ -8,6 +8,7 @@ from time import gmtime as now
from time import sleep, strftime
import datetime
import os
import logging
import util
import geo_helper
@ -19,6 +20,9 @@ configfile = os.path.join(os.environ['DASH_CONFIG'], 'config.cfg')
cfg = configparser.ConfigParser()
cfg.read(configfile)
logging.basicConfig(filename='logs/logs.log', filemode='w', level=logging.INFO)
logger = logging.getLogger(__name__)
app = Flask(__name__)
redis_server_log = redis.StrictRedis(
@ -99,8 +103,8 @@ class EventMessage():
msg = msg.decode('utf8')
try:
jsonMsg = json.loads(msg)
except json.JSONDecodeError:
print('json decode error')
except json.JSONDecodeError as e:
logger.error(e)
jsonMsg = { 'name': "undefined" ,'log': json.loads(msg) }
self.feedName = jsonMsg['name']

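The EventMessage constructor now logs the JSONDecodeError itself instead of printing a fixed string before falling back to a wrapper object. A condensed, self-contained sketch of that decode-and-log pattern (parse_feed_msg is a hypothetical helper, and this fallback keeps the raw string rather than re-parsing it):

    # Hedged sketch of the decode-and-log fallback used above.
    import json
    import logging

    logger = logging.getLogger(__name__)

    def parse_feed_msg(msg):
        try:
            return json.loads(msg)
        except json.JSONDecodeError as e:
            logger.error(e)   # e.g. "Expecting value: line 1 column 1 (char 0)"
            return {'name': 'undefined', 'log': msg}

    parse_feed_msg('{"name": "Attribute"}')   # parsed normally
    parse_feed_msg('not json')                # logged, wrapped as undefined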

@ -2,10 +2,14 @@ import math, random
import os
import json
import datetime, time
import logging
from collections import OrderedDict
import util
logging.basicConfig(filename='logs/logs.log', filemode='w', level=logging.INFO)
logger = logging.getLogger(__name__)
class Trendings_helper:
def __init__(self, serv_redis_db, cfg):
self.serv_redis_db = serv_redis_db
@ -30,6 +34,7 @@ class Trendings_helper:
else:
to_save = data
self.serv_redis_db.zincrby(keyname, to_save, 1)
logger.debug('Added to redis: keyname={}, content={}'.format(keyname, to_save))
def addTrendingEvent(self, eventName, timestamp):
self.addGenericTrending(self.keyEvent, eventName, timestamp)
@ -53,12 +58,14 @@ class Trendings_helper:
timestampDate_str = util.getDateStrFormat(timestampDate)
keyname = "{}:{}".format(self.keySigh, timestampDate_str)
self.serv_redis_db.incrby(keyname, 1)
logger.debug('Incrby: keyname={}'.format(keyname))
def addFalsePositive(self, timestamp):
timestampDate = datetime.datetime.fromtimestamp(float(timestamp))
timestampDate_str = util.getDateStrFormat(timestampDate)
keyname = "{}:{}".format(self.keyFalse, timestampDate_str)
self.serv_redis_db.incrby(keyname, 1)
logger.debug('Incrby: keyname={}'.format(keyname))
''' GETTER '''

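addGenericTrending() accumulates scores in one sorted set per day, while the sighting and false-positive counters use plain INCRBY on daily keys; the new debug lines record exactly which key was touched. A hypothetical read-back of one such daily sorted set (key prefix and date format are placeholders, since both come from the configuration and the util helpers):

    # Hedged sketch: list the top entries of one day's trending sorted set.
    import redis

    r = redis.StrictRedis(host='localhost', port=6379, db=1)
    for name, score in r.zrevrange('TRENDINGS_EVENTS:20171204', 0, 9, withscores=True):
        print(name.decode('utf8'), int(score))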

@ -2,10 +2,14 @@ import math, random
import os
import json
import datetime, time
import logging
import util
import contributor_helper
logging.basicConfig(filename='logs/logs.log', filemode='w', level=logging.INFO)
logger = logging.getLogger(__name__)
class Users_helper:
def __init__(self, serv_redis_db, cfg):
self.serv_redis_db = serv_redis_db
@ -21,6 +25,7 @@ class Users_helper:
timestampDate_str = util.getDateHoursStrFormat(timestampDate)
keyname_timestamp = "{}:{}".format(self.keyTimestampSet, timestampDate_str)
self.serv_redis_db.sadd(keyname_timestamp, org)
logger.debug('Added to redis: keyname={}, org={}'.format(keyname_timestamp, org))
self.serv_redis_db.expire(keyname_timestamp, 60*60)
def hasAlreadyBeenAdded(self, org, timestamp):
@ -39,10 +44,12 @@ class Users_helper:
if not self.hasAlreadyBeenAdded(org, timestamp):
keyname_timestamp = "{}:{}".format(self.keyTimestamp, timestampDate_str)
self.serv_redis_db.sadd(keyname_timestamp, timestamp)
logger.debug('Added to redis: keyname={}, timestamp={}'.format(keyname_timestamp, timestamp))
self.addTemporary(org, timestamp)
keyname_org = "{}:{}".format(self.keyOrgLog, timestampDate_str)
self.serv_redis_db.zincrby(keyname_org, org, 1)
logger.debug('Added to redis: keyname={}, org={}'.format(keyname_org, org))
def getUserLogins(self, date):
keyname = "{}:{}".format(self.keyTimestamp, util.getDateStrFormat(date))

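add_user_login() writes each organisation into an hourly set that expires after 60*60 seconds, so the still-living sets effectively list the organisations seen in the last hour; the new debug lines make those writes visible. A hypothetical inspection of one such key (prefix and hour format are placeholders):

    # Hedged sketch: inspect one hourly login set and its remaining lifetime.
    import redis

    r = redis.StrictRedis(host='localhost', port=6379, db=1)
    key = 'LOGIN_TIMESTAMPSET:2017120416'      # placeholder key name
    print(r.ttl(key))                          # seconds before expiry, -2 if already gone
    print([org.decode('utf8') for org in r.smembers(key)])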

@ -2,7 +2,7 @@
import time, datetime
import copy
from pprint import pprint
import logging
import zmq
import redis
import random
@ -22,6 +22,9 @@ configfile = os.path.join(os.environ['DASH_CONFIG'], 'config.cfg')
cfg = configparser.ConfigParser()
cfg.read(configfile)
logging.basicConfig(filename='logs/logs.log', filemode='w', level=logging.INFO)
logger = logging.getLogger(__name__)
CHANNEL = cfg.get('RedisLog', 'channel')
LISTNAME = cfg.get('RedisLIST', 'listName')
@ -47,6 +50,7 @@ trendings_helper = trendings_helper.Trendings_helper(serv_redis_db, cfg)
def publish_log(zmq_name, name, content, channel=CHANNEL):
to_send = { 'name': name, 'log': json.dumps(content), 'zmqName': zmq_name }
serv_log.publish(channel, json.dumps(to_send))
logger.debug('Published: {}'.format(json.dumps(to_send)))
def getFields(obj, fields):
jsonWalker = fields.split('.')
@ -68,7 +72,7 @@ def getFields(obj, fields):
##############
def handler_log(zmq_name, jsonevent):
print('sending', 'log')
logger.info('Log not processed')
return
def handler_dispatcher(zmq_name, jsonObj):
@ -76,12 +80,12 @@ def handler_dispatcher(zmq_name, jsonObj):
handler_event(zmq_name, jsonObj)
def handler_keepalive(zmq_name, jsonevent):
print('sending', 'keepalive')
logger.info('Handling keepalive')
to_push = [ jsonevent['uptime'] ]
publish_log(zmq_name, 'Keepalive', to_push)
def handler_user(zmq_name, jsondata):
print('sending', 'user')
logger.info('Handling user')
action = jsondata['action']
json_user = jsondata['User']
json_org = jsondata['Organisation']
@ -93,11 +97,11 @@ def handler_user(zmq_name, jsondata):
pass
def handler_conversation(zmq_name, jsonevent):
logger.info('Handling conversation')
try: #only consider POST, not THREAD
jsonpost = jsonevent['Post']
except KeyError:
return
print('sending' ,'Post')
except KeyError as e:
logger.error('Error in handler_conversation: {}'.format(e))
org = jsonpost['org_name']
categ = None
action = 'add'
@ -112,11 +116,11 @@ def handler_conversation(zmq_name, jsonevent):
trendings_helper.addTrendingDisc(eventName, nowSec)
def handler_object(zmq_name, jsondata):
print('obj')
logger.info('Handling object')
return
def handler_sighting(zmq_name, jsondata):
print('sending' ,'sighting')
logger.info('Handling sighting')
jsonsight = jsondata['Sighting']
org = jsonsight['Event']['Orgc']['name']
categ = jsonsight['Attribute']['category']
@ -132,6 +136,7 @@ def handler_sighting(zmq_name, jsondata):
trendings_helper.addFalsePositive(timestamp)
def handler_event(zmq_name, jsonobj):
logger.info('Handling event')
#fields: threat_level_id, id, info
jsonevent = jsonobj['Event']
@ -170,6 +175,7 @@ def handler_event(zmq_name, jsonobj):
isLabeled=eventLabeled)
def handler_attribute(zmq_name, jsonobj, hasAlreadyBeenContributed=False):
logger.info('Handling attribute')
# check if jsonattr is an attribute object
if 'Attribute' in jsonobj:
jsonattr = jsonobj['Attribute']
@ -227,7 +233,7 @@ def process_log(zmq_name, event):
try:
dico_action[topic](zmq_name, jsonevent)
except KeyError as e:
print(e)
logger.error(e)
def main(sleeptime):
@ -235,7 +241,7 @@ def main(sleeptime):
while True:
content = serv_list.rpop(LISTNAME)
if content is None:
print('Processed', numMsg, 'message(s) since last sleep.')
logger.info('Processed {} message(s) since last sleep.'.format(numMsg))
numMsg = 0
time.sleep(sleeptime)
continue

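process_log() looks the ZMQ topic up in dico_action and now logs the KeyError instead of printing it, so unknown topics leave a trace in logs/logs.log. A condensed sketch of that dispatch-and-log pattern with an illustrative handler table (the real mapping and topic names live in the script above):

    # Hedged sketch of the topic dispatch used by process_log(); the table
    # below is illustrative, not the real dico_action.
    import logging

    logger = logging.getLogger(__name__)

    def handler_keepalive(zmq_name, jsonevent):
        logger.info('Handling keepalive')

    dico_action = {'misp_json_self': handler_keepalive}

    def process_log(zmq_name, topic, jsonevent):
        try:
            dico_action[topic](zmq_name, jsonevent)
        except KeyError as e:
            logger.error(e)   # unknown topic: logged instead of printed

    process_log('misp_instance', 'misp_json_self', {'uptime': 42})
    process_log('misp_instance', 'no_such_topic', {})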

@ -1,8 +1,8 @@
#!/usr/bin/env python3.5
import time, datetime
from pprint import pprint
import zmq
import logging
import redis
import configparser
import argparse
@ -14,6 +14,9 @@ configfile = os.path.join(os.environ['DASH_CONFIG'], 'config.cfg')
cfg = configparser.ConfigParser()
cfg.read(configfile)
logging.basicConfig(filename='logs/logs.log', filemode='w', level=logging.INFO)
logger = logging.getLogger(__name__)
ZMQ_URL = cfg.get('RedisGlobal', 'zmq_url')
CHANNEL = cfg.get('RedisLog', 'channel')
LISTNAME = cfg.get('RedisLIST', 'listName')
@ -32,6 +35,7 @@ def put_in_redis_list(zmq_name, content):
content = content.decode('utf8')
to_add = {'zmq_name': zmq_name, 'content': content}
serv_list.lpush(LISTNAME, json.dumps(to_add))
logger.debug('Pushed: {}'.format(json.dumps(to_add)))
def main(zmqName):
context = zmq.Context()
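put_in_redis_list() buffers every ZMQ message as JSON on a Redis list that zmq_dispatcher later pops, and the new debug line records each push. A self-contained sketch of that subscribe-and-buffer loop; the ZMQ URL, zmq_name, list name and Redis connection details are placeholders rather than values from config.cfg:

    # Hedged sketch of the subscribe-and-buffer loop; all endpoints are placeholders.
    import json
    import logging
    import redis
    import zmq

    logger = logging.getLogger(__name__)

    context = zmq.Context()
    socket = context.socket(zmq.SUB)
    socket.connect('tcp://localhost:50000')        # placeholder ZMQ_URL
    socket.setsockopt_string(zmq.SUBSCRIBE, '')    # subscribe to every topic

    serv_list = redis.StrictRedis(host='localhost', port=6379, db=1)

    while True:
        content = socket.recv().decode('utf8')
        to_add = {'zmq_name': 'misp_instance', 'content': content}
        serv_list.lpush('bufferList', json.dumps(to_add))   # placeholder list name
        logger.debug('Pushed: {}'.format(json.dumps(to_add)))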