From 1386e58cf8cad9837e3ffda864c04c28339e40a4 Mon Sep 17 00:00:00 2001 From: Sami Mokaddem Date: Mon, 4 Dec 2017 16:44:44 +0100 Subject: [PATCH 1/6] Started integration of Logging --- contributor_helper.py | 20 ++++++++++++++++++-- geo_helper.py | 19 +++++++++++++------ server.py | 8 ++++++-- trendings_helper.py | 7 +++++++ users_helper.py | 7 +++++++ zmq_dispatcher.py | 28 +++++++++++++++++----------- zmq_subscriber.py | 6 +++++- 7 files changed, 73 insertions(+), 22 deletions(-) diff --git a/contributor_helper.py b/contributor_helper.py index 6924218..0c51af3 100644 --- a/contributor_helper.py +++ b/contributor_helper.py @@ -6,11 +6,14 @@ import os import configparser import json import datetime +import logging import redis import util import users_helper KEYDAY = "CONTRIB_DAY" # To be used by other module +logging.basicConfig(filename='logs/logs.log', filemode='w', level=logging.INFO) +logger = logging.getLogger(__name__) class Contributor_helper: def __init__(self, serv_redis_db, cfg): @@ -95,10 +98,12 @@ class Contributor_helper: today_str = util.getDateStrFormat(date) keyname = "{}:{}:{}".format(self.keyCateg, today_str, categ) self.serv_redis_db.zincrby(keyname, org, count) + logger.debug('Added to redis: keyname={}, org={}, count={}'.format(keyname, org, count)) def publish_log(self, zmq_name, name, content, channel=""): to_send = { 'name': name, 'log': json.dumps(content), 'zmqName': zmq_name } self.serv_log.publish(channel, json.dumps(to_send)) + logger.debug('Published: {}'.format(json.dumps(to_send))) ''' HANDLER ''' #pntMultiplier if one contribution rewards more than others. (e.g. shighting may gives more points than editing) @@ -111,7 +116,7 @@ class Contributor_helper: nowSec = int(time.time()) pnts_to_add = self.default_pnts_per_contribution - # if there is a contribution, there is a login (even if ti comes from the API) + # if there is a contribution, there is a login (even if it comes from the API) self.users_helper.add_user_login(nowSec, org) # is a valid contribution @@ -133,6 +138,7 @@ class Contributor_helper: keyname = "{}:{}".format(self.keyLastContrib, util.getDateStrFormat(now)) self.serv_redis_db.zadd(keyname, nowSec, org) + logger.debug('Added to redis: keyname={}, nowSec={}, org={}'.format(keyname, nowSec, org)) self.serv_redis_db.expire(keyname, util.ONE_DAY*7) #expire after 7 day awards_given = self.updateOrgContributionRank(org, pnts_to_add, action, contribType, eventTime=datetime.datetime.now(), isLabeled=isLabeled, categ=util.noSpaceLower(categ)) @@ -141,6 +147,7 @@ class Contributor_helper: # update awards given keyname = "{}:{}".format(self.keyLastAward, util.getDateStrFormat(now)) self.serv_redis_db.zadd(keyname, nowSec, json.dumps({'org': org, 'award': award, 'epoch': nowSec })) + logger.debug('Added to redis: keyname={}, nowSec={}, content={}'.format(keyname, nowSec, json.dumps({'org': org, 'award': award, 'epoch': nowSec }))) self.serv_redis_db.expire(keyname, util.ONE_DAY*7) #expire after 7 day # publish self.publish_log(zmq_name, 'CONTRIBUTION', {'org': org, 'award': award, 'epoch': nowSec }, channel=self.CHANNEL_LASTAWARDS) @@ -214,14 +221,17 @@ class Contributor_helper: if contribType == 'Attribute': attributeWeekCount = self.serv_redis_db.incrby(keyname.format(org=orgName, orgCateg='ATTR_WEEK_COUNT'), 1) self.serv_redis_db.expire(keyname.format(org=orgName, orgCateg='ATTR_WEEK_COUNT'), util.ONE_DAY*7) + logger.debug('Incrby: keyname={}'.format(keyname.format(org=orgName, orgCateg='ATTR_WEEK_COUNT'))) if contribType == 'Proposal': 
proposalWeekCount = self.serv_redis_db.incrby(keyname.format(org=orgName, orgCateg='PROP_WEEK_COUNT'), 1) + logger.debug('Incrby: keyname={}'.format(keyname.format(org=orgName, orgCateg='PROP_WEEK_COUNT'))) self.serv_redis_db.expire(keyname.format(org=orgName, orgCateg='PROP_WEEK_COUNT'), util.ONE_DAY*7) addContributionToCateg(datetime.datetime.now(), 'proposal') if contribType == 'Sighting': sightingWeekCount = self.serv_redis_db.incrby(keyname.format(org=orgName, orgCateg='SIGHT_WEEK_COUNT'), 1) + logger.debug('Incrby: keyname={}'.format(keyname.format(org=orgName, orgCateg='SIGHT_WEEK_COUNT'))) self.serv_redis_db.expire(keyname.format(org=orgName, orgCateg='SIGHT_WEEK_COUNT'), util.ONE_DAY*7) self.addContributionToCateg(datetime.datetime.now(), 'sighting', orgName) @@ -230,9 +240,11 @@ class Contributor_helper: if contribType == 'Event': eventWeekCount = self.serv_redis_db.incrby(keyname.format(org=orgName, orgCateg='EVENT_WEEK_COUNT'), 1) + logger.debug('Incrby: keyname={}'.format(keyname.format(org=orgName, orgCateg='EVENT_WEEK_COUNT'))) self.serv_redis_db.expire(keyname.format(org=orgName, orgCateg='EVENT_WEEK_COUNT'), util.ONE_DAY*7) eventMonthCount = self.serv_redis_db.incrby(keyname.format(org=orgName, orgCateg='EVENT_MONTH_COUNT'), 1) + logger.debug('Incrby: keyname={}'.format(keyname.format(org=orgName, orgCateg='EVENT_MONTH_COUNT'))) self.serv_redis_db.expire(keyname.format(org=orgName, orgCateg='EVENT_MONTH_COUNT'), util.ONE_DAY*7) # getRequirement parameters @@ -275,6 +287,7 @@ class Contributor_helper: for rankReq, ttl in contrib: self.serv_redis_db.set(keyname.format(org=orgName, orgCateg='CONTRIB_REQ_'+str(rankReq)), 1) + logger.debug('Set: keyname={}'.format(keyname.format(org=orgName, orgCateg='CONTRIB_REQ_'+str(rankReq)))) self.serv_redis_db.expire(keyname.format(org=orgName, orgCateg='CONTRIB_REQ_'+str(rankReq)), ttl) ContributionStatus = self.getCurrentContributionStatus(orgName) @@ -322,10 +335,12 @@ class Contributor_helper: def giveBadgeToOrg(self, org, badgeNum): keyname = '{mainKey}:{org}:{orgCateg}' self.serv_redis_db.set(keyname.format(mainKey=self.keyContribReq, org=org, orgCateg='BADGE_'+str(badgeNum)), 1) + logger.debug('Giving badge {} to org {}'.format(org, badgeNum)) def removeBadgeFromOrg(self, org, badgeNum): keyname = '{mainKey}:{org}:{orgCateg}' self.serv_redis_db.delete(keyname.format(mainKey=self.keyContribReq, org=org, orgCateg='BADGE_'+str(badgeNum))) + logger.debug('Removing badge {} from org {}'.format(org, badgeNum)) ''' TROPHIES ''' def getOrgTrophies(self, org): @@ -360,7 +375,6 @@ class Contributor_helper: def posToRankMapping(self, pos, totNum): mapping = self.trophyMapping mapping_num = [math.ceil(float(float(totNum*i)/float(100))) for i in mapping] - # print(pos, totNum) if pos == 0: #first position = 1 else: @@ -377,10 +391,12 @@ class Contributor_helper: def giveTrophyPointsToOrg(self, org, categ, points): keyname = '{mainKey}:{orgCateg}' self.serv_redis_db.zincrby(keyname.format(mainKey=self.keyTrophy, orgCateg=categ), org, points) + logger.debug('Giving {} trophy points to {} in {}'.format(points, org, categ)) def removeTrophyPointsFromOrg(self, org, categ, points): keyname = '{mainKey}:{orgCateg}' self.serv_redis_db.zincrby(keyname.format(mainKey=self.keyTrophy, orgCateg=categ), org, -points) + logger.debug('Removing {} trophy points from {} in {}'.format(points, org, categ)) ''' AWARDS HELPER ''' def getLastAwardsFromRedis(self): diff --git a/geo_helper.py b/geo_helper.py index 0a7f2da..17d7241 100644 --- a/geo_helper.py +++ 
b/geo_helper.py @@ -2,6 +2,7 @@ import math, random import os import json import datetime, time +import logging import json import redis from collections import OrderedDict @@ -12,6 +13,9 @@ from phonenumbers import geocoder import util +logging.basicConfig(filename='logs/logs.log', filemode='w', level=logging.INFO) +logger = logging.getLogger(__name__) + class Geo_helper: def __init__(self, serv_redis_db, cfg): self.serv_redis_db = serv_redis_db @@ -102,21 +106,22 @@ class Geo_helper: "regionCode": rep['full_rep'].country.iso_code, } self.serv_coord.publish(self.CHANNELDISP, json.dumps(to_send)) + logger.info('Published: {}'.format(json.dumps(to_send))) except ValueError: - print("can't resolve ip") + logger.warning("can't resolve ip") except geoip2.errors.AddressNotFoundError: - print("Address not in Database") + logger.warning("Address not in Database") def getCoordFromPhoneAndPublish(self, phoneNumber, categ): try: rep = phonenumbers.parse(phoneNumber, None) if not (phonenumbers.is_valid_number(rep) or phonenumbers.is_possible_number(rep)): - print("Phone number not valid") + logger.warning("Phone number not valid") return country_name = geocoder.country_name_for_number(rep, "en") country_code = self.country_to_iso[country_name] if country_code is None: - print("Non matching ISO_CODE") + logger.warning("Non matching ISO_CODE") return coord = self.country_code_to_coord[country_code.lower()] # countrycode is in upper case coord_dic = {'lat': coord['lat'], 'lon': coord['long']} @@ -141,8 +146,9 @@ class Geo_helper: "regionCode": country_code, } self.serv_coord.publish(self.CHANNELDISP, json.dumps(to_send)) + logger.info('Published: {}'.format(json.dumps(to_send))) except phonenumbers.NumberParseException: - print("Can't resolve phone number country") + logger.warning("Can't resolve phone number country") ''' UTIL ''' def push_to_redis_geo(self, keyCateg, lon, lat, content): @@ -150,12 +156,13 @@ class Geo_helper: today_str = util.getDateStrFormat(now) keyname = "{}:{}".format(keyCateg, today_str) self.serv_redis_db.geoadd(keyname, lon, lat, content) + logger.debug('Added to redis: keyname={}, lon={}, lat={}, content={}'.format(keyname, lon, lat, content)) def push_to_redis_zset(self, keyCateg, toAdd, endSubkey="", count=1): now = datetime.datetime.now() today_str = util.getDateStrFormat(now) keyname = "{}:{}{}".format(keyCateg, today_str, endSubkey) self.serv_redis_db.zincrby(keyname, toAdd, count) - + logger.debug('Added to redis: keyname={}, toAdd={}, count={}'.format(keyname, toAdd, count)) def ip_to_coord(self, ip): resp = self.reader.city(ip) diff --git a/server.py b/server.py index 6866f60..3aec17d 100755 --- a/server.py +++ b/server.py @@ -8,6 +8,7 @@ from time import gmtime as now from time import sleep, strftime import datetime import os +import logging import util import geo_helper @@ -19,6 +20,9 @@ configfile = os.path.join(os.environ['DASH_CONFIG'], 'config.cfg') cfg = configparser.ConfigParser() cfg.read(configfile) +logging.basicConfig(filename='logs/logs.log', filemode='w', level=logging.INFO) +logger = logging.getLogger(__name__) + app = Flask(__name__) redis_server_log = redis.StrictRedis( @@ -99,8 +103,8 @@ class EventMessage(): msg = msg.decode('utf8') try: jsonMsg = json.loads(msg) - except json.JSONDecodeError: - print('json decode error') + except json.JSONDecodeError as e: + logger.error(e) jsonMsg = { 'name': "undefined" ,'log': json.loads(msg) } self.feedName = jsonMsg['name'] diff --git a/trendings_helper.py b/trendings_helper.py index 19c5dd6..b99803e 100644 --- 
a/trendings_helper.py +++ b/trendings_helper.py @@ -2,10 +2,14 @@ import math, random import os import json import datetime, time +import logging from collections import OrderedDict import util +logging.basicConfig(filename='logs/logs.log', filemode='w', level=logging.INFO) +logger = logging.getLogger(__name__) + class Trendings_helper: def __init__(self, serv_redis_db, cfg): self.serv_redis_db = serv_redis_db @@ -30,6 +34,7 @@ class Trendings_helper: else: to_save = data self.serv_redis_db.zincrby(keyname, to_save, 1) + logger.debug('Added to redis: keyname={}, content={}'.format(keyname, to_save)) def addTrendingEvent(self, eventName, timestamp): self.addGenericTrending(self.keyEvent, eventName, timestamp) @@ -53,12 +58,14 @@ class Trendings_helper: timestampDate_str = util.getDateStrFormat(timestampDate) keyname = "{}:{}".format(self.keySigh, timestampDate_str) self.serv_redis_db.incrby(keyname, 1) + logger.debug('Incrby: keyname={}'.format(keyname)) def addFalsePositive(self, timestamp): timestampDate = datetime.datetime.fromtimestamp(float(timestamp)) timestampDate_str = util.getDateStrFormat(timestampDate) keyname = "{}:{}".format(self.keyFalse, timestampDate_str) self.serv_redis_db.incrby(keyname, 1) + logger.debug('Incrby: keyname={}'.format(keyname)) ''' GETTER ''' diff --git a/users_helper.py b/users_helper.py index 12576a3..0a3cda6 100644 --- a/users_helper.py +++ b/users_helper.py @@ -2,10 +2,14 @@ import math, random import os import json import datetime, time +import logging import util import contributor_helper +logging.basicConfig(filename='logs/logs.log', filemode='w', level=logging.INFO) +logger = logging.getLogger(__name__) + class Users_helper: def __init__(self, serv_redis_db, cfg): self.serv_redis_db = serv_redis_db @@ -21,6 +25,7 @@ class Users_helper: timestampDate_str = util.getDateHoursStrFormat(timestampDate) keyname_timestamp = "{}:{}".format(self.keyTimestampSet, timestampDate_str) self.serv_redis_db.sadd(keyname_timestamp, org) + logger.debug('Added to redis: keyname={}, org={}'.format(keyname_timestamp, org)) self.serv_redis_db.expire(keyname_timestamp, 60*60) def hasAlreadyBeenAdded(self, org, timestamp): @@ -39,10 +44,12 @@ class Users_helper: if not self.hasAlreadyBeenAdded(org, timestamp): keyname_timestamp = "{}:{}".format(self.keyTimestamp, timestampDate_str) self.serv_redis_db.sadd(keyname_timestamp, timestamp) + logger.debug('Added to redis: keyname={}, org={}'.format(keyname_timestamp, timestamp)) self.addTemporary(org, timestamp) keyname_org = "{}:{}".format(self.keyOrgLog, timestampDate_str) self.serv_redis_db.zincrby(keyname_org, org, 1) + logger.debug('Added to redis: keyname={}, org={}'.format(keyname_org, org)) def getUserLogins(self, date): keyname = "{}:{}".format(self.keyTimestamp, util.getDateStrFormat(date)) diff --git a/zmq_dispatcher.py b/zmq_dispatcher.py index aee91ce..db8b51e 100755 --- a/zmq_dispatcher.py +++ b/zmq_dispatcher.py @@ -2,7 +2,7 @@ import time, datetime import copy -from pprint import pprint +import logging import zmq import redis import random @@ -22,6 +22,9 @@ configfile = os.path.join(os.environ['DASH_CONFIG'], 'config.cfg') cfg = configparser.ConfigParser() cfg.read(configfile) +logging.basicConfig(filename='logs/logs.log', filemode='w', level=logging.INFO) +logger = logging.getLogger(__name__) + CHANNEL = cfg.get('RedisLog', 'channel') LISTNAME = cfg.get('RedisLIST', 'listName') @@ -47,6 +50,7 @@ trendings_helper = trendings_helper.Trendings_helper(serv_redis_db, cfg) def publish_log(zmq_name, name, content, 
channel=CHANNEL): to_send = { 'name': name, 'log': json.dumps(content), 'zmqName': zmq_name } serv_log.publish(channel, json.dumps(to_send)) + logger.debug('Published: {}'.format(json.dumps(to_send))) def getFields(obj, fields): jsonWalker = fields.split('.') @@ -68,7 +72,7 @@ def getFields(obj, fields): ############## def handler_log(zmq_name, jsonevent): - print('sending', 'log') + logger.info('Log not processed') return def handler_dispatcher(zmq_name, jsonObj): @@ -76,12 +80,12 @@ def handler_dispatcher(zmq_name, jsonObj): handler_event(zmq_name, jsonObj) def handler_keepalive(zmq_name, jsonevent): - print('sending', 'keepalive') + logger.info('Handling keepalive') to_push = [ jsonevent['uptime'] ] publish_log(zmq_name, 'Keepalive', to_push) def handler_user(zmq_name, jsondata): - print('sending', 'user') + logger.info('Handling user') action = jsondata['action'] json_user = jsondata['User'] json_org = jsondata['Organisation'] @@ -93,11 +97,11 @@ def handler_user(zmq_name, jsondata): pass def handler_conversation(zmq_name, jsonevent): + logger.info('Handling conversation') try: #only consider POST, not THREAD jsonpost = jsonevent['Post'] - except KeyError: - return - print('sending' ,'Post') + except KeyError as e: + logger.error('Error in handler_conversation: {}'.format(e)) org = jsonpost['org_name'] categ = None action = 'add' @@ -112,11 +116,11 @@ def handler_conversation(zmq_name, jsonevent): trendings_helper.addTrendingDisc(eventName, nowSec) def handler_object(zmq_name, jsondata): - print('obj') + logger.info('Handling object') return def handler_sighting(zmq_name, jsondata): - print('sending' ,'sighting') + logger.info('Handling sighting') jsonsight = jsondata['Sighting'] org = jsonsight['Event']['Orgc']['name'] categ = jsonsight['Attribute']['category'] @@ -132,6 +136,7 @@ def handler_sighting(zmq_name, jsondata): trendings_helper.addFalsePositive(timestamp) def handler_event(zmq_name, jsonobj): + logger.info('Handling event') #fields: threat_level_id, id, info jsonevent = jsonobj['Event'] @@ -170,6 +175,7 @@ def handler_event(zmq_name, jsonobj): isLabeled=eventLabeled) def handler_attribute(zmq_name, jsonobj, hasAlreadyBeenContributed=False): + logger.info('Handling attribute') # check if jsonattr is an attribute object if 'Attribute' in jsonobj: jsonattr = jsonobj['Attribute'] @@ -227,7 +233,7 @@ def process_log(zmq_name, event): try: dico_action[topic](zmq_name, jsonevent) except KeyError as e: - print(e) + logger.error(e) def main(sleeptime): @@ -235,7 +241,7 @@ def main(sleeptime): while True: content = serv_list.rpop(LISTNAME) if content is None: - print('Processed', numMsg, 'message(s) since last sleep.') + logger.info('Processed {} message(s) since last sleep.'.format(numMsg)) numMsg = 0 time.sleep(sleeptime) continue diff --git a/zmq_subscriber.py b/zmq_subscriber.py index e722012..4931819 100755 --- a/zmq_subscriber.py +++ b/zmq_subscriber.py @@ -1,8 +1,8 @@ #!/usr/bin/env python3.5 import time, datetime -from pprint import pprint import zmq +import logging import redis import configparser import argparse @@ -14,6 +14,9 @@ configfile = os.path.join(os.environ['DASH_CONFIG'], 'config.cfg') cfg = configparser.ConfigParser() cfg.read(configfile) +logging.basicConfig(filename='logs/logs.log', filemode='w', level=logging.INFO) +logger = logging.getLogger(__name__) + ZMQ_URL = cfg.get('RedisGlobal', 'zmq_url') CHANNEL = cfg.get('RedisLog', 'channel') LISTNAME = cfg.get('RedisLIST', 'listName') @@ -32,6 +35,7 @@ def put_in_redis_list(zmq_name, content): content = 
content.decode('utf8') to_add = {'zmq_name': zmq_name, 'content': content} serv_list.lpush(LISTNAME, json.dumps(to_add)) + logger.debug('Pushed: {}'.format(json.dumps(to_add))) def main(zmqName): context = zmq.Context() From b59827591d28403e556c82127ada38abedeafdd7 Mon Sep 17 00:00:00 2001 From: Sami Mokaddem Date: Tue, 5 Dec 2017 09:02:49 +0100 Subject: [PATCH 2/6] update: changed some log levels --- server.py | 4 ++-- zmq_dispatcher.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/server.py b/server.py index 3aec17d..d82de5c 100755 --- a/server.py +++ b/server.py @@ -20,8 +20,8 @@ configfile = os.path.join(os.environ['DASH_CONFIG'], 'config.cfg') cfg = configparser.ConfigParser() cfg.read(configfile) -logging.basicConfig(filename='logs/logs.log', filemode='w', level=logging.INFO) -logger = logging.getLogger(__name__) +logger = logging.getLogger('werkzeug') +logger.setLevel(logging.ERROR) app = Flask(__name__) diff --git a/zmq_dispatcher.py b/zmq_dispatcher.py index db8b51e..a8afb03 100755 --- a/zmq_dispatcher.py +++ b/zmq_dispatcher.py @@ -241,7 +241,7 @@ def main(sleeptime): while True: content = serv_list.rpop(LISTNAME) if content is None: - logger.info('Processed {} message(s) since last sleep.'.format(numMsg)) + logger.debug('Processed {} message(s) since last sleep.'.format(numMsg)) numMsg = 0 time.sleep(sleeptime) continue From bdef0c21e93e515f61f4634f015cb2ed8b7ac074 Mon Sep 17 00:00:00 2001 From: Sami Mokaddem Date: Tue, 5 Dec 2017 09:56:32 +0100 Subject: [PATCH 3/6] Moved helper scripts in their folder + updated import path --- contributor_helper.py => helpers/contributor_helper.py | 2 +- geo_helper.py => helpers/geo_helper.py | 0 trendings_helper.py => helpers/trendings_helper.py | 0 users_helper.py => helpers/users_helper.py | 2 +- server.py | 8 ++++---- zmq_dispatcher.py | 10 +++++----- zmq_subscriber.py | 2 +- 7 files changed, 12 insertions(+), 12 deletions(-) rename contributor_helper.py => helpers/contributor_helper.py (99%) rename geo_helper.py => helpers/geo_helper.py (100%) rename trendings_helper.py => helpers/trendings_helper.py (100%) rename users_helper.py => helpers/users_helper.py (99%) diff --git a/contributor_helper.py b/helpers/contributor_helper.py similarity index 99% rename from contributor_helper.py rename to helpers/contributor_helper.py index 0c51af3..d7cb412 100644 --- a/contributor_helper.py +++ b/helpers/contributor_helper.py @@ -10,7 +10,7 @@ import logging import redis import util -import users_helper +from . import users_helper KEYDAY = "CONTRIB_DAY" # To be used by other module logging.basicConfig(filename='logs/logs.log', filemode='w', level=logging.INFO) logger = logging.getLogger(__name__) diff --git a/geo_helper.py b/helpers/geo_helper.py similarity index 100% rename from geo_helper.py rename to helpers/geo_helper.py diff --git a/trendings_helper.py b/helpers/trendings_helper.py similarity index 100% rename from trendings_helper.py rename to helpers/trendings_helper.py diff --git a/users_helper.py b/helpers/users_helper.py similarity index 99% rename from users_helper.py rename to helpers/users_helper.py index 0a3cda6..81cba1e 100644 --- a/users_helper.py +++ b/helpers/users_helper.py @@ -5,7 +5,7 @@ import datetime, time import logging import util -import contributor_helper +from . 
import contributor_helper logging.basicConfig(filename='logs/logs.log', filemode='w', level=logging.INFO) logger = logging.getLogger(__name__) diff --git a/server.py b/server.py index d82de5c..ed636d4 100755 --- a/server.py +++ b/server.py @@ -11,10 +11,10 @@ import os import logging import util -import geo_helper -import contributor_helper -import users_helper -import trendings_helper +from helpers import geo_helper +from helpers import contributor_helper +from helpers import users_helper +from helpers import trendings_helper configfile = os.path.join(os.environ['DASH_CONFIG'], 'config.cfg') cfg = configparser.ConfigParser() diff --git a/zmq_dispatcher.py b/zmq_dispatcher.py index a8afb03..2324bed 100755 --- a/zmq_dispatcher.py +++ b/zmq_dispatcher.py @@ -13,17 +13,17 @@ import sys import json import util -import geo_helper -import contributor_helper -import users_helper -import trendings_helper +from helpers import geo_helper +from helpers import contributor_helper +from helpers import users_helper +from helpers import trendings_helper configfile = os.path.join(os.environ['DASH_CONFIG'], 'config.cfg') cfg = configparser.ConfigParser() cfg.read(configfile) logging.basicConfig(filename='logs/logs.log', filemode='w', level=logging.INFO) -logger = logging.getLogger(__name__) +logger = logging.getLogger('zmq_dispatcher') CHANNEL = cfg.get('RedisLog', 'channel') LISTNAME = cfg.get('RedisLIST', 'listName') diff --git a/zmq_subscriber.py b/zmq_subscriber.py index 4931819..281490c 100755 --- a/zmq_subscriber.py +++ b/zmq_subscriber.py @@ -15,7 +15,7 @@ cfg = configparser.ConfigParser() cfg.read(configfile) logging.basicConfig(filename='logs/logs.log', filemode='w', level=logging.INFO) -logger = logging.getLogger(__name__) +logger = logging.getLogger('zmq_subscriber') ZMQ_URL = cfg.get('RedisGlobal', 'zmq_url') CHANNEL = cfg.get('RedisLog', 'channel') From 9139ed7eef4ca45aaaca8e9fd8eae7292c51e3b5 Mon Sep 17 00:00:00 2001 From: Sami Mokaddem Date: Tue, 5 Dec 2017 10:23:40 +0100 Subject: [PATCH 4/6] Added logger in config --- config/config.cfg.default | 8 ++++--- helpers/contributor_helper.py | 39 +++++++++++++++++++++-------------- helpers/geo_helper.py | 30 ++++++++++++++++----------- helpers/trendings_helper.py | 18 ++++++++++------ helpers/users_helper.py | 17 ++++++++++----- server.py | 2 +- zmq_dispatcher.py | 9 ++++++-- zmq_subscriber.py | 8 +++++-- 8 files changed, 84 insertions(+), 47 deletions(-) diff --git a/config/config.cfg.default b/config/config.cfg.default index 6b3b46b..6f2c027 100644 --- a/config/config.cfg.default +++ b/config/config.cfg.default @@ -11,6 +11,9 @@ item_to_plot = Attribute.category size_dashboard_left_width = 5 size_openStreet_pannel_perc = 55 size_world_pannel_perc = 35 +field_to_plot = Attribute.category +fieldname_order=["Event.id", "Attribute.Tag", "Attribute.category", "Attribute.type", ["Attribute.value", "Attribute.comment"]] +char_separator=|| [GEO] #min @@ -25,9 +28,8 @@ min_between_reload = 5 additional_help_text = ["Sightings multiplies earned points by 2", "Editing an attribute earns you the same as creating one"] [Log] -field_to_plot = Attribute.category -fieldname_order=["Event.id", "Attribute.Tag", "Attribute.category", "Attribute.type", ["Attribute.value", "Attribute.comment"]] -char_separator=|| +directory=logs +filename=logs.log [RedisGlobal] host=localhost diff --git a/helpers/contributor_helper.py b/helpers/contributor_helper.py index d7cb412..ae446c6 100644 --- a/helpers/contributor_helper.py +++ b/helpers/contributor_helper.py @@ -12,8 +12,6 @@ 
import redis import util from . import users_helper KEYDAY = "CONTRIB_DAY" # To be used by other module -logging.basicConfig(filename='logs/logs.log', filemode='w', level=logging.INFO) -logger = logging.getLogger(__name__) class Contributor_helper: def __init__(self, serv_redis_db, cfg): @@ -29,6 +27,15 @@ class Contributor_helper: self.CHANNEL_LASTCONTRIB = cfg.get('RedisLog', 'channelLastContributor') self.users_helper = users_helper.Users_helper(serv_redis_db, cfg) + #logger + logDir = cfg.get('Log', 'directory') + logfilename = cfg.get('Log', 'filename') + logPath = os.path.join(logDir, logfilename) + if not os.path.exists(logDir): + os.makedirs(logDir) + logging.basicConfig(filename=logPath, filemode='w', level=logging.INFO) + self.logger = logging.getLogger(__name__) + #honorBadge self.honorBadgeNum = len(self.cfg_org_rank.options('HonorBadge')) self.heavilyCount = self.cfg_org_rank.getint('rankRequirementsMisc', 'heavilyCount') @@ -98,12 +105,12 @@ class Contributor_helper: today_str = util.getDateStrFormat(date) keyname = "{}:{}:{}".format(self.keyCateg, today_str, categ) self.serv_redis_db.zincrby(keyname, org, count) - logger.debug('Added to redis: keyname={}, org={}, count={}'.format(keyname, org, count)) + self.logger.debug('Added to redis: keyname={}, org={}, count={}'.format(keyname, org, count)) def publish_log(self, zmq_name, name, content, channel=""): to_send = { 'name': name, 'log': json.dumps(content), 'zmqName': zmq_name } self.serv_log.publish(channel, json.dumps(to_send)) - logger.debug('Published: {}'.format(json.dumps(to_send))) + self.logger.debug('Published: {}'.format(json.dumps(to_send))) ''' HANDLER ''' #pntMultiplier if one contribution rewards more than others. (e.g. shighting may gives more points than editing) @@ -138,7 +145,7 @@ class Contributor_helper: keyname = "{}:{}".format(self.keyLastContrib, util.getDateStrFormat(now)) self.serv_redis_db.zadd(keyname, nowSec, org) - logger.debug('Added to redis: keyname={}, nowSec={}, org={}'.format(keyname, nowSec, org)) + self.logger.debug('Added to redis: keyname={}, nowSec={}, org={}'.format(keyname, nowSec, org)) self.serv_redis_db.expire(keyname, util.ONE_DAY*7) #expire after 7 day awards_given = self.updateOrgContributionRank(org, pnts_to_add, action, contribType, eventTime=datetime.datetime.now(), isLabeled=isLabeled, categ=util.noSpaceLower(categ)) @@ -147,7 +154,7 @@ class Contributor_helper: # update awards given keyname = "{}:{}".format(self.keyLastAward, util.getDateStrFormat(now)) self.serv_redis_db.zadd(keyname, nowSec, json.dumps({'org': org, 'award': award, 'epoch': nowSec })) - logger.debug('Added to redis: keyname={}, nowSec={}, content={}'.format(keyname, nowSec, json.dumps({'org': org, 'award': award, 'epoch': nowSec }))) + self.logger.debug('Added to redis: keyname={}, nowSec={}, content={}'.format(keyname, nowSec, json.dumps({'org': org, 'award': award, 'epoch': nowSec }))) self.serv_redis_db.expire(keyname, util.ONE_DAY*7) #expire after 7 day # publish self.publish_log(zmq_name, 'CONTRIBUTION', {'org': org, 'award': award, 'epoch': nowSec }, channel=self.CHANNEL_LASTAWARDS) @@ -221,17 +228,17 @@ class Contributor_helper: if contribType == 'Attribute': attributeWeekCount = self.serv_redis_db.incrby(keyname.format(org=orgName, orgCateg='ATTR_WEEK_COUNT'), 1) self.serv_redis_db.expire(keyname.format(org=orgName, orgCateg='ATTR_WEEK_COUNT'), util.ONE_DAY*7) - logger.debug('Incrby: keyname={}'.format(keyname.format(org=orgName, orgCateg='ATTR_WEEK_COUNT'))) + self.logger.debug('Incrby: 
keyname={}'.format(keyname.format(org=orgName, orgCateg='ATTR_WEEK_COUNT'))) if contribType == 'Proposal': proposalWeekCount = self.serv_redis_db.incrby(keyname.format(org=orgName, orgCateg='PROP_WEEK_COUNT'), 1) - logger.debug('Incrby: keyname={}'.format(keyname.format(org=orgName, orgCateg='PROP_WEEK_COUNT'))) + self.logger.debug('Incrby: keyname={}'.format(keyname.format(org=orgName, orgCateg='PROP_WEEK_COUNT'))) self.serv_redis_db.expire(keyname.format(org=orgName, orgCateg='PROP_WEEK_COUNT'), util.ONE_DAY*7) addContributionToCateg(datetime.datetime.now(), 'proposal') if contribType == 'Sighting': sightingWeekCount = self.serv_redis_db.incrby(keyname.format(org=orgName, orgCateg='SIGHT_WEEK_COUNT'), 1) - logger.debug('Incrby: keyname={}'.format(keyname.format(org=orgName, orgCateg='SIGHT_WEEK_COUNT'))) + self.logger.debug('Incrby: keyname={}'.format(keyname.format(org=orgName, orgCateg='SIGHT_WEEK_COUNT'))) self.serv_redis_db.expire(keyname.format(org=orgName, orgCateg='SIGHT_WEEK_COUNT'), util.ONE_DAY*7) self.addContributionToCateg(datetime.datetime.now(), 'sighting', orgName) @@ -240,11 +247,11 @@ class Contributor_helper: if contribType == 'Event': eventWeekCount = self.serv_redis_db.incrby(keyname.format(org=orgName, orgCateg='EVENT_WEEK_COUNT'), 1) - logger.debug('Incrby: keyname={}'.format(keyname.format(org=orgName, orgCateg='EVENT_WEEK_COUNT'))) + self.logger.debug('Incrby: keyname={}'.format(keyname.format(org=orgName, orgCateg='EVENT_WEEK_COUNT'))) self.serv_redis_db.expire(keyname.format(org=orgName, orgCateg='EVENT_WEEK_COUNT'), util.ONE_DAY*7) eventMonthCount = self.serv_redis_db.incrby(keyname.format(org=orgName, orgCateg='EVENT_MONTH_COUNT'), 1) - logger.debug('Incrby: keyname={}'.format(keyname.format(org=orgName, orgCateg='EVENT_MONTH_COUNT'))) + self.logger.debug('Incrby: keyname={}'.format(keyname.format(org=orgName, orgCateg='EVENT_MONTH_COUNT'))) self.serv_redis_db.expire(keyname.format(org=orgName, orgCateg='EVENT_MONTH_COUNT'), util.ONE_DAY*7) # getRequirement parameters @@ -287,7 +294,7 @@ class Contributor_helper: for rankReq, ttl in contrib: self.serv_redis_db.set(keyname.format(org=orgName, orgCateg='CONTRIB_REQ_'+str(rankReq)), 1) - logger.debug('Set: keyname={}'.format(keyname.format(org=orgName, orgCateg='CONTRIB_REQ_'+str(rankReq)))) + self.logger.debug('Set: keyname={}'.format(keyname.format(org=orgName, orgCateg='CONTRIB_REQ_'+str(rankReq)))) self.serv_redis_db.expire(keyname.format(org=orgName, orgCateg='CONTRIB_REQ_'+str(rankReq)), ttl) ContributionStatus = self.getCurrentContributionStatus(orgName) @@ -335,12 +342,12 @@ class Contributor_helper: def giveBadgeToOrg(self, org, badgeNum): keyname = '{mainKey}:{org}:{orgCateg}' self.serv_redis_db.set(keyname.format(mainKey=self.keyContribReq, org=org, orgCateg='BADGE_'+str(badgeNum)), 1) - logger.debug('Giving badge {} to org {}'.format(org, badgeNum)) + self.logger.debug('Giving badge {} to org {}'.format(org, badgeNum)) def removeBadgeFromOrg(self, org, badgeNum): keyname = '{mainKey}:{org}:{orgCateg}' self.serv_redis_db.delete(keyname.format(mainKey=self.keyContribReq, org=org, orgCateg='BADGE_'+str(badgeNum))) - logger.debug('Removing badge {} from org {}'.format(org, badgeNum)) + self.logger.debug('Removing badge {} from org {}'.format(org, badgeNum)) ''' TROPHIES ''' def getOrgTrophies(self, org): @@ -391,12 +398,12 @@ class Contributor_helper: def giveTrophyPointsToOrg(self, org, categ, points): keyname = '{mainKey}:{orgCateg}' self.serv_redis_db.zincrby(keyname.format(mainKey=self.keyTrophy, 
orgCateg=categ), org, points) - logger.debug('Giving {} trophy points to {} in {}'.format(points, org, categ)) + self.logger.debug('Giving {} trophy points to {} in {}'.format(points, org, categ)) def removeTrophyPointsFromOrg(self, org, categ, points): keyname = '{mainKey}:{orgCateg}' self.serv_redis_db.zincrby(keyname.format(mainKey=self.keyTrophy, orgCateg=categ), org, -points) - logger.debug('Removing {} trophy points from {} in {}'.format(points, org, categ)) + self.logger.debug('Removing {} trophy points from {} in {}'.format(points, org, categ)) ''' AWARDS HELPER ''' def getLastAwardsFromRedis(self): diff --git a/helpers/geo_helper.py b/helpers/geo_helper.py index 17d7241..be222a5 100644 --- a/helpers/geo_helper.py +++ b/helpers/geo_helper.py @@ -13,9 +13,6 @@ from phonenumbers import geocoder import util -logging.basicConfig(filename='logs/logs.log', filemode='w', level=logging.INFO) -logger = logging.getLogger(__name__) - class Geo_helper: def __init__(self, serv_redis_db, cfg): self.serv_redis_db = serv_redis_db @@ -25,6 +22,15 @@ class Geo_helper: port=cfg.getint('RedisGlobal', 'port'), db=cfg.getint('RedisMap', 'db')) + #logger + logDir = cfg.get('Log', 'directory') + logfilename = cfg.get('Log', 'filename') + logPath = os.path.join(logDir, logfilename) + if not os.path.exists(logDir): + os.makedirs(logDir) + logging.basicConfig(filename=logPath, filemode='w', level=logging.INFO) + self.logger = logging.getLogger(__name__) + self.keyCategCoord = "GEO_COORD" self.keyCategCountry = "GEO_COUNTRY" self.keyCategRad = "GEO_RAD" @@ -106,22 +112,22 @@ class Geo_helper: "regionCode": rep['full_rep'].country.iso_code, } self.serv_coord.publish(self.CHANNELDISP, json.dumps(to_send)) - logger.info('Published: {}'.format(json.dumps(to_send))) + self.logger.info('Published: {}'.format(json.dumps(to_send))) except ValueError: - logger.warning("can't resolve ip") + self.logger.warning("can't resolve ip") except geoip2.errors.AddressNotFoundError: - logger.warning("Address not in Database") + self.logger.warning("Address not in Database") def getCoordFromPhoneAndPublish(self, phoneNumber, categ): try: rep = phonenumbers.parse(phoneNumber, None) if not (phonenumbers.is_valid_number(rep) or phonenumbers.is_possible_number(rep)): - logger.warning("Phone number not valid") + self.logger.warning("Phone number not valid") return country_name = geocoder.country_name_for_number(rep, "en") country_code = self.country_to_iso[country_name] if country_code is None: - logger.warning("Non matching ISO_CODE") + self.logger.warning("Non matching ISO_CODE") return coord = self.country_code_to_coord[country_code.lower()] # countrycode is in upper case coord_dic = {'lat': coord['lat'], 'lon': coord['long']} @@ -146,9 +152,9 @@ class Geo_helper: "regionCode": country_code, } self.serv_coord.publish(self.CHANNELDISP, json.dumps(to_send)) - logger.info('Published: {}'.format(json.dumps(to_send))) + self.logger.info('Published: {}'.format(json.dumps(to_send))) except phonenumbers.NumberParseException: - logger.warning("Can't resolve phone number country") + self.logger.warning("Can't resolve phone number country") ''' UTIL ''' def push_to_redis_geo(self, keyCateg, lon, lat, content): @@ -156,13 +162,13 @@ class Geo_helper: today_str = util.getDateStrFormat(now) keyname = "{}:{}".format(keyCateg, today_str) self.serv_redis_db.geoadd(keyname, lon, lat, content) - logger.debug('Added to redis: keyname={}, lon={}, lat={}, content={}'.format(keyname, lon, lat, content)) + self.logger.debug('Added to redis: keyname={}, 
lon={}, lat={}, content={}'.format(keyname, lon, lat, content)) def push_to_redis_zset(self, keyCateg, toAdd, endSubkey="", count=1): now = datetime.datetime.now() today_str = util.getDateStrFormat(now) keyname = "{}:{}{}".format(keyCateg, today_str, endSubkey) self.serv_redis_db.zincrby(keyname, toAdd, count) - logger.debug('Added to redis: keyname={}, toAdd={}, count={}'.format(keyname, toAdd, count)) + self.logger.debug('Added to redis: keyname={}, toAdd={}, count={}'.format(keyname, toAdd, count)) def ip_to_coord(self, ip): resp = self.reader.city(ip) diff --git a/helpers/trendings_helper.py b/helpers/trendings_helper.py index b99803e..298a43f 100644 --- a/helpers/trendings_helper.py +++ b/helpers/trendings_helper.py @@ -7,9 +7,6 @@ from collections import OrderedDict import util -logging.basicConfig(filename='logs/logs.log', filemode='w', level=logging.INFO) -logger = logging.getLogger(__name__) - class Trendings_helper: def __init__(self, serv_redis_db, cfg): self.serv_redis_db = serv_redis_db @@ -23,6 +20,15 @@ class Trendings_helper: self.keySigh = "TRENDINGS_SIGHT_sightings" self.keyFalse = "TRENDINGS_SIGHT_false_positive" + #logger + logDir = cfg.get('Log', 'directory') + logfilename = cfg.get('Log', 'filename') + logPath = os.path.join(logDir, logfilename) + if not os.path.exists(logDir): + os.makedirs(logDir) + logging.basicConfig(filename=logPath, filemode='w', level=logging.INFO) + self.logger = logging.getLogger(__name__) + ''' SETTER ''' def addGenericTrending(self, trendingType, data, timestamp): @@ -34,7 +40,7 @@ class Trendings_helper: else: to_save = data self.serv_redis_db.zincrby(keyname, to_save, 1) - logger.debug('Added to redis: keyname={}, content={}'.format(keyname, to_save)) + self.logger.debug('Added to redis: keyname={}, content={}'.format(keyname, to_save)) def addTrendingEvent(self, eventName, timestamp): self.addGenericTrending(self.keyEvent, eventName, timestamp) @@ -58,14 +64,14 @@ class Trendings_helper: timestampDate_str = util.getDateStrFormat(timestampDate) keyname = "{}:{}".format(self.keySigh, timestampDate_str) self.serv_redis_db.incrby(keyname, 1) - logger.debug('Incrby: keyname={}'.format(keyname)) + self.logger.debug('Incrby: keyname={}'.format(keyname)) def addFalsePositive(self, timestamp): timestampDate = datetime.datetime.fromtimestamp(float(timestamp)) timestampDate_str = util.getDateStrFormat(timestampDate) keyname = "{}:{}".format(self.keyFalse, timestampDate_str) self.serv_redis_db.incrby(keyname, 1) - logger.debug('Incrby: keyname={}'.format(keyname)) + self.logger.debug('Incrby: keyname={}'.format(keyname)) ''' GETTER ''' diff --git a/helpers/users_helper.py b/helpers/users_helper.py index 81cba1e..e435b34 100644 --- a/helpers/users_helper.py +++ b/helpers/users_helper.py @@ -7,8 +7,6 @@ import logging import util from . 
import contributor_helper -logging.basicConfig(filename='logs/logs.log', filemode='w', level=logging.INFO) -logger = logging.getLogger(__name__) class Users_helper: def __init__(self, serv_redis_db, cfg): @@ -20,12 +18,21 @@ class Users_helper: self.keyOrgLog = "LOGIN_ORG" self.keyContribDay = contributor_helper.KEYDAY # Key to get monthly contribution + #logger + logDir = cfg.get('Log', 'directory') + logfilename = cfg.get('Log', 'filename') + logPath = os.path.join(logDir, logfilename) + if not os.path.exists(logDir): + os.makedirs(logDir) + logging.basicConfig(filename=logPath, filemode='w', level=logging.INFO) + self.logger = logging.getLogger(__name__) + def addTemporary(self, org, timestamp): timestampDate = datetime.datetime.fromtimestamp(float(timestamp)) timestampDate_str = util.getDateHoursStrFormat(timestampDate) keyname_timestamp = "{}:{}".format(self.keyTimestampSet, timestampDate_str) self.serv_redis_db.sadd(keyname_timestamp, org) - logger.debug('Added to redis: keyname={}, org={}'.format(keyname_timestamp, org)) + self.logger.debug('Added to redis: keyname={}, org={}'.format(keyname_timestamp, org)) self.serv_redis_db.expire(keyname_timestamp, 60*60) def hasAlreadyBeenAdded(self, org, timestamp): @@ -44,12 +51,12 @@ class Users_helper: if not self.hasAlreadyBeenAdded(org, timestamp): keyname_timestamp = "{}:{}".format(self.keyTimestamp, timestampDate_str) self.serv_redis_db.sadd(keyname_timestamp, timestamp) - logger.debug('Added to redis: keyname={}, org={}'.format(keyname_timestamp, timestamp)) + self.logger.debug('Added to redis: keyname={}, org={}'.format(keyname_timestamp, timestamp)) self.addTemporary(org, timestamp) keyname_org = "{}:{}".format(self.keyOrgLog, timestampDate_str) self.serv_redis_db.zincrby(keyname_org, org, 1) - logger.debug('Added to redis: keyname={}, org={}'.format(keyname_org, org)) + self.logger.debug('Added to redis: keyname={}, org={}'.format(keyname_org, org)) def getUserLogins(self, date): keyname = "{}:{}".format(self.keyTimestamp, util.getDateStrFormat(date)) diff --git a/server.py b/server.py index ed636d4..6c2c0a2 100755 --- a/server.py +++ b/server.py @@ -65,7 +65,7 @@ class LogItem(): FIELDNAME_ORDER_HEADER = [] FIELDNAME_ORDER.append("Time") FIELDNAME_ORDER_HEADER.append("Time") - for item in json.loads(cfg.get('Log', 'fieldname_order')): + for item in json.loads(cfg.get('Dashboard', 'fieldname_order')): if type(item) is list: FIELDNAME_ORDER_HEADER.append(" | ".join(item)) else: diff --git a/zmq_dispatcher.py b/zmq_dispatcher.py index 2324bed..162df4f 100755 --- a/zmq_dispatcher.py +++ b/zmq_dispatcher.py @@ -22,7 +22,12 @@ configfile = os.path.join(os.environ['DASH_CONFIG'], 'config.cfg') cfg = configparser.ConfigParser() cfg.read(configfile) -logging.basicConfig(filename='logs/logs.log', filemode='w', level=logging.INFO) +logDir = cfg.get('Log', 'directory') +logfilename = cfg.get('Log', 'filename') +logPath = os.path.join(logDir, logfilename) +if not os.path.exists(logDir): + os.makedirs(logDir) +logging.basicConfig(filename=logPath, filemode='w', level=logging.INFO) logger = logging.getLogger('zmq_dispatcher') CHANNEL = cfg.get('RedisLog', 'channel') @@ -193,7 +198,7 @@ def handler_attribute(zmq_name, jsonobj, hasAlreadyBeenContributed=False): trendings_helper.addTrendingTags(tags, timestamp) to_push = [] - for field in json.loads(cfg.get('Log', 'fieldname_order')): + for field in json.loads(cfg.get('Dashboard', 'fieldname_order')): if type(field) is list: to_join = [] for subField in field: diff --git a/zmq_subscriber.py 
b/zmq_subscriber.py index 281490c..577afee 100755 --- a/zmq_subscriber.py +++ b/zmq_subscriber.py @@ -13,8 +13,12 @@ import json configfile = os.path.join(os.environ['DASH_CONFIG'], 'config.cfg') cfg = configparser.ConfigParser() cfg.read(configfile) - -logging.basicConfig(filename='logs/logs.log', filemode='w', level=logging.INFO) +logDir = cfg.get('Log', 'directory') +logfilename = cfg.get('Log', 'filename') +logPath = os.path.join(logDir, logfilename) +if not os.path.exists(logDir): + os.makedirs(logDir) +logging.basicConfig(filename=logPath, filemode='w', level=logging.INFO) logger = logging.getLogger('zmq_subscriber') ZMQ_URL = cfg.get('RedisGlobal', 'zmq_url') From 2034941d646546360ba7a58a8764533416e9292b Mon Sep 17 00:00:00 2001 From: Sami Mokaddem Date: Tue, 5 Dec 2017 10:32:12 +0100 Subject: [PATCH 5/6] Append to log instead of replacing --- helpers/contributor_helper.py | 2 +- helpers/geo_helper.py | 2 +- helpers/trendings_helper.py | 2 +- helpers/users_helper.py | 2 +- zmq_dispatcher.py | 2 +- zmq_subscriber.py | 2 +- 6 files changed, 6 insertions(+), 6 deletions(-) diff --git a/helpers/contributor_helper.py b/helpers/contributor_helper.py index ae446c6..42ff5cf 100644 --- a/helpers/contributor_helper.py +++ b/helpers/contributor_helper.py @@ -33,7 +33,7 @@ class Contributor_helper: logPath = os.path.join(logDir, logfilename) if not os.path.exists(logDir): os.makedirs(logDir) - logging.basicConfig(filename=logPath, filemode='w', level=logging.INFO) + logging.basicConfig(filename=logPath, filemode='a', level=logging.INFO) self.logger = logging.getLogger(__name__) #honorBadge diff --git a/helpers/geo_helper.py b/helpers/geo_helper.py index be222a5..8169fe8 100644 --- a/helpers/geo_helper.py +++ b/helpers/geo_helper.py @@ -28,7 +28,7 @@ class Geo_helper: logPath = os.path.join(logDir, logfilename) if not os.path.exists(logDir): os.makedirs(logDir) - logging.basicConfig(filename=logPath, filemode='w', level=logging.INFO) + logging.basicConfig(filename=logPath, filemode='a', level=logging.INFO) self.logger = logging.getLogger(__name__) self.keyCategCoord = "GEO_COORD" diff --git a/helpers/trendings_helper.py b/helpers/trendings_helper.py index 298a43f..b85caca 100644 --- a/helpers/trendings_helper.py +++ b/helpers/trendings_helper.py @@ -26,7 +26,7 @@ class Trendings_helper: logPath = os.path.join(logDir, logfilename) if not os.path.exists(logDir): os.makedirs(logDir) - logging.basicConfig(filename=logPath, filemode='w', level=logging.INFO) + logging.basicConfig(filename=logPath, filemode='a', level=logging.INFO) self.logger = logging.getLogger(__name__) ''' SETTER ''' diff --git a/helpers/users_helper.py b/helpers/users_helper.py index e435b34..c865f21 100644 --- a/helpers/users_helper.py +++ b/helpers/users_helper.py @@ -24,7 +24,7 @@ class Users_helper: logPath = os.path.join(logDir, logfilename) if not os.path.exists(logDir): os.makedirs(logDir) - logging.basicConfig(filename=logPath, filemode='w', level=logging.INFO) + logging.basicConfig(filename=logPath, filemode='a', level=logging.INFO) self.logger = logging.getLogger(__name__) def addTemporary(self, org, timestamp): diff --git a/zmq_dispatcher.py b/zmq_dispatcher.py index 162df4f..09a21fd 100755 --- a/zmq_dispatcher.py +++ b/zmq_dispatcher.py @@ -27,7 +27,7 @@ logfilename = cfg.get('Log', 'filename') logPath = os.path.join(logDir, logfilename) if not os.path.exists(logDir): os.makedirs(logDir) -logging.basicConfig(filename=logPath, filemode='w', level=logging.INFO) +logging.basicConfig(filename=logPath, filemode='a', 
level=logging.INFO) logger = logging.getLogger('zmq_dispatcher') CHANNEL = cfg.get('RedisLog', 'channel') diff --git a/zmq_subscriber.py b/zmq_subscriber.py index 577afee..ab69e93 100755 --- a/zmq_subscriber.py +++ b/zmq_subscriber.py @@ -18,7 +18,7 @@ logfilename = cfg.get('Log', 'filename') logPath = os.path.join(logDir, logfilename) if not os.path.exists(logDir): os.makedirs(logDir) -logging.basicConfig(filename=logPath, filemode='w', level=logging.INFO) +logging.basicConfig(filename=logPath, filemode='a', level=logging.INFO) logger = logging.getLogger('zmq_subscriber') ZMQ_URL = cfg.get('RedisGlobal', 'zmq_url') From 968757f81b6fc1ab098feeb20cac46d8667decaf Mon Sep 17 00:00:00 2001 From: Sami Mokaddem Date: Tue, 5 Dec 2017 11:04:26 +0100 Subject: [PATCH 6/6] fix: now take correct config fields + Removed useless field in config --- config/config.cfg.default | 3 +-- server.py | 2 +- zmq_dispatcher.py | 2 +- 3 files changed, 3 insertions(+), 4 deletions(-) diff --git a/config/config.cfg.default b/config/config.cfg.default index 6f2c027..9513db2 100644 --- a/config/config.cfg.default +++ b/config/config.cfg.default @@ -6,12 +6,11 @@ rotation_wait_time = 30 max_img_rotation = 10 hours_spanned = 48 zoomlevel = 15 -item_to_plot = Attribute.category # [1->12] size_dashboard_left_width = 5 size_openStreet_pannel_perc = 55 size_world_pannel_perc = 35 -field_to_plot = Attribute.category +item_to_plot = Attribute.category fieldname_order=["Event.id", "Attribute.Tag", "Attribute.category", "Attribute.type", ["Attribute.value", "Attribute.comment"]] char_separator=|| diff --git a/server.py b/server.py index 6c2c0a2..f0ebb99 100755 --- a/server.py +++ b/server.py @@ -136,7 +136,7 @@ def index(): size_dashboard_width=[cfg.getint('Dashboard' ,'size_dashboard_left_width'), 12-cfg.getint('Dashboard', 'size_dashboard_left_width')], itemToPlot=cfg.get('Dashboard', 'item_to_plot'), graph_log_refresh_rate=cfg.getint('Dashboard' ,'graph_log_refresh_rate'), - char_separator=cfg.get('Log', 'char_separator'), + char_separator=cfg.get('Dashboard', 'char_separator'), rotation_wait_time=cfg.getint('Dashboard' ,'rotation_wait_time'), max_img_rotation=cfg.getint('Dashboard' ,'max_img_rotation'), hours_spanned=cfg.getint('Dashboard' ,'hours_spanned'), diff --git a/zmq_dispatcher.py b/zmq_dispatcher.py index 09a21fd..5e3b14c 100755 --- a/zmq_dispatcher.py +++ b/zmq_dispatcher.py @@ -203,7 +203,7 @@ def handler_attribute(zmq_name, jsonobj, hasAlreadyBeenContributed=False): to_join = [] for subField in field: to_join.append(getFields(jsonobj, subField)) - to_add = cfg.get('Log', 'char_separator').join(to_join) + to_add = cfg.get('Dashboard', 'char_separator').join(to_join) else: to_add = getFields(jsonobj, field) to_push.append(to_add)
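
The logger bootstrap introduced in PATCH 4/6 is repeated verbatim in the __init__ of contributor_helper, geo_helper, trendings_helper and users_helper, and again at module level in zmq_dispatcher and zmq_subscriber: read directory and filename from the [Log] section, create the directory if missing, call logging.basicConfig, then logging.getLogger. A minimal sketch of how that block could be factored into one shared function is given below; the module name logger_helper and the function get_logger are hypothetical and not part of these patches.

    # logger_helper.py -- hypothetical shared helper, not part of the patch series.
    # Sketch of the per-module logger bootstrap repeated across the helpers,
    # assuming the [Log] config section added in PATCH 4/6 and the append
    # filemode chosen in PATCH 5/6.
    import logging
    import os

    def get_logger(cfg, name):
        """Return a logger writing to the file configured in the [Log] section."""
        log_dir = cfg.get('Log', 'directory')
        log_filename = cfg.get('Log', 'filename')
        log_path = os.path.join(log_dir, log_filename)
        if not os.path.exists(log_dir):
            os.makedirs(log_dir)
        # logging.basicConfig() only configures the root logger on its first
        # call; later calls are no-ops, so invoking this from every helper is
        # harmless but redundant.
        logging.basicConfig(filename=log_path, filemode='a', level=logging.INFO)
        return logging.getLogger(name)

Each helper could then replace its inline block with self.logger = get_logger(cfg, __name__), and zmq_dispatcher / zmq_subscriber with logger = get_logger(cfg, 'zmq_dispatcher') and logger = get_logger(cfg, 'zmq_subscriber') respectively, matching the logger names used in PATCH 3/6.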