Merge pull request #16 from mokaddem/modules

Logger
Alexandre Dulaunoy 2017-12-16 09:40:12 +01:00 committed by GitHub
commit 34451efc04
8 changed files with 127 additions and 40 deletions

config/config.cfg.default

@@ -10,11 +10,13 @@ rotation_wait_time = 30
max_img_rotation = 10
hours_spanned = 48
zoomlevel = 15
item_to_plot = Attribute.category
# [1->12]
size_dashboard_left_width = 5
size_openStreet_pannel_perc = 55
size_world_pannel_perc = 35
item_to_plot = Attribute.category
fieldname_order=["Event.id", "Attribute.Tag", "Attribute.category", "Attribute.type", ["Attribute.value", "Attribute.comment"]]
char_separator=||
[GEO]
#min
@@ -29,9 +31,8 @@ min_between_reload = 5
additional_help_text = ["Sightings multiplies earned points by 2", "Editing an attribute earns you the same as creating one"]
[Log]
field_to_plot = Attribute.category
fieldname_order=["Event.id", "Attribute.Tag", "Attribute.category", "Attribute.type", ["Attribute.value", "Attribute.comment"]]
char_separator=||
directory=logs
filename=logs.log
[RedisGlobal]
host=localhost
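With this change, [Dashboard] owns the presentation options (fieldname_order, char_separator) while [Log] is reduced to the log file location. A minimal sketch of how the new [Log] options are consumed, mirroring the helper constructors below (the config path is an assumption):

import configparser
import logging
import os

cfg = configparser.ConfigParser()
cfg.read('config/config.cfg')  # path is an assumption

logDir = cfg.get('Log', 'directory')                        # "logs" with the defaults above
logPath = os.path.join(logDir, cfg.get('Log', 'filename'))  # "logs/logs.log"
os.makedirs(logDir, exist_ok=True)
logging.basicConfig(filename=logPath, filemode='a', level=logging.INFO)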

helpers/contributor_helper.py

@@ -6,10 +6,11 @@ import os
import configparser
import json
import datetime
import logging
import redis
import util
import users_helper
from . import users_helper
KEYDAY = "CONTRIB_DAY" # To be used by other modules
class Contributor_helper:
@@ -26,6 +27,15 @@ class Contributor_helper:
self.CHANNEL_LASTCONTRIB = cfg.get('RedisLog', 'channelLastContributor')
self.users_helper = users_helper.Users_helper(serv_redis_db, cfg)
#logger
logDir = cfg.get('Log', 'directory')
logfilename = cfg.get('Log', 'filename')
logPath = os.path.join(logDir, logfilename)
if not os.path.exists(logDir):
os.makedirs(logDir)
logging.basicConfig(filename=logPath, filemode='a', level=logging.INFO)
self.logger = logging.getLogger(__name__)
#honorBadge
self.honorBadgeNum = len(self.cfg_org_rank.options('HonorBadge'))
self.heavilyCount = self.cfg_org_rank.getint('rankRequirementsMisc', 'heavilyCount')
@@ -95,10 +105,12 @@ class Contributor_helper:
today_str = util.getDateStrFormat(date)
keyname = "{}:{}:{}".format(self.keyCateg, today_str, categ)
self.serv_redis_db.zincrby(keyname, org, count)
self.logger.debug('Added to redis: keyname={}, org={}, count={}'.format(keyname, org, count))
def publish_log(self, zmq_name, name, content, channel=""):
to_send = { 'name': name, 'log': json.dumps(content), 'zmqName': zmq_name }
self.serv_log.publish(channel, json.dumps(to_send))
self.logger.debug('Published: {}'.format(json.dumps(to_send)))
''' HANDLER '''
#pntMultiplier if one contribution rewards more than others. (e.g. sighting may give more points than editing)
@@ -111,7 +123,7 @@ class Contributor_helper:
nowSec = int(time.time())
pnts_to_add = self.default_pnts_per_contribution
# if there is a contribution, there is a login (even if ti comes from the API)
# if there is a contribution, there is a login (even if it comes from the API)
self.users_helper.add_user_login(nowSec, org)
# is a valid contribution
@@ -133,6 +145,7 @@ class Contributor_helper:
keyname = "{}:{}".format(self.keyLastContrib, util.getDateStrFormat(now))
self.serv_redis_db.zadd(keyname, nowSec, org)
self.logger.debug('Added to redis: keyname={}, nowSec={}, org={}'.format(keyname, nowSec, org))
self.serv_redis_db.expire(keyname, util.ONE_DAY*7) #expire after 7 days
awards_given = self.updateOrgContributionRank(org, pnts_to_add, action, contribType, eventTime=datetime.datetime.now(), isLabeled=isLabeled, categ=util.noSpaceLower(categ))
@@ -141,6 +154,7 @@ class Contributor_helper:
# update awards given
keyname = "{}:{}".format(self.keyLastAward, util.getDateStrFormat(now))
self.serv_redis_db.zadd(keyname, nowSec, json.dumps({'org': org, 'award': award, 'epoch': nowSec }))
self.logger.debug('Added to redis: keyname={}, nowSec={}, content={}'.format(keyname, nowSec, json.dumps({'org': org, 'award': award, 'epoch': nowSec })))
self.serv_redis_db.expire(keyname, util.ONE_DAY*7) #expire after 7 days
# publish
self.publish_log(zmq_name, 'CONTRIBUTION', {'org': org, 'award': award, 'epoch': nowSec }, channel=self.CHANNEL_LASTAWARDS)
@@ -214,14 +228,17 @@ class Contributor_helper:
if contribType == 'Attribute':
attributeWeekCount = self.serv_redis_db.incrby(keyname.format(org=orgName, orgCateg='ATTR_WEEK_COUNT'), 1)
self.serv_redis_db.expire(keyname.format(org=orgName, orgCateg='ATTR_WEEK_COUNT'), util.ONE_DAY*7)
self.logger.debug('Incrby: keyname={}'.format(keyname.format(org=orgName, orgCateg='ATTR_WEEK_COUNT')))
if contribType == 'Proposal':
proposalWeekCount = self.serv_redis_db.incrby(keyname.format(org=orgName, orgCateg='PROP_WEEK_COUNT'), 1)
self.logger.debug('Incrby: keyname={}'.format(keyname.format(org=orgName, orgCateg='PROP_WEEK_COUNT')))
self.serv_redis_db.expire(keyname.format(org=orgName, orgCateg='PROP_WEEK_COUNT'), util.ONE_DAY*7)
self.addContributionToCateg(datetime.datetime.now(), 'proposal', orgName)
if contribType == 'Sighting':
sightingWeekCount = self.serv_redis_db.incrby(keyname.format(org=orgName, orgCateg='SIGHT_WEEK_COUNT'), 1)
self.logger.debug('Incrby: keyname={}'.format(keyname.format(org=orgName, orgCateg='SIGHT_WEEK_COUNT')))
self.serv_redis_db.expire(keyname.format(org=orgName, orgCateg='SIGHT_WEEK_COUNT'), util.ONE_DAY*7)
self.addContributionToCateg(datetime.datetime.now(), 'sighting', orgName)
@@ -230,9 +247,11 @@ class Contributor_helper:
if contribType == 'Event':
eventWeekCount = self.serv_redis_db.incrby(keyname.format(org=orgName, orgCateg='EVENT_WEEK_COUNT'), 1)
self.logger.debug('Incrby: keyname={}'.format(keyname.format(org=orgName, orgCateg='EVENT_WEEK_COUNT')))
self.serv_redis_db.expire(keyname.format(org=orgName, orgCateg='EVENT_WEEK_COUNT'), util.ONE_DAY*7)
eventMonthCount = self.serv_redis_db.incrby(keyname.format(org=orgName, orgCateg='EVENT_MONTH_COUNT'), 1)
self.logger.debug('Incrby: keyname={}'.format(keyname.format(org=orgName, orgCateg='EVENT_MONTH_COUNT')))
self.serv_redis_db.expire(keyname.format(org=orgName, orgCateg='EVENT_MONTH_COUNT'), util.ONE_DAY*7)
# getRequirement parameters
@@ -275,6 +294,7 @@ class Contributor_helper:
for rankReq, ttl in contrib:
self.serv_redis_db.set(keyname.format(org=orgName, orgCateg='CONTRIB_REQ_'+str(rankReq)), 1)
self.logger.debug('Set: keyname={}'.format(keyname.format(org=orgName, orgCateg='CONTRIB_REQ_'+str(rankReq))))
self.serv_redis_db.expire(keyname.format(org=orgName, orgCateg='CONTRIB_REQ_'+str(rankReq)), ttl)
ContributionStatus = self.getCurrentContributionStatus(orgName)
@@ -322,10 +342,12 @@ class Contributor_helper:
def giveBadgeToOrg(self, org, badgeNum):
keyname = '{mainKey}:{org}:{orgCateg}'
self.serv_redis_db.set(keyname.format(mainKey=self.keyContribReq, org=org, orgCateg='BADGE_'+str(badgeNum)), 1)
self.logger.debug('Giving badge {} to org {}'.format(badgeNum, org))
def removeBadgeFromOrg(self, org, badgeNum):
keyname = '{mainKey}:{org}:{orgCateg}'
self.serv_redis_db.delete(keyname.format(mainKey=self.keyContribReq, org=org, orgCateg='BADGE_'+str(badgeNum)))
self.logger.debug('Removing badge {} from org {}'.format(badgeNum, org))
''' TROPHIES '''
def getOrgTrophies(self, org):
@@ -360,7 +382,6 @@ class Contributor_helper:
def posToRankMapping(self, pos, totNum):
mapping = self.trophyMapping
mapping_num = [math.ceil(float(float(totNum*i)/float(100))) for i in mapping]
# print(pos, totNum)
if pos == 0: #first
position = 1
else:
@@ -377,10 +398,12 @@ class Contributor_helper:
def giveTrophyPointsToOrg(self, org, categ, points):
keyname = '{mainKey}:{orgCateg}'
self.serv_redis_db.zincrby(keyname.format(mainKey=self.keyTrophy, orgCateg=categ), org, points)
self.logger.debug('Giving {} trophy points to {} in {}'.format(points, org, categ))
def removeTrophyPointsFromOrg(self, org, categ, points):
keyname = '{mainKey}:{orgCateg}'
self.serv_redis_db.zincrby(keyname.format(mainKey=self.keyTrophy, orgCateg=categ), org, -points)
self.logger.debug('Removing {} trophy points from {} in {}'.format(points, org, categ))
''' AWARDS HELPER '''
def getLastAwardsFromRedis(self):
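The seven-line logger bootstrap added to this constructor reappears verbatim in Geo_helper, Trendings_helper, Users_helper, zmq_dispatcher and zmq_subscriber below. A hypothetical shared helper (not part of this PR) that the repetition suggests:

import logging
import os

def setup_logger(cfg, name):
    # Hypothetical refactoring sketch; this PR inlines the block in each constructor.
    logDir = cfg.get('Log', 'directory')
    logPath = os.path.join(logDir, cfg.get('Log', 'filename'))
    if not os.path.exists(logDir):
        os.makedirs(logDir)
    logging.basicConfig(filename=logPath, filemode='a', level=logging.INFO)
    return logging.getLogger(name)

Because logging.basicConfig is a no-op once the root logger has a handler, only the first constructor that runs actually opens the file; the later copies of this block just return a named logger.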

helpers/geo_helper.py

@@ -2,6 +2,7 @@ import math, random
import os
import json
import datetime, time
import logging
import json
import redis
from collections import OrderedDict
@@ -21,6 +22,15 @@ class Geo_helper:
port=cfg.getint('RedisGlobal', 'port'),
db=cfg.getint('RedisMap', 'db'))
#logger
logDir = cfg.get('Log', 'directory')
logfilename = cfg.get('Log', 'filename')
logPath = os.path.join(logDir, logfilename)
if not os.path.exists(logDir):
os.makedirs(logDir)
logging.basicConfig(filename=logPath, filemode='a', level=logging.INFO)
self.logger = logging.getLogger(__name__)
self.keyCategCoord = "GEO_COORD"
self.keyCategCountry = "GEO_COUNTRY"
self.keyCategRad = "GEO_RAD"
@@ -102,21 +112,22 @@ class Geo_helper:
"regionCode": rep['full_rep'].country.iso_code,
}
self.serv_coord.publish(self.CHANNELDISP, json.dumps(to_send))
self.logger.info('Published: {}'.format(json.dumps(to_send)))
except ValueError:
print("can't resolve ip")
self.logger.warning("can't resolve ip")
except geoip2.errors.AddressNotFoundError:
print("Address not in Database")
self.logger.warning("Address not in Database")
def getCoordFromPhoneAndPublish(self, phoneNumber, categ):
try:
rep = phonenumbers.parse(phoneNumber, None)
if not (phonenumbers.is_valid_number(rep) or phonenumbers.is_possible_number(rep)):
print("Phone number not valid")
self.logger.warning("Phone number not valid")
return
country_name = geocoder.country_name_for_number(rep, "en")
country_code = self.country_to_iso[country_name]
if country_code is None:
print("Non matching ISO_CODE")
self.logger.warning("Non matching ISO_CODE")
return
coord = self.country_code_to_coord[country_code.lower()] # countrycode is in upper case
coord_dic = {'lat': coord['lat'], 'lon': coord['long']}
@@ -141,8 +152,9 @@ class Geo_helper:
"regionCode": country_code,
}
self.serv_coord.publish(self.CHANNELDISP, json.dumps(to_send))
self.logger.info('Published: {}'.format(json.dumps(to_send)))
except phonenumbers.NumberParseException:
print("Can't resolve phone number country")
self.logger.warning("Can't resolve phone number country")
''' UTIL '''
def push_to_redis_geo(self, keyCateg, lon, lat, content):
@@ -150,12 +162,13 @@ class Geo_helper:
today_str = util.getDateStrFormat(now)
keyname = "{}:{}".format(keyCateg, today_str)
self.serv_redis_db.geoadd(keyname, lon, lat, content)
self.logger.debug('Added to redis: keyname={}, lon={}, lat={}, content={}'.format(keyname, lon, lat, content))
def push_to_redis_zset(self, keyCateg, toAdd, endSubkey="", count=1):
now = datetime.datetime.now()
today_str = util.getDateStrFormat(now)
keyname = "{}:{}{}".format(keyCateg, today_str, endSubkey)
self.serv_redis_db.zincrby(keyname, toAdd, count)
self.logger.debug('Added to redis: keyname={}, toAdd={}, count={}'.format(keyname, toAdd, count))
def ip_to_coord(self, ip):
resp = self.reader.city(ip)
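push_to_redis_geo writes to a day-keyed GEO set. A usage sketch, assuming util.getDateStrFormat yields a YYYYMMDD-style string and a redis-py 2.x client, whose geoadd(key, lon, lat, member) argument order matches the call above:

import datetime
import json
import redis

r = redis.StrictRedis(host='localhost', port=6379, db=1)  # db number is an assumption
today_str = datetime.datetime.now().strftime('%Y%m%d')    # assumed date format
keyname = 'GEO_COORD:{}'.format(today_str)

member = json.dumps({'categ': 'ip-dst', 'value': 'x.x.x.x'})
r.geoadd(keyname, 2.3522, 48.8566, member)  # lon, lat, member
print(r.geopos(keyname, member))            # -> [(lon, lat)]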

helpers/trendings_helper.py

@@ -2,6 +2,7 @@ import math, random
import os
import json
import datetime, time
import logging
from collections import OrderedDict
import util
@@ -19,6 +20,15 @@ class Trendings_helper:
self.keySigh = "TRENDINGS_SIGHT_sightings"
self.keyFalse = "TRENDINGS_SIGHT_false_positive"
#logger
logDir = cfg.get('Log', 'directory')
logfilename = cfg.get('Log', 'filename')
logPath = os.path.join(logDir, logfilename)
if not os.path.exists(logDir):
os.makedirs(logDir)
logging.basicConfig(filename=logPath, filemode='a', level=logging.INFO)
self.logger = logging.getLogger(__name__)
''' SETTER '''
def addGenericTrending(self, trendingType, data, timestamp):
@@ -30,6 +40,7 @@ class Trendings_helper:
else:
to_save = data
self.serv_redis_db.zincrby(keyname, to_save, 1)
self.logger.debug('Added to redis: keyname={}, content={}'.format(keyname, to_save))
def addTrendingEvent(self, eventName, timestamp):
self.addGenericTrending(self.keyEvent, eventName, timestamp)
@@ -53,12 +64,14 @@ class Trendings_helper:
timestampDate_str = util.getDateStrFormat(timestampDate)
keyname = "{}:{}".format(self.keySigh, timestampDate_str)
self.serv_redis_db.incrby(keyname, 1)
self.logger.debug('Incrby: keyname={}'.format(keyname))
def addFalsePositive(self, timestamp):
timestampDate = datetime.datetime.fromtimestamp(float(timestamp))
timestampDate_str = util.getDateStrFormat(timestampDate)
keyname = "{}:{}".format(self.keyFalse, timestampDate_str)
self.serv_redis_db.incrby(keyname, 1)
self.logger.debug('Incrby: keyname={}'.format(keyname))
''' GETTER '''
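The trending setters all share one shape: build a per-day key, then increment a sorted set (or a plain counter for sightings). A sketch, assuming the same YYYYMMDD suffix, a hypothetical TRENDINGS_EVENTS key, and the redis-py 2.x zincrby(key, member, amount) argument order used throughout this diff:

import datetime
import redis

r = redis.StrictRedis(host='localhost', port=6379, db=1)  # db number is an assumption
today_str = datetime.datetime.now().strftime('%Y%m%d')    # assumed date format

keyname = 'TRENDINGS_EVENTS:{}'.format(today_str)         # key name is an assumption
r.zincrby(keyname, 'my-event', 1)                         # redis-py 2.x order: (name, value, amount)
print(r.zrange(keyname, 0, -1, withscores=True))          # -> [(b'my-event', 1.0)]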

helpers/users_helper.py

@@ -2,9 +2,11 @@ import math, random
import os
import json
import datetime, time
import logging
import util
import contributor_helper
from . import contributor_helper
class Users_helper:
def __init__(self, serv_redis_db, cfg):
@@ -16,11 +18,21 @@ class Users_helper:
self.keyOrgLog = "LOGIN_ORG"
self.keyContribDay = contributor_helper.KEYDAY # Key to get monthly contribution
#logger
logDir = cfg.get('Log', 'directory')
logfilename = cfg.get('Log', 'filename')
logPath = os.path.join(logDir, logfilename)
if not os.path.exists(logDir):
os.makedirs(logDir)
logging.basicConfig(filename=logPath, filemode='a', level=logging.INFO)
self.logger = logging.getLogger(__name__)
def addTemporary(self, org, timestamp):
timestampDate = datetime.datetime.fromtimestamp(float(timestamp))
timestampDate_str = util.getDateHoursStrFormat(timestampDate)
keyname_timestamp = "{}:{}".format(self.keyTimestampSet, timestampDate_str)
self.serv_redis_db.sadd(keyname_timestamp, org)
self.logger.debug('Added to redis: keyname={}, org={}'.format(keyname_timestamp, org))
self.serv_redis_db.expire(keyname_timestamp, 60*60)
def hasAlreadyBeenAdded(self, org, timestamp):
@@ -39,10 +51,12 @@ class Users_helper:
if not self.hasAlreadyBeenAdded(org, timestamp):
keyname_timestamp = "{}:{}".format(self.keyTimestamp, timestampDate_str)
self.serv_redis_db.sadd(keyname_timestamp, timestamp)
self.logger.debug('Added to redis: keyname={}, timestamp={}'.format(keyname_timestamp, timestamp))
self.addTemporary(org, timestamp)
keyname_org = "{}:{}".format(self.keyOrgLog, timestampDate_str)
self.serv_redis_db.zincrby(keyname_org, org, 1)
self.logger.debug('Added to redis: keyname={}, org={}'.format(keyname_org, org))
def getUserLogins(self, date):
keyname = "{}:{}".format(self.keyTimestamp, util.getDateStrFormat(date))

server.py

@@ -8,17 +8,21 @@ from time import gmtime as now
from time import sleep, strftime
import datetime
import os
import logging
import util
import geo_helper
import contributor_helper
import users_helper
import trendings_helper
from helpers import geo_helper
from helpers import contributor_helper
from helpers import users_helper
from helpers import trendings_helper
configfile = os.path.join(os.environ['DASH_CONFIG'], 'config.cfg')
cfg = configparser.ConfigParser()
cfg.read(configfile)
logger = logging.getLogger('werkzeug')
logger.setLevel(logging.ERROR)
server_host = cfg.get("Server", "host")
server_port = cfg.getint("Server", "port")
@@ -64,7 +68,7 @@ class LogItem():
FIELDNAME_ORDER_HEADER = []
FIELDNAME_ORDER.append("Time")
FIELDNAME_ORDER_HEADER.append("Time")
for item in json.loads(cfg.get('Log', 'fieldname_order')):
for item in json.loads(cfg.get('Dashboard', 'fieldname_order')):
if type(item) is list:
FIELDNAME_ORDER_HEADER.append(" | ".join(item))
else:
@@ -102,8 +106,8 @@ class EventMessage():
msg = msg.decode('utf8')
try:
jsonMsg = json.loads(msg)
except json.JSONDecodeError:
print('json decode error')
except json.JSONDecodeError as e:
logger.error(e)
jsonMsg = { 'name': "undefined" ,'log': json.loads(msg) }
self.feedName = jsonMsg['name']
@@ -135,7 +139,7 @@ def index():
size_dashboard_width=[cfg.getint('Dashboard' ,'size_dashboard_left_width'), 12-cfg.getint('Dashboard', 'size_dashboard_left_width')],
itemToPlot=cfg.get('Dashboard', 'item_to_plot'),
graph_log_refresh_rate=cfg.getint('Dashboard' ,'graph_log_refresh_rate'),
char_separator=cfg.get('Log', 'char_separator'),
char_separator=cfg.get('Dashboard', 'char_separator'),
rotation_wait_time=cfg.getint('Dashboard' ,'rotation_wait_time'),
max_img_rotation=cfg.getint('Dashboard' ,'max_img_rotation'),
hours_spanned=cfg.getint('Dashboard' ,'hours_spanned'),
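The FIELDNAME_ORDER loop above is what consumes the relocated fieldname_order option: plain entries become one column each, and nested lists collapse into a single " | "-joined header, mirroring the char_separator-joined values built in zmq_dispatcher below. A standalone sketch:

import json

raw = '["Event.id", "Attribute.Tag", ["Attribute.value", "Attribute.comment"]]'
header = ['Time']
for item in json.loads(raw):
    header.append(' | '.join(item) if isinstance(item, list) else item)
# header == ['Time', 'Event.id', 'Attribute.Tag', 'Attribute.value | Attribute.comment']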

zmq_dispatcher.py

@@ -2,7 +2,7 @@
import time, datetime
import copy
from pprint import pprint
import logging
import zmq
import redis
import random
@@ -13,15 +13,23 @@ import sys
import json
import util
import geo_helper
import contributor_helper
import users_helper
import trendings_helper
from helpers import geo_helper
from helpers import contributor_helper
from helpers import users_helper
from helpers import trendings_helper
configfile = os.path.join(os.environ['DASH_CONFIG'], 'config.cfg')
cfg = configparser.ConfigParser()
cfg.read(configfile)
logDir = cfg.get('Log', 'directory')
logfilename = cfg.get('Log', 'filename')
logPath = os.path.join(logDir, logfilename)
if not os.path.exists(logDir):
os.makedirs(logDir)
logging.basicConfig(filename=logPath, filemode='a', level=logging.INFO)
logger = logging.getLogger('zmq_dispatcher')
CHANNEL = cfg.get('RedisLog', 'channel')
LISTNAME = cfg.get('RedisLIST', 'listName')
@@ -47,6 +55,7 @@ trendings_helper = trendings_helper.Trendings_helper(serv_redis_db, cfg)
def publish_log(zmq_name, name, content, channel=CHANNEL):
to_send = { 'name': name, 'log': json.dumps(content), 'zmqName': zmq_name }
serv_log.publish(channel, json.dumps(to_send))
logger.debug('Published: {}'.format(json.dumps(to_send)))
def getFields(obj, fields):
jsonWalker = fields.split('.')
@@ -68,7 +77,7 @@ def getFields(obj, fields):
##############
def handler_log(zmq_name, jsonevent):
print('sending', 'log')
logger.info('Log not processed')
return
def handler_dispatcher(zmq_name, jsonObj):
@@ -76,12 +85,12 @@ def handler_dispatcher(zmq_name, jsonObj):
handler_event(zmq_name, jsonObj)
def handler_keepalive(zmq_name, jsonevent):
print('sending', 'keepalive')
logger.info('Handling keepalive')
to_push = [ jsonevent['uptime'] ]
publish_log(zmq_name, 'Keepalive', to_push)
def handler_user(zmq_name, jsondata):
print('sending', 'user')
logger.info('Handling user')
action = jsondata['action']
json_user = jsondata['User']
json_org = jsondata['Organisation']
@@ -93,11 +102,11 @@ def handler_user(zmq_name, jsondata):
pass
def handler_conversation(zmq_name, jsonevent):
logger.info('Handling conversation')
try: #only consider POST, not THREAD
jsonpost = jsonevent['Post']
except KeyError:
return
print('sending' ,'Post')
except KeyError as e:
logger.error('Error in handler_conversation: {}'.format(e))
org = jsonpost['org_name']
categ = None
action = 'add'
@@ -112,11 +121,11 @@ def handler_conversation(zmq_name, jsonevent):
trendings_helper.addTrendingDisc(eventName, nowSec)
def handler_object(zmq_name, jsondata):
print('obj')
logger.info('Handling object')
return
def handler_sighting(zmq_name, jsondata):
print('sending' ,'sighting')
logger.info('Handling sighting')
jsonsight = jsondata['Sighting']
org = jsonsight['Event']['Orgc']['name']
categ = jsonsight['Attribute']['category']
@@ -132,6 +141,7 @@ def handler_sighting(zmq_name, jsondata):
trendings_helper.addFalsePositive(timestamp)
def handler_event(zmq_name, jsonobj):
logger.info('Handling event')
#fields: threat_level_id, id, info
jsonevent = jsonobj['Event']
@@ -170,6 +180,7 @@ def handler_event(zmq_name, jsonobj):
isLabeled=eventLabeled)
def handler_attribute(zmq_name, jsonobj, hasAlreadyBeenContributed=False):
logger.info('Handling attribute')
# check if jsonattr is an attribute object
if 'Attribute' in jsonobj:
jsonattr = jsonobj['Attribute']
@@ -187,12 +198,12 @@ def handler_attribute(zmq_name, jsonobj, hasAlreadyBeenContributed=False):
trendings_helper.addTrendingTags(tags, timestamp)
to_push = []
for field in json.loads(cfg.get('Log', 'fieldname_order')):
for field in json.loads(cfg.get('Dashboard', 'fieldname_order')):
if type(field) is list:
to_join = []
for subField in field:
to_join.append(getFields(jsonobj, subField))
to_add = cfg.get('Log', 'char_separator').join(to_join)
to_add = cfg.get('Dashboard', 'char_separator').join(to_join)
else:
to_add = getFields(jsonobj, field)
to_push.append(to_add)
@@ -227,7 +238,7 @@ def process_log(zmq_name, event):
try:
dico_action[topic](zmq_name, jsonevent)
except KeyError as e:
print(e)
logger.error(e)
def main(sleeptime):
@@ -235,7 +246,7 @@ def main(sleeptime):
while True:
content = serv_list.rpop(LISTNAME)
if content is None:
print('Processed', numMsg, 'message(s) since last sleep.')
logger.debug('Processed {} message(s) since last sleep.'.format(numMsg))
numMsg = 0
time.sleep(sleeptime)
continue
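Taken together, main() drains the Redis list that zmq_subscriber fills, and process_log routes each message by topic through the dico_action table, with unknown topics now logged instead of printed. A condensed sketch of process_log (the "topic payload" frame layout is an assumption based on the handlers above):

def process_log(zmq_name, event):
    topic, eventdata = event.split(' ', maxsplit=1)  # assumed ZMQ frame layout
    jsonevent = json.loads(eventdata)
    try:
        dico_action[topic](zmq_name, jsonevent)      # dispatch table of the handler_* functions
    except KeyError as e:
        logger.error(e)                              # unknown topic goes to the log file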

zmq_subscriber.py

@@ -1,8 +1,8 @@
#!/usr/bin/env python3.5
import time, datetime
from pprint import pprint
import zmq
import logging
import redis
import configparser
import argparse
@@ -13,6 +13,13 @@ import json
configfile = os.path.join(os.environ['DASH_CONFIG'], 'config.cfg')
cfg = configparser.ConfigParser()
cfg.read(configfile)
logDir = cfg.get('Log', 'directory')
logfilename = cfg.get('Log', 'filename')
logPath = os.path.join(logDir, logfilename)
if not os.path.exists(logDir):
os.makedirs(logDir)
logging.basicConfig(filename=logPath, filemode='a', level=logging.INFO)
logger = logging.getLogger('zmq_subscriber')
ZMQ_URL = cfg.get('RedisGlobal', 'zmq_url')
CHANNEL = cfg.get('RedisLog', 'channel')
@@ -32,6 +39,7 @@ def put_in_redis_list(zmq_name, content):
content = content.decode('utf8')
to_add = {'zmq_name': zmq_name, 'content': content}
serv_list.lpush(LISTNAME, json.dumps(to_add))
logger.debug('Pushed: {}'.format(json.dumps(to_add)))
def main(zmqName):
context = zmq.Context()
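For context beyond this hunk, a SUB-socket loop that feeds put_in_redis_list could look like the following; the socket options and loop shape are assumptions, not part of this commit:

def main(zmqName):
    context = zmq.Context()
    socket = context.socket(zmq.SUB)
    socket.connect(ZMQ_URL)
    socket.setsockopt_string(zmq.SUBSCRIBE, '')  # subscribe to every topic (assumption)
    while True:
        content = socket.recv()
        put_in_redis_list(zmqName, content)      # each frame is queued for zmq_dispatcher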