2019-05-29 01:42:39 +02:00
|
|
|
import datetime
|
2018-09-29 21:57:29 +02:00
|
|
|
import json
|
|
|
|
import logging
|
2019-05-29 01:42:39 +02:00
|
|
|
import os
|
|
|
|
import random
|
2019-06-01 08:03:31 +02:00
|
|
|
import sys
|
2019-05-29 01:42:39 +02:00
|
|
|
import time
|
2018-09-29 21:57:29 +02:00
|
|
|
|
|
|
|
|
|
|
|
class Live_helper:
    """Publish live log events on a Redis pub/sub channel and maintain a
    short per-category cache of recent events (Redis lists) so that newly
    connected dashboard clients can be back-filled with recent history."""

    def __init__(self, serv_live, cfg):
        """
        serv_live: Redis connection used both for publishing and for the
                   stream-log cache lists.
        cfg:       ConfigParser-like object; reads [Dashboard], [RedisLog]
                   and [Log] sections.
        """
        self.serv_live = serv_live
        self.cfg = cfg
        # NOTE(review): read from config but the trim below hard-codes 100;
        # looks like this was meant to bound the cache — confirm before wiring it in.
        self.maxCacheHistory = cfg.get('Dashboard', 'maxCacheHistory')

        # REDIS keys
        self.CHANNEL = cfg.get('RedisLog', 'channel')
        self.prefix_redis_key = "TEMP_CACHE_LIVE:"

        # logger: one file per helpers module, created on demand
        logDir = cfg.get('Log', 'directory')
        logfilename = cfg.get('Log', 'helpers_filename')
        logPath = os.path.join(logDir, logfilename)
        if not os.path.exists(logDir):
            os.makedirs(logDir)
        try:
            logging.basicConfig(filename=logPath, filemode='a', level=logging.INFO)
        except PermissionError as error:
            # Log file not writable: tell the operator and bail out with a
            # conventional "permission" exit status.
            print(error)
            print("Please fix the above and try again.")
            sys.exit(126)
        self.logger = logging.getLogger(__name__)

    def publish_log(self, zmq_name, name, content, channel=None):
        """Publish one event on the Redis channel and, unless it is a
        Keepalive, add it to the per-category stream-log cache.

        zmq_name: identifier of the originating ZMQ feed.
        name:     event category (e.g. 'Attribute', 'ObjectAttribute',
                  'Keepalive'); ObjectAttribute events are cached under
                  'Attribute'.
        content:  JSON-serializable event payload.
        channel:  optional override for the configured Redis channel.
        """
        channel = channel if channel is not None else self.CHANNEL
        # Published form double-encodes the payload ('log' is a JSON string);
        # the cached form keeps it as a structure — preserved as-is.
        to_send = { 'name': name, 'log': json.dumps(content), 'zmqName': zmq_name }
        to_send_keep = { 'name': name, 'log': content, 'zmqName': zmq_name }
        j_to_send = json.dumps(to_send)
        j_to_send_keep = json.dumps(to_send_keep)
        self.serv_live.publish(channel, j_to_send)
        self.logger.debug('Published: %s', j_to_send)
        if name != 'Keepalive':
            # BUGFIX: original read `if 'ObjectAttribute'` — a non-empty
            # literal is always truthy, so EVERY category was cached under
            # 'Attribute'. Compare against the actual event name instead.
            name = 'Attribute' if name == 'ObjectAttribute' else name
            self.add_to_stream_log_cache(name, j_to_send_keep)

    def get_stream_log_cache(self, cacheKey):
        """Return the cached entries for ``cacheKey`` as parsed JSON
        objects, most recent first (lpush order)."""
        rKey = self.prefix_redis_key + cacheKey
        entries = self.serv_live.lrange(rKey, 0, -1)
        return [json.loads(entry) for entry in entries]

    def add_to_stream_log_cache(self, cacheKey, item):
        """Prepend ``item`` (JSON-encoded if not already a string) to the
        Redis cache list for ``cacheKey``; probabilistically trim the list
        to keep it bounded."""
        rKey = self.prefix_redis_key + cacheKey
        if not isinstance(item, str):
            item = json.dumps(item)
        self.serv_live.lpush(rKey, item)
        # Trim only ~1 call in 9 to amortize the LTRIM cost; the list may
        # briefly exceed the bound between trims.
        if random.randint(0, 8) == 0:
            self.serv_live.ltrim(rKey, 0, 100)