2018-05-04 13:53:29 +02:00
|
|
|
#!/usr/bin/env python3
|
2016-12-09 08:46:37 +01:00
|
|
|
# -*-coding:UTF-8 -*
|
|
|
|
|
|
|
|
'''
|
|
|
|
Flask functions and routes for the dashboard page
|
|
|
|
'''
|
2016-12-09 08:50:36 +01:00
|
|
|
import json
|
2018-07-25 16:48:44 +02:00
|
|
|
import os
|
2016-12-09 13:53:57 +01:00
|
|
|
import datetime
|
2018-07-26 10:34:43 +02:00
|
|
|
import time
|
2016-12-09 08:46:37 +01:00
|
|
|
import flask
|
2018-07-25 16:48:44 +02:00
|
|
|
|
|
|
|
from Date import Date
|
|
|
|
|
2018-07-26 10:34:43 +02:00
|
|
|
from flask import Flask, render_template, jsonify, request, Blueprint, url_for
|
2019-06-19 17:02:09 +02:00
|
|
|
|
|
|
|
from Role_Manager import login_admin, login_analyst
|
2019-05-03 16:52:05 +02:00
|
|
|
from flask_login import login_required
|
2016-12-09 08:46:37 +01:00
|
|
|
|
|
|
|
# ============ VARIABLES ============

import Flask_config

# Shared Flask application object and parsed configuration, created once in
# Flask_config and reused by every blueprint.
app = Flask_config.app
cfg = Flask_config.cfg
# URL prefix under which this blueprint is mounted (see REGISTRATION below).
baseUrl = Flask_config.baseUrl
# Redis connections: r_serv holds the module/queue status hashes read by
# get_queues(); r_serv_log carries the "Script.*" pub/sub log channels
# streamed by event_stream(); r_serv_db stores background-update state
# read by index().
r_serv = Flask_config.r_serv
r_serv_log = Flask_config.r_serv_log
r_serv_db = Flask_config.r_serv_db

# Maximum number of warning-log entries shown on the dashboard.
max_dashboard_logs = Flask_config.max_dashboard_logs
# Per-update metadata (expected step count, warning messages) keyed by
# background-update name — used by index() to detect an update in progress.
dict_update_description = Flask_config.dict_update_description

dashboard = Blueprint('dashboard', __name__, template_folder='templates')
|
|
|
|
|
2016-12-09 08:46:37 +01:00
|
|
|
# ============ FUNCTIONS ============
|
|
|
|
|
|
|
|
def event_stream():
    """Yield Server-Sent-Events frames for every non-DEBUG module log message.

    Subscribes to all Redis pub/sub channels matching "Script.*" and, for each
    published message, yields an SSE ``data:`` frame containing the message as
    JSON. Messages whose channel level segment is ``DEBUG`` (channel format is
    assumed to be ``Script.<LEVEL>`` — confirmed by the split below) and
    subscription-confirmation messages (type != 'pmessage') are skipped.

    Returns:
        generator of str: SSE frames, each ``'data: <json>\n\n'``.
    """
    pubsub = r_serv_log.pubsub()
    pubsub.psubscribe("Script" + '.*')
    for msg in pubsub.listen():
        # Build a plain dict (don't shadow the builtin `type` or rebind `msg`
        # with a different structure mid-loop, as the original code did).
        event = {'channel': msg['channel'], 'type': msg['type'],
                 'pattern': msg['pattern'], 'data': msg['data']}
        # Channel is expected to look like "Script.<LEVEL>"; guard against a
        # channel without a '.' instead of raising IndexError.
        channel_parts = event['channel'].split('.')
        level = channel_parts[1] if len(channel_parts) > 1 else ''
        if event['type'] == 'pmessage' and level != "DEBUG":
            yield 'data: %s\n\n' % json.dumps(event)
|
|
|
|
|
|
|
|
def get_queues(r):
    """Return the status of every processing-module queue.

    Reads the "queues" hash (queue name -> pending-item count) and, for each
    module instance registered in "MODULE_TYPE_<queue>", fetches its
    "MODULE_<queue>_<num>" entry ("<timestamp>, <path>") to compute how long
    that instance has been working on its current item.

    Args:
        r: Redis-like connection exposing hgetall/smembers/get.

    Returns:
        list of tuple: (queue_name, pending_count, seconds_processing, module_num).
    """
    # We may want to put the llen in a pipeline to do only one query.
    newData = []
    for queue, card in r.hgetall("queues").items():
        key = "MODULE_" + queue + "_"
        keySet = "MODULE_TYPE_" + queue

        for moduleNum in r.smembers(keySet):
            value = r.get(key + str(moduleNum))
            if value is not None:
                timestamp, path = value.split(", ")
                if timestamp is not None:
                    startTime_readable = datetime.datetime.fromtimestamp(int(timestamp))
                    seconds = int((datetime.datetime.now() - startTime_readable).total_seconds())
                    newData.append((queue, card, seconds, moduleNum))
                else:
                    # BUGFIX: original referenced undefined name `cards` here
                    # (NameError on this branch); use `card` like the branch above.
                    newData.append((queue, card, 0, moduleNum))

    return newData
|
|
|
|
|
2018-07-25 16:48:44 +02:00
|
|
|
def get_date_range(date_from, num_day):
    """Build a list of dashes-formatted dates going back from a start date.

    Args:
        date_from: date string in YYYYMMDD form.
        num_day: number of days to step back (inclusive, so the result has
            num_day + 1 entries).

    Returns:
        list of str: dates as 'YYYY-MM-DD', newest first.
    """
    start = Date(str(date_from[0:4]) + str(date_from[4:6]).zfill(2) + str(date_from[6:8]).zfill(2))
    dates = []

    for offset in range(num_day + 1):
        day = start.substract_day(offset)
        dates.append(day[0:4] + '-' + day[4:6] + '-' + day[6:8])

    return dates
|
|
|
|
|
|
|
|
def dashboard_alert(log):
    """Parse one warning-log line into a dict for the dashboard, or False.

    Expects a line whose first 20 characters are '[YYYY-MM-DD HH:MM:SS' style
    timestamp text and whose payload (from column 46) is six ';'-separated
    fields: script, domain, date_paste, paste, message, paste-id.

    Args:
        log: raw log line.

    Returns:
        dict with keys date/time/script/domain/date_paste/paste/message/path,
        or False when the line is too short or malformed.
    """
    # check if we need to display this log — too-short lines can't hold a record
    if len(log) <= 50:
        return False

    date = log[1:5] + log[6:8] + log[9:11]
    utc_str = log[1:20]
    fields = log[46:].split(';')

    if len(fields) != 6:
        return False

    local_time = datetime_from_utc_to_local(utc_str)
    path = url_for('showsavedpastes.showsavedpaste', paste=fields[5])

    return {'date': date, 'time': local_time, 'script': fields[0],
            'domain': fields[1], 'date_paste': fields[2],
            'paste': fields[3], 'message': fields[4], 'path': path}
|
|
|
|
|
2018-07-26 10:34:43 +02:00
|
|
|
def datetime_from_utc_to_local(utc_str):
|
|
|
|
utc_datetime = datetime.datetime.strptime(utc_str, '%Y-%m-%d %H:%M:%S')
|
|
|
|
now_timestamp = time.time()
|
|
|
|
offset = datetime.datetime.fromtimestamp(now_timestamp) - datetime.datetime.utcfromtimestamp(now_timestamp)
|
|
|
|
local_time_str = (utc_datetime + offset).strftime('%H:%M:%S')
|
|
|
|
return local_time_str
|
2018-07-25 16:48:44 +02:00
|
|
|
|
2016-12-09 08:46:37 +01:00
|
|
|
# ============ ROUTES ============
|
|
|
|
|
2017-04-19 11:02:03 +02:00
|
|
|
@dashboard.route("/_logs")
@login_required
@login_analyst
def logs():
    """Stream module log messages to the browser as Server-Sent Events."""
    response = flask.Response(event_stream(), mimetype="text/event-stream")
    return response
|
|
|
|
|
2018-07-25 16:48:44 +02:00
|
|
|
@dashboard.route("/_get_last_logs_json")
@login_required
@login_analyst
def get_last_logs_json():
    """Return the most recent warning-log alerts as JSON, oldest first.

    Walks the daily 'logs/Script_warn-YYYY-MM-DD.log' files under
    $AIL_HOME, starting from today and going back up to max_day_search
    days, reading each file's lines from the end until max_dashboard_logs
    parseable alerts have been collected.
    """
    date = datetime.datetime.now().strftime("%Y%m%d")

    # Search at most this many daily log files back from today.
    max_day_search = 6
    day_search = 0
    warning_found = 0
    warning_to_found = max_dashboard_logs

    last_logs = []

    date_range = get_date_range(date, max_day_search)
    # Outer loop: one daily file per iteration; stops when the quota of
    # alerts is reached or every day in the range has been tried.
    while max_day_search != day_search and warning_found != warning_to_found:

        filename_warning_log = 'logs/Script_warn-'+ date_range[day_search] +'.log'
        # NOTE(review): raises KeyError if AIL_HOME is unset — presumably
        # guaranteed by the deployment environment; confirm.
        filename_log = os.path.join(os.environ['AIL_HOME'], filename_warning_log)

        try:
            with open(filename_log, 'r') as f:
                lines = f.read().splitlines()
                # Walk the file backwards (newest line first) via negative
                # indices; running off the start raises IndexError, which is
                # the signal to move on to the previous day's file.
                curr_index = -1
                while warning_found != warning_to_found:
                    try:
                        # get lasts warning logs
                        log_warn = dashboard_alert(lines[curr_index])
                        if log_warn != False:
                            last_logs.append(log_warn)
                            warning_found = warning_found + 1
                        curr_index = curr_index - 1

                    except IndexError:
                        # check previous warning log file
                        day_search = day_search + 1
                        break

        except FileNotFoundError:
            # check previous warning log file
            day_search = day_search + 1

    # Collected newest-first; reverse so the client receives oldest-first.
    return jsonify(list(reversed(last_logs)))
|
2018-07-25 16:48:44 +02:00
|
|
|
|
2016-12-09 08:46:37 +01:00
|
|
|
|
2017-04-19 11:02:03 +02:00
|
|
|
@dashboard.route("/_stuff", methods=['GET'])
@login_required
@login_analyst
def stuff():
    """Expose the current module/queue status as JSON for dashboard polling."""
    queues_status = get_queues(r_serv)
    return jsonify(row1=queues_status)
|
|
|
|
|
|
|
|
|
2017-04-19 11:02:03 +02:00
|
|
|
@dashboard.route("/")
@login_required
@login_analyst
def index():
    """Render the main dashboard page.

    Gathers display thresholds from the configuration, the choices for the
    "number of logs" selector, and the state of any background update in
    progress, then renders index.html with them.
    """
    default_minute = cfg.get("Flask", "minute_processed_paste")
    threshold_stucked_module = cfg.getint("Module_ModuleInformation", "threshold_stucked_module")

    # Selector options, always including the configured default, deduplicated
    # and in ascending order.
    log_select = sorted({10, 25, 50, 100} | {max_dashboard_logs})

    # Check if update in progress
    update_in_progress = False
    update_warning_message = ''
    update_warning_message_notice_me = ''
    current_update = r_serv_db.get('ail:current_background_update')
    if current_update:
        # An update is "in progress" while the number of completed steps
        # differs from the expected total for that update.
        steps_done = r_serv_db.scard('ail:update_{}'.format(current_update))
        if steps_done != dict_update_description[current_update]['nb_background_update']:
            update_in_progress = True
            update_warning_message = dict_update_description[current_update]['update_warning_message']
            update_warning_message_notice_me = dict_update_description[current_update]['update_warning_message_notice_me']

    return render_template("index.html", default_minute = default_minute,
                            threshold_stucked_module=threshold_stucked_module,
                            log_select=log_select, selected=max_dashboard_logs,
                            update_warning_message=update_warning_message,
                            update_in_progress=update_in_progress,
                            update_warning_message_notice_me=update_warning_message_notice_me)
|
2017-04-19 11:02:03 +02:00
|
|
|
|
|
|
|
# ========= REGISTRATION =========
# Mount this blueprint's routes under the application-wide base URL.
app.register_blueprint(dashboard, url_prefix=baseUrl)
|