2018-03-14 17:08:45 +01:00
|
|
|
#!/usr/bin/env python3
|
2017-09-11 14:53:06 +02:00
|
|
|
import configparser
|
2017-10-25 17:32:06 +02:00
|
|
|
import datetime
|
2019-05-29 02:09:14 +02:00
|
|
|
import errno
|
2019-05-29 01:30:57 +02:00
|
|
|
import json
|
2017-12-04 16:44:44 +01:00
|
|
|
import logging
|
2019-05-29 01:30:57 +02:00
|
|
|
import math
|
|
|
|
import os
|
|
|
|
import random
|
|
|
|
from time import gmtime as now
|
|
|
|
from time import sleep, strftime
|
|
|
|
|
|
|
|
import redis
|
2017-09-11 14:53:06 +02:00
|
|
|
|
2017-11-06 10:42:51 +01:00
|
|
|
import util
|
2019-05-29 01:30:57 +02:00
|
|
|
from flask import (Flask, Response, jsonify, render_template, request,
|
2019-05-29 04:18:32 +02:00
|
|
|
send_from_directory, stream_with_context)
|
2019-05-29 01:30:57 +02:00
|
|
|
from helpers import (contributor_helper, geo_helper, live_helper,
|
|
|
|
trendings_helper, users_helper)
|
2017-11-06 10:42:51 +01:00
|
|
|
|
2018-03-31 12:21:52 +02:00
|
|
|
# --- Configuration -----------------------------------------------------------
# Configuration file lives next to this script under config/config.cfg.
configfile = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'config/config.cfg')
cfg = configparser.ConfigParser()
cfg.read(configfile)

# Silence werkzeug's per-request access log; only errors are kept.
logger = logging.getLogger('werkzeug')
logger.setLevel(logging.ERROR)

server_host = cfg.get("Server", "host")
server_port = cfg.getint("Server", "port")
# NOTE(review): cfg.get returns a *string*, so any non-empty value (including
# "False") is truthy when later passed to app.run(debug=...) — confirm intent.
server_debug = cfg.get("Server", "debug")

app = Flask(__name__)

# --- Redis connections -------------------------------------------------------
# Three connections to the same Redis host, each on its own logical database:
# log stream, map/geo stream, and the main dashboard datastore.
redis_server_log = redis.StrictRedis(
        host=cfg.get('RedisGlobal', 'host'),
        port=cfg.getint('RedisGlobal', 'port'),
        db=cfg.getint('RedisLog', 'db'))
redis_server_map = redis.StrictRedis(
        host=cfg.get('RedisGlobal', 'host'),
        port=cfg.getint('RedisGlobal', 'port'),
        db=cfg.getint('RedisMap', 'db'))
serv_redis_db = redis.StrictRedis(
        host=cfg.get('RedisGlobal', 'host'),
        port=cfg.getint('RedisGlobal', 'port'),
        db=cfg.getint('RedisDB', 'db'))

# Keys under which the log/map streams are cached in Redis.
streamLogCacheKey = cfg.get('RedisLog', 'streamLogCacheKey')
streamMapCacheKey = cfg.get('RedisLog', 'streamMapCacheKey')

# --- Helper singletons -------------------------------------------------------
# Each helper module name is rebound to an instance of its main class,
# sharing the dashboard Redis connection and the parsed configuration.
live_helper = live_helper.Live_helper(serv_redis_db, cfg)
geo_helper = geo_helper.Geo_helper(serv_redis_db, cfg)
contributor_helper = contributor_helper.Contributor_helper(serv_redis_db, cfg)
users_helper = users_helper.Users_helper(serv_redis_db, cfg)
trendings_helper = trendings_helper.Trendings_helper(serv_redis_db, cfg)
|
2017-11-02 16:10:40 +01:00
|
|
|
|
2017-09-11 14:53:06 +02:00
|
|
|
|
2017-11-03 09:38:11 +01:00
|
|
|
##########
|
|
|
|
## UTIL ##
|
|
|
|
##########
|
|
|
|
|
|
|
|
''' INDEX '''
|
2017-09-11 14:53:06 +02:00
|
|
|
class LogItem():
    """One log entry of the live feed, rendered as a datatable row.

    The column layout comes from the [Dashboard] fieldname_order config
    entry: a JSON list whose items are either a field name or a list of
    field names that are joined into a single column.
    """

    # Parsed once at class-creation time from the configuration.
    FIELDNAME_ORDER = []
    FIELDNAME_ORDER_HEADER = []
    for item in json.loads(cfg.get('Dashboard', 'fieldname_order')):
        if isinstance(item, list):
            # Combined column: header shows the joined sub-field names.
            FIELDNAME_ORDER_HEADER.append(" | ".join(item))
        else:
            FIELDNAME_ORDER_HEADER.append(item)
        FIELDNAME_ORDER.append(item)

    def __init__(self, feed, filters=None):
        # BUG FIX: the original used a mutable default argument (filters={}),
        # which is shared between calls; use None as sentinel instead.
        self.filters = {} if filters is None else filters
        self.feed = feed
        self.fields = []

    def get_head_row(self):
        """Return the list of column header labels."""
        return list(LogItem.FIELDNAME_ORDER_HEADER)

    def get_row(self):
        """Return the row as {column_index: value}, or False if filtered out."""
        if not self.pass_filter():
            return False

        to_ret = {}
        # Use the already-parsed FIELDNAME_ORDER instead of re-parsing the
        # config JSON on every row (config is read once at startup).
        for i, field in enumerate(LogItem.FIELDNAME_ORDER):
            if isinstance(field, list):
                # Combined column: join each sub-field value with the
                # configured separator.
                to_join = [str(util.getFields(self.feed, subField)) for subField in field]
                to_add = cfg.get('Dashboard', 'char_separator').join(to_join)
            else:
                to_add = util.getFields(self.feed, field)
            to_ret[i] = to_add if to_add is not None else ''
        return to_ret

    def pass_filter(self):
        """Return True if the feed entry matches every active filter exactly."""
        # Renamed loop variable: 'filter' shadowed the builtin.
        for filterKey, filterValue in self.filters.items():
            jsonValue = util.getFields(self.feed, filterKey)
            if jsonValue is None or jsonValue != filterValue:
                return False
        return True
|
|
|
|
|
|
|
|
|
2017-08-24 11:43:23 +02:00
|
|
|
class EventMessage():
    # Suppose the event message is a json with the format {name: 'feedName', log:'logData'}
    def __init__(self, msg, filters):
        # msg may arrive either as an already-parsed dict or as raw bytes
        # from a Redis pubsub message.
        if not isinstance(msg, dict):
            msg = msg.decode('utf8')
            try:
                jsonMsg = json.loads(msg)
                # The 'log' field is itself JSON-encoded inside the envelope.
                jsonMsg['log'] = json.loads(jsonMsg['log'])
            except json.JSONDecodeError as e:
                logger.error(e)
                # Fallback: treat the whole payload as a bare log entry.
                # NOTE(review): if the outer json.loads already failed, this
                # second json.loads(msg) will raise again — presumably this
                # branch targets a valid outer message with a missing/odd
                # 'log' field; confirm against the publisher format.
                jsonMsg = { 'name': "undefined" ,'log': json.loads(msg) }
        else:
            jsonMsg = msg

        self.name = jsonMsg['name']
        self.zmqName = jsonMsg['zmqName']

        # Attribute-like messages are turned into datatable rows (and may
        # become False if rejected by the user's filters); anything else is
        # passed through untouched.
        if self.name == 'Attribute':
            self.feed = jsonMsg['log']
            self.feed = LogItem(self.feed, filters).get_row()
        elif self.name == 'ObjectAttribute':
            self.feed = jsonMsg['log']
            self.feed = LogItem(self.feed, filters).get_row()
        else:
            self.feed = jsonMsg['log']

    def to_json_ev(self):
        """Serialize as a Server-Sent-Events 'data:' frame ('' if filtered out)."""
        if self.feed is not False:
            to_ret = { 'log': self.feed, 'name': self.name, 'zmqName': self.zmqName }
            return 'data: {}\n\n'.format(json.dumps(to_ret))
        else:
            return ''

    def to_json(self):
        """Serialize as a plain JSON string ('' if filtered out)."""
        if self.feed is not False:
            to_ret = { 'log': self.feed, 'name': self.name, 'zmqName': self.zmqName }
            return json.dumps(to_ret)
        else:
            return ''

    def to_dict(self):
        # Unlike to_json(), this does not hide filtered-out entries; the
        # caller is expected to check dict['log'] itself.
        return {'log': self.feed, 'name': self.name, 'zmqName': self.zmqName}
|
|
|
|
|
2018-09-27 09:38:39 +02:00
|
|
|
|
2017-11-03 09:35:51 +01:00
|
|
|
###########
|
|
|
|
## ROUTE ##
|
|
|
|
###########
|
|
|
|
|
|
|
|
''' MAIN ROUTE '''
|
|
|
|
|
2017-08-24 07:25:13 +02:00
|
|
|
@app.route("/")
def index():
    """Serve the main dashboard page, with panel sizes from the config."""
    ratio_correction = 88
    # For each split panel the template wants the scaled percentage of the
    # panel itself followed by the scaled remainder (order matters).
    pannel_size = []
    for option in ('size_openStreet_pannel_perc', 'size_world_pannel_perc'):
        perc = cfg.getint('Dashboard', option)
        pannel_size.append("{:.0f}".format(perc / 100 * ratio_correction))
        pannel_size.append("{:.0f}".format((100 - perc) / 100 * ratio_correction))
    left_width = cfg.getint('Dashboard', 'size_dashboard_left_width')
    return render_template(
        'index.html',
        pannelSize=pannel_size,
        size_dashboard_width=[left_width, 12 - left_width],
        itemToPlot=cfg.get('Dashboard', 'item_to_plot'),
        graph_log_refresh_rate=cfg.getint('Dashboard', 'graph_log_refresh_rate'),
        char_separator=cfg.get('Dashboard', 'char_separator'),
        rotation_wait_time=cfg.getint('Dashboard', 'rotation_wait_time'),
        max_img_rotation=cfg.getint('Dashboard', 'max_img_rotation'),
        hours_spanned=cfg.getint('Dashboard', 'hours_spanned'),
        zoomlevel=cfg.getint('Dashboard', 'zoomlevel')
    )
|
2017-08-24 07:25:13 +02:00
|
|
|
|
2019-05-29 04:18:32 +02:00
|
|
|
@app.route('/favicon.ico')
def favicon():
    """Serve the site favicon from the static directory."""
    static_dir = os.path.join(app.root_path, 'static')
    return send_from_directory(static_dir, 'favicon.ico',
                               mimetype='image/vnd.microsoft.icon')
|
2017-10-25 16:22:14 +02:00
|
|
|
|
|
|
|
@app.route("/geo")
def geo():
    """Serve the geolocation dashboard page."""
    zoom = cfg.getint('GEO', 'zoomlevel')
    frequency = cfg.getint('GEO', 'updateFrequency')
    return render_template('geo.html',
                           zoomlevel=zoom,
                           default_updateFrequency=frequency)
|
2017-10-25 16:22:14 +02:00
|
|
|
|
2017-10-30 16:28:32 +01:00
|
|
|
@app.route("/contrib")
def contrib():
    """Serve the contributors page: categories, ranks, badges and trophies."""
    # Contribution categories shown in the datatable, plus human-readable
    # labels (capitalized, underscores replaced) and their point rewards.
    categ_list = contributor_helper.categories_in_datatable
    categ_list_str = [ s[0].upper() + s[1:].replace('_', ' ') for s in categ_list]
    categ_list_points = [contributor_helper.DICO_PNTS_REWARD[categ] for categ in categ_list]

    # Organisation ranks: [rank, title, required points, requirement text],
    # sorted by rank number.
    org_rank = contributor_helper.org_rank
    org_rank_requirement_pnts = contributor_helper.org_rank_requirement_pnts
    org_rank_requirement_text = contributor_helper.org_rank_requirement_text
    org_rank_list = [[rank, title, org_rank_requirement_pnts[rank], org_rank_requirement_text[rank]] for rank, title in org_rank.items()]
    org_rank_list.sort(key=lambda x: x[0])
    org_rank_additional_text = contributor_helper.org_rank_additional_info

    # Honor badges: [badge number, badge title], sorted by number.
    org_honor_badge_title = contributor_helper.org_honor_badge_title
    org_honor_badge_title_list = [ [num, text] for num, text in contributor_helper.org_honor_badge_title.items()]
    org_honor_badge_title_list.sort(key=lambda x: x[0])

    # Trophies: categories, titles per trophy level, and the percentage
    # thresholds mapped to levels (reversed for display order).
    trophy_categ_list = contributor_helper.categories_in_trophy
    trophy_categ_list_str = [ s[0].upper() + s[1:].replace('_', ' ') for s in trophy_categ_list]
    trophy_title = contributor_helper.trophy_title
    trophy_title_str = []
    for i in range(contributor_helper.trophyNum+1):
        trophy_title_str.append(trophy_title[i])
    trophy_mapping = ["Top 1"] + [ str(x)+"%" for x in contributor_helper.trophyMapping] + [" "]
    trophy_mapping.reverse()

    # Optional ?org=... query parameter pre-selects an organisation.
    currOrg = request.args.get('org')
    if currOrg is None:
        currOrg = ""
    return render_template('contrib.html',
            currOrg=currOrg,
            rankMultiplier=contributor_helper.rankMultiplier,
            default_pnts_per_contribution=contributor_helper.default_pnts_per_contribution,
            additional_help_text=json.loads(cfg.get('CONTRIB', 'additional_help_text')),
            categ_list=json.dumps(categ_list),
            categ_list_str=categ_list_str,
            categ_list_points=categ_list_points,
            org_rank_json=json.dumps(org_rank),
            org_rank_list=org_rank_list,
            org_rank_additional_text=org_rank_additional_text,
            org_honor_badge_title=json.dumps(org_honor_badge_title),
            org_honor_badge_title_list=org_honor_badge_title_list,
            trophy_categ_list=json.dumps(trophy_categ_list),
            trophy_categ_list_id=trophy_categ_list,
            trophy_categ_list_str=trophy_categ_list_str,
            trophy_title=json.dumps(trophy_title),
            trophy_title_str=trophy_title_str,
            trophy_mapping=trophy_mapping,
            min_between_reload=cfg.getint('CONTRIB', 'min_between_reload')
            )
|
|
|
|
|
2017-11-14 15:44:53 +01:00
|
|
|
@app.route("/users")
def users():
    """Serve the user-logins dashboard page."""
    return render_template('users.html')
|
|
|
|
|
2017-11-14 17:03:46 +01:00
|
|
|
|
2017-11-16 12:23:02 +01:00
|
|
|
@app.route("/trendings")
def trendings():
    """Serve the trendings page; ?maxNum=N caps displayed items (default 15)."""
    maxNum = request.args.get('maxNum')
    try:
        maxNum = int(maxNum)
    except (TypeError, ValueError):
        # Was a bare except: narrowed to the errors int() can actually raise
        # (TypeError when the parameter is missing, ValueError when malformed).
        maxNum = 15
    url_misp_event = cfg.get('RedisGlobal', 'misp_web_url')

    return render_template('trendings.html',
                           maxNum=maxNum,
                           url_misp_event=url_misp_event)
|
|
|
|
|
2017-11-03 09:35:51 +01:00
|
|
|
''' INDEX '''
|
|
|
|
|
|
|
|
@app.route("/_logs")
def logs():
    """Return the cached Attribute log as JSON, or an SSE stream otherwise."""
    if request.accept_mimetypes.accept_json or request.method == 'POST':
        key = 'Attribute'
        j = live_helper.get_stream_log_cache(key)
        # FIX: the filter cookie was parsed once per cached item inside the
        # loop; it cannot change within a request, so parse it once here.
        filters = json.loads(request.cookies.get('filters', '{}'))
        to_ret = []
        for item in j:
            ev = EventMessage(item, filters)
            dico = ev.to_dict()
            # get_row() yields False for entries rejected by the filters;
            # use an identity check ('!= False' would also drop falsy values).
            if dico['log'] is not False:
                to_ret.append(dico)
        return jsonify(to_ret)
    else:
        # Browser requested the page: stream live events instead.
        return Response(stream_with_context(event_stream_log()), mimetype="text/event-stream")
|
2017-11-03 09:35:51 +01:00
|
|
|
|
|
|
|
@app.route("/_maps")
def maps():
    """Return the cached map feed as JSON, or an SSE stream otherwise."""
    wants_json = request.accept_mimetypes.accept_json or request.method == 'POST'
    if not wants_json:
        return Response(event_stream_maps(), mimetype="text/event-stream")
    key = 'Map'
    cached = live_helper.get_stream_log_cache(key)
    return jsonify(cached)
|
2017-11-03 09:35:51 +01:00
|
|
|
|
|
|
|
@app.route("/_get_log_head")
def getLogHead():
    """Return the log datatable's column headers as a JSON-encoded list."""
    head_row = LogItem('').get_head_row()
    return json.dumps(head_row)
|
|
|
|
|
|
|
|
def event_stream_log():
    """Generator: stream log events from Redis pubsub as SSE frames."""
    subscriber_log = redis_server_log.pubsub(ignore_subscribe_messages=True)
    subscriber_log.subscribe(live_helper.CHANNEL)
    try:
        for msg in subscriber_log.listen():
            # Filters come from a cookie; re-read per message so the request
            # context (kept alive by stream_with_context) is consulted each time.
            filters = request.cookies.get('filters', '{}')
            filters = json.loads(filters)
            content = msg['data']
            ev = EventMessage(content, filters)
            # NOTE(review): the constructor never returns None, so the else
            # branch is dead; filtered-out events yield '' from to_json_ev().
            if ev is not None:
                yield ev.to_json_ev()
            else:
                pass
    except GeneratorExit:
        # Client disconnected: release the pubsub subscription.
        subscriber_log.unsubscribe()
|
2017-11-03 09:35:51 +01:00
|
|
|
|
|
|
|
def event_stream_maps():
    """Generator: relay map events from the Redis pubsub channel as SSE frames."""
    pubsub = redis_server_map.pubsub(ignore_subscribe_messages=True)
    pubsub.psubscribe(cfg.get('RedisMap', 'channelDisp'))
    try:
        for message in pubsub.listen():
            payload = message['data'].decode('utf8')
            yield 'data: {}\n\n'.format(payload)
    except GeneratorExit:
        # Client disconnected: release the pubsub subscription.
        pubsub.unsubscribe()
|
2017-11-03 09:35:51 +01:00
|
|
|
|
|
|
|
''' GEO '''
|
|
|
|
|
|
|
|
@app.route("/_getTopCoord")
def getTopCoord():
    """Return the most-hit coordinates for ?date=<unix-ts> (default: now)."""
    try:
        date = datetime.datetime.fromtimestamp(float(request.args.get('date')))
    except (TypeError, ValueError, OverflowError, OSError):
        # Was a bare except: narrowed to what float()/fromtimestamp() raise.
        date = datetime.datetime.now()
    data = geo_helper.getTopCoord(date)
    return jsonify(data)
|
|
|
|
|
|
|
|
@app.route("/_getHitMap")
def getHitMap():
    """Return per-country hit counts for ?date=<unix-ts> (default: now)."""
    try:
        date = datetime.datetime.fromtimestamp(float(request.args.get('date')))
    except (TypeError, ValueError, OverflowError, OSError):
        # Was a bare except: narrowed to what float()/fromtimestamp() raise.
        date = datetime.datetime.now()
    data = geo_helper.getHitMap(date)
    return jsonify(data)
|
|
|
|
|
|
|
|
@app.route("/_getCoordsByRadius")
def getCoordsByRadius():
    """Return coordinates within a radius of a center point over a date range.

    Query params: dateStart, dateEnd (unix timestamps), centerLat,
    centerLon, radius. Returns [] when any parameter is missing/malformed.
    """
    try:
        dateStart = datetime.datetime.fromtimestamp(float(request.args.get('dateStart')))
        dateEnd = datetime.datetime.fromtimestamp(float(request.args.get('dateEnd')))
        centerLat = request.args.get('centerLat')
        centerLon = request.args.get('centerLon')
        radius = int(math.ceil(float(request.args.get('radius'))))
    except (TypeError, ValueError, OverflowError, OSError):
        # Was a bare except: narrowed to the conversion errors above.
        return jsonify([])

    data = geo_helper.getCoordsByRadius(dateStart, dateEnd, centerLat, centerLon, radius)
    return jsonify(data)
|
2017-11-03 09:35:51 +01:00
|
|
|
|
|
|
|
''' CONTRIB '''
|
|
|
|
|
2017-11-06 13:43:55 +01:00
|
|
|
@app.route("/_getLastContributors")
def getLastContributors():
    """Return the most recent contributors recorded in Redis."""
    last_contributors = contributor_helper.getLastContributorsFromRedis()
    return jsonify(last_contributors)
|
2017-11-06 13:43:55 +01:00
|
|
|
|
|
|
|
@app.route("/_eventStreamLastContributor")
def getLastContributor():
    """SSE endpoint streaming each new contributor as it is published."""
    stream = eventStreamLastContributor()
    return Response(stream, mimetype="text/event-stream")
|
|
|
|
|
2017-11-13 16:26:09 +01:00
|
|
|
@app.route("/_eventStreamAwards")
def getLastStreamAwards():
    """SSE endpoint streaming each new award as it is published."""
    stream = eventStreamAwards()
    return Response(stream, mimetype="text/event-stream")
|
|
|
|
|
2017-11-06 13:43:55 +01:00
|
|
|
def eventStreamLastContributor():
    """Generator: stream contributor updates from Redis pubsub as SSE frames."""
    subscriber_lastContrib = redis_server_log.pubsub(ignore_subscribe_messages=True)
    subscriber_lastContrib.psubscribe(cfg.get('RedisLog', 'channelLastContributor'))
    try:
        for msg in subscriber_lastContrib.listen():
            content = msg['data'].decode('utf8')
            contentJson = json.loads(content)
            # The envelope's 'log' field is itself JSON-encoded.
            lastContribJson = json.loads(contentJson['log'])
            org = lastContribJson['org']
            # Enrich the event with the org's current contributor stats.
            to_return = contributor_helper.getContributorFromRedis(org)
            epoch = lastContribJson['epoch']
            to_return['epoch'] = epoch
            yield 'data: {}\n\n'.format(json.dumps(to_return))
    except GeneratorExit:
        # Client disconnected: release the pubsub subscription.
        subscriber_lastContrib.unsubscribe()
|
2017-10-31 15:08:44 +01:00
|
|
|
|
2017-11-13 16:26:09 +01:00
|
|
|
def eventStreamAwards():
    """Generator: stream award notifications from Redis pubsub as SSE frames."""
    subscriber_lastAwards = redis_server_log.pubsub(ignore_subscribe_messages=True)
    subscriber_lastAwards.psubscribe(cfg.get('RedisLog', 'channelLastAwards'))
    try:
        for msg in subscriber_lastAwards.listen():
            content = msg['data'].decode('utf8')
            contentJson = json.loads(content)
            # The envelope's 'log' field is itself JSON-encoded.
            lastAwardJson = json.loads(contentJson['log'])
            org = lastAwardJson['org']
            # Enrich the event with the org's current contributor stats.
            to_return = contributor_helper.getContributorFromRedis(org)
            epoch = lastAwardJson['epoch']
            to_return['epoch'] = epoch
            to_return['award'] = lastAwardJson['award']
            yield 'data: {}\n\n'.format(json.dumps(to_return))
    except GeneratorExit:
        # Client disconnected: release the pubsub subscription.
        subscriber_lastAwards.unsubscribe()
|
2017-11-13 16:26:09 +01:00
|
|
|
|
2017-10-30 16:28:32 +01:00
|
|
|
@app.route("/_getTopContributor")
def getTopContributor(suppliedDate=None, maxNum=100):
    """Return the top contributing orgs for a day.

    Also called directly (not via HTTP) by the fame endpoints with an
    explicit suppliedDate, so the signature must stay backward-compatible.
    """
    if suppliedDate is None:
        try:
            date = datetime.datetime.fromtimestamp(float(request.args.get('date')))
        except (TypeError, ValueError, OverflowError, OSError):
            # Was a bare except: narrowed to what float()/fromtimestamp() raise.
            date = datetime.datetime.now()
    else:
        date = suppliedDate

    data = contributor_helper.getTopContributorFromRedis(date, maxNum=maxNum)
    return jsonify(data)
|
|
|
|
|
|
|
|
@app.route("/_getFameContributor")
def getFameContributor():
    """Return the top-10 contributors for ?date (default: previous month)."""
    try:
        date = datetime.datetime.fromtimestamp(float(request.args.get('date')))
    except (TypeError, ValueError, OverflowError, OSError):
        # Was a bare except: narrowed to what float()/fromtimestamp() raise.
        today = datetime.datetime.now()
        # Default to the last day of the previous month.
        date = (datetime.datetime(today.year, today.month, 1) - datetime.timedelta(days=1))
    return getTopContributor(suppliedDate=date, maxNum=10)
|
2017-11-06 09:50:28 +01:00
|
|
|
|
2017-11-14 15:44:53 +01:00
|
|
|
@app.route("/_getFameQualContributor")
def getFameQualContributor():
    """Return the top-10 contributors for ?date (default: previous month).

    NOTE(review): currently identical to getFameContributor — presumably a
    qualitative ranking was intended; confirm against the frontend.
    """
    try:
        date = datetime.datetime.fromtimestamp(float(request.args.get('date')))
    except (TypeError, ValueError, OverflowError, OSError):
        # Was a bare except: narrowed to what float()/fromtimestamp() raise.
        today = datetime.datetime.now()
        # Default to the last day of the previous month.
        date = (datetime.datetime(today.year, today.month, 1) - datetime.timedelta(days=1))
    return getTopContributor(suppliedDate=date, maxNum=10)
|
2017-10-30 16:28:32 +01:00
|
|
|
|
|
|
|
@app.route("/_getTop5Overtime")
def getTop5Overtime():
    """Return the overtime series of the top-5 contributing orgs."""
    top5 = contributor_helper.getTop5OvertimeFromRedis()
    return jsonify(top5)
|
2017-10-30 16:28:32 +01:00
|
|
|
|
2017-11-09 10:32:39 +01:00
|
|
|
@app.route("/_getOrgOvertime")
def getOrgOvertime():
    """Return the contribution-overtime series for ?org=<name>."""
    # request.args.get never raises; the original try/bare-except was dead
    # code (and a missing parameter already yields None, not an exception).
    org = request.args.get('org')
    return jsonify(contributor_helper.getOrgOvertime(org))
|
2017-11-09 10:32:39 +01:00
|
|
|
|
2017-10-31 12:35:05 +01:00
|
|
|
@app.route("/_getCategPerContrib")
def getCategPerContrib():
    """Return per-category contribution counts for ?date (default: now)."""
    try:
        date = datetime.datetime.fromtimestamp(float(request.args.get('date')))
    except (TypeError, ValueError, OverflowError, OSError):
        # Was a bare except: narrowed to what float()/fromtimestamp() raise.
        date = datetime.datetime.now()

    return jsonify(contributor_helper.getCategPerContribFromRedis(date))
|
2017-11-13 16:02:09 +01:00
|
|
|
|
|
|
|
@app.route("/_getLatestAwards")
def getLatestAwards():
    """Return the most recent awards recorded in Redis."""
    try:
        date = datetime.datetime.fromtimestamp(float(request.args.get('date')))
    except (TypeError, ValueError, OverflowError, OSError):
        # Was a bare except: narrowed to what float()/fromtimestamp() raise.
        date = datetime.datetime.now()

    # NOTE(review): 'date' is parsed but never used — getLastAwardsFromRedis
    # takes no arguments; presumably kept for interface symmetry.
    return jsonify(contributor_helper.getLastAwardsFromRedis())
|
2017-10-31 12:35:05 +01:00
|
|
|
|
2017-10-31 15:49:04 +01:00
|
|
|
@app.route("/_getAllOrg")
def getAllOrg():
    """Return every contributing organisation known to Redis."""
    all_orgs = contributor_helper.getAllOrgFromRedis()
    return jsonify(all_orgs)
|
2017-10-31 15:49:04 +01:00
|
|
|
|
2017-11-02 10:57:16 +01:00
|
|
|
@app.route("/_getOrgRank")
def getOrgRank():
    """Return the current rank of ?org=<name>."""
    # request.args.get never raises; the original try/bare-except was dead code.
    org = request.args.get('org')
    return jsonify(contributor_helper.getCurrentOrgRankFromRedis(org))
|
2017-11-08 15:34:02 +01:00
|
|
|
|
|
|
|
@app.route("/_getContributionOrgStatus")
def getContributionOrgStatus():
    """Return the current contribution status of ?org=<name>."""
    # request.args.get never raises; the original try/bare-except was dead code.
    org = request.args.get('org')
    return jsonify(contributor_helper.getCurrentContributionStatus(org))
|
2017-11-08 15:34:02 +01:00
|
|
|
|
|
|
|
@app.route("/_getHonorBadges")
def getHonorBadges():
    """Return the honor badges earned by ?org=<name>."""
    # request.args.get never raises; the original try/bare-except was dead code.
    org = request.args.get('org')
    return jsonify(contributor_helper.getOrgHonorBadges(org))
|
2017-11-02 10:57:16 +01:00
|
|
|
|
2017-11-13 12:57:05 +01:00
|
|
|
@app.route("/_getTrophies")
def getTrophies():
    """Return the trophies earned by ?org=<name>."""
    # request.args.get never raises; the original try/bare-except was dead code.
    org = request.args.get('org')
    return jsonify(contributor_helper.getOrgTrophies(org))
|
2017-11-13 12:57:05 +01:00
|
|
|
|
2018-09-25 14:48:13 +02:00
|
|
|
@app.route("/_getAllOrgsTrophyRanking")
@app.route("/_getAllOrgsTrophyRanking/<string:categ>")
def getAllOrgsTrophyRanking(categ=None):
    """Return the trophy ranking of all orgs, optionally for one category."""
    ranking = contributor_helper.getAllOrgsTrophyRanking(categ)
    return jsonify(ranking)
|
|
|
|
|
2017-11-15 13:45:01 +01:00
|
|
|
|
|
|
|
''' USERS '''
|
|
|
|
|
|
|
|
@app.route("/_getUserLogins")
def getUserLogins():
    """Return punchcard login data for ?date (default: now), optionally ?org."""
    try:
        date = datetime.datetime.fromtimestamp(float(request.args.get('date')))
    except (TypeError, ValueError, OverflowError, OSError):
        # Was a bare except: narrowed to what float()/fromtimestamp() raise.
        date = datetime.datetime.now()

    org = request.args.get('org', None)
    data = users_helper.getUserLoginsForPunchCard(date, org)
    return jsonify(data)
|
|
|
|
|
2018-10-01 13:28:27 +02:00
|
|
|
@app.route("/_getAllLoggedOrg")
def getAllLoggedOrg():
    """Return every organisation that has login records."""
    logged_orgs = users_helper.getAllOrg()
    return jsonify(logged_orgs)
|
|
|
|
|
2017-11-15 13:45:01 +01:00
|
|
|
@app.route("/_getTopOrglogin")
def getTopOrglogin():
    """Return the 12 orgs with the most logins for ?date (default: now)."""
    try:
        date = datetime.datetime.fromtimestamp(float(request.args.get('date')))
    except (TypeError, ValueError, OverflowError, OSError):
        # Was a bare except: narrowed to what float()/fromtimestamp() raise.
        date = datetime.datetime.now()

    data = users_helper.getTopOrglogin(date, maxNum=12)
    return jsonify(data)
|
|
|
|
|
2017-11-15 14:34:37 +01:00
|
|
|
@app.route("/_getLoginVSCOntribution")
def getLoginVSCOntribution():
    """Return logins-vs-contributions data for ?date (default: now)."""
    try:
        date = datetime.datetime.fromtimestamp(float(request.args.get('date')))
    except (TypeError, ValueError, OverflowError, OSError):
        # Was a bare except: narrowed to what float()/fromtimestamp() raise.
        date = datetime.datetime.now()

    data = users_helper.getLoginVSCOntribution(date)
    return jsonify(data)
|
|
|
|
|
2017-11-21 11:59:07 +01:00
|
|
|
@app.route("/_getUserLoginsAndContribOvertime")
def getUserLoginsAndContribOvertime():
    """Return login/contribution overtime series for ?date, optionally ?org."""
    try:
        date = datetime.datetime.fromtimestamp(float(request.args.get('date')))
    except (TypeError, ValueError, OverflowError, OSError):
        # Was a bare except: narrowed to what float()/fromtimestamp() raise.
        date = datetime.datetime.now()

    org = request.args.get('org', None)
    data = users_helper.getUserLoginsAndContribOvertime(date, org)
    return jsonify(data)
|
|
|
|
|
2017-11-16 15:08:14 +01:00
|
|
|
''' TRENDINGS '''
|
|
|
|
@app.route("/_getTrendingEvents")
def getTrendingEvents():
    """Return trending events between ?dateS and ?dateE (default: last 7 days)."""
    try:
        dateS = datetime.datetime.fromtimestamp(float(request.args.get('dateS')))
        dateE = datetime.datetime.fromtimestamp(float(request.args.get('dateE')))
    except (TypeError, ValueError, OverflowError, OSError):
        # Was a bare except: narrowed to what float()/fromtimestamp() raise.
        dateS = datetime.datetime.now() - datetime.timedelta(days=7)
        dateE = datetime.datetime.now()

    specificLabel = request.args.get('specificLabel')
    data = trendings_helper.getTrendingEvents(dateS, dateE, specificLabel, topNum=int(request.args.get('topNum', 10)))
    return jsonify(data)
|
|
|
|
|
|
|
|
@app.route("/_getTrendingCategs")
def getTrendingCategs():
    """Return trending categories between ?dateS and ?dateE (default: last 7 days)."""
    try:
        dateS = datetime.datetime.fromtimestamp(float(request.args.get('dateS')))
        dateE = datetime.datetime.fromtimestamp(float(request.args.get('dateE')))
    except (TypeError, ValueError, OverflowError, OSError):
        # Was a bare except: narrowed to what float()/fromtimestamp() raise.
        dateS = datetime.datetime.now() - datetime.timedelta(days=7)
        dateE = datetime.datetime.now()

    data = trendings_helper.getTrendingCategs(dateS, dateE, topNum=int(request.args.get('topNum', 10)))
    return jsonify(data)
|
|
|
|
|
|
|
|
@app.route("/_getTrendingTags")
def getTrendingTags():
    """Return trending tags between ?dateS and ?dateE (default: last 7 days)."""
    try:
        dateS = datetime.datetime.fromtimestamp(float(request.args.get('dateS')))
        dateE = datetime.datetime.fromtimestamp(float(request.args.get('dateE')))
    except (TypeError, ValueError, OverflowError, OSError):
        # Was a bare except: narrowed to what float()/fromtimestamp() raise.
        dateS = datetime.datetime.now() - datetime.timedelta(days=7)
        dateE = datetime.datetime.now()

    data = trendings_helper.getTrendingTags(dateS, dateE, topNum=int(request.args.get('topNum', 10)))
    return jsonify(data)
|
|
|
|
|
2017-11-17 14:45:20 +01:00
|
|
|
@app.route("/_getTrendingSightings")
def getTrendingSightings():
    """Return sighting trends between ?dateS and ?dateE (default: last 7 days)."""
    try:
        dateS = datetime.datetime.fromtimestamp(float(request.args.get('dateS')))
        dateE = datetime.datetime.fromtimestamp(float(request.args.get('dateE')))
    except (TypeError, ValueError, OverflowError, OSError):
        # Was a bare except: narrowed to what float()/fromtimestamp() raise.
        dateS = datetime.datetime.now() - datetime.timedelta(days=7)
        dateE = datetime.datetime.now()

    data = trendings_helper.getTrendingSightings(dateS, dateE)
    return jsonify(data)
|
|
|
|
|
2017-11-17 16:50:04 +01:00
|
|
|
@app.route("/_getTrendingDisc")
def getTrendingDisc():
    """Return trending discussions between ?dateS and ?dateE (default: last 7 days)."""
    try:
        dateS = datetime.datetime.fromtimestamp(float(request.args.get('dateS')))
        dateE = datetime.datetime.fromtimestamp(float(request.args.get('dateE')))
    except (TypeError, ValueError, OverflowError, OSError):
        # Was a bare except: narrowed to what float()/fromtimestamp() raise.
        dateS = datetime.datetime.now() - datetime.timedelta(days=7)
        dateE = datetime.datetime.now()

    data = trendings_helper.getTrendingDisc(dateS, dateE)
    return jsonify(data)
|
2017-11-15 14:34:37 +01:00
|
|
|
|
2017-11-20 17:42:25 +01:00
|
|
|
@app.route("/_getTypeaheadData")
def getTypeaheadData():
    """Return typeahead suggestions between ?dateS and ?dateE (default: last 7 days)."""
    try:
        dateS = datetime.datetime.fromtimestamp(float(request.args.get('dateS')))
        dateE = datetime.datetime.fromtimestamp(float(request.args.get('dateE')))
    except (TypeError, ValueError, OverflowError, OSError):
        # Was a bare except: narrowed to what float()/fromtimestamp() raise.
        dateS = datetime.datetime.now() - datetime.timedelta(days=7)
        dateE = datetime.datetime.now()

    data = trendings_helper.getTypeaheadData(dateS, dateE)
    return jsonify(data)
|
|
|
|
|
2017-12-11 12:19:11 +01:00
|
|
|
@app.route("/_getGenericTrendingOvertime")
def getGenericTrendingOvertime():
    """Return overtime trend data; ?choice selects the series (default 'events')."""
    try:
        dateS = datetime.datetime.fromtimestamp(float(request.args.get('dateS')))
        dateE = datetime.datetime.fromtimestamp(float(request.args.get('dateE')))
    except (TypeError, ValueError, OverflowError, OSError):
        # Was a bare except: narrowed to what float()/fromtimestamp() raise.
        dateS = datetime.datetime.now() - datetime.timedelta(days=7)
        dateE = datetime.datetime.now()
    choice = request.args.get('choice', 'events')

    data = trendings_helper.getGenericTrendingOvertime(dateS, dateE, choice=choice)
    return jsonify(data)
|
|
|
|
|
2017-08-24 07:25:13 +02:00
|
|
|
if __name__ == '__main__':
    try:
        # NOTE(review): server_debug is the raw config string, so any
        # non-empty value (even "False") enables debug mode — confirm.
        app.run(host=server_host,
                port=server_port,
                debug=server_debug,
                threaded=True)
    except OSError as error:
        # Use the symbolic constant instead of the magic number 98
        # (errno is already imported at the top of the file).
        if error.errno == errno.EADDRINUSE:
            print("\n\n\nAddress already in use, the defined port is: " + str(server_port))
        else:
            print(str(error))
|