#!/usr/bin/env python3.5
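"""Flask server of the dashboard.

Renders the dashboard pages (index, geo, contrib, users, trendings) and
exposes the JSON and Server-Sent-Events endpoints they poll, reading all
data from Redis.
"""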

from flask import Flask, render_template, request, Response, jsonify

import json
import redis
import random
import math
import configparser
from time import gmtime as now
from time import sleep, strftime
import datetime
import os

import util
import contributor_helper
import users_helper
import trendings_helper

configfile = os.path.join(os.environ['DASH_CONFIG'], 'config.cfg')
cfg = configparser.ConfigParser()
cfg.read(configfile)
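
# The config is expected to provide (at least) the sections referenced below:
# [RedisGlobal] host/port, [RedisLog] db/channel/channelLastContributor/
# channelLastAwards, [RedisMap] db/channelDisp, [RedisDB] db, plus [Log],
# [Dashboard], [GEO] and [CONTRIB]. Example values (illustrative only):
#   [RedisGlobal]
#   host=localhost
#   port=6379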

app = Flask(__name__)

redis_server_log = redis.StrictRedis(
        host=cfg.get('RedisGlobal', 'host'),
        port=cfg.getint('RedisGlobal', 'port'),
        db=cfg.getint('RedisLog', 'db'))
redis_server_map = redis.StrictRedis(
        host=cfg.get('RedisGlobal', 'host'),
        port=cfg.getint('RedisGlobal', 'port'),
        db=cfg.getint('RedisMap', 'db'))
serv_redis_db = redis.StrictRedis(
        host=cfg.get('RedisGlobal', 'host'),
        port=cfg.getint('RedisGlobal', 'port'),
        db=cfg.getint('RedisDB', 'db'))

# NB: from here on the helper module names are shadowed by their instances
contributor_helper = contributor_helper.Contributor_helper(serv_redis_db, cfg)
users_helper = users_helper.Users_helper(serv_redis_db, cfg)
trendings_helper = trendings_helper.Trendings_helper(serv_redis_db, cfg)

subscriber_log = redis_server_log.pubsub(ignore_subscribe_messages=True)
subscriber_log.psubscribe(cfg.get('RedisLog', 'channel'))
subscriber_map = redis_server_map.pubsub(ignore_subscribe_messages=True)
subscriber_map.psubscribe(cfg.get('RedisMap', 'channelDisp'))
subscriber_lastContrib = redis_server_log.pubsub(ignore_subscribe_messages=True)
subscriber_lastContrib.psubscribe(cfg.get('RedisLog', 'channelLastContributor'))
subscriber_lastAwards = redis_server_log.pubsub(ignore_subscribe_messages=True)
subscriber_lastAwards.psubscribe(cfg.get('RedisLog', 'channelLastAwards'))
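
# redis-py delivers pub/sub messages as dicts; with psubscribe they look like
#   {'type': 'pmessage', 'pattern': b'...', 'channel': b'...', 'data': b'<payload>'}
# and only 'data' is consumed by the event-stream generators below.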

eventNumber = 0

##########
## UTIL ##
##########

''' INDEX '''
class LogItem():

    FIELDNAME_ORDER = []
    FIELDNAME_ORDER_HEADER = []
    FIELDNAME_ORDER.append("Time")
    FIELDNAME_ORDER_HEADER.append("Time")
    # Build the column order once, at class-definition time, from the config.
    # A nested list in 'fieldname_order' denotes columns merged under one header.
    for item in json.loads(cfg.get('Log', 'fieldname_order')):
        if type(item) is list:
            FIELDNAME_ORDER_HEADER.append(" | ".join(item))
        else:
            FIELDNAME_ORDER_HEADER.append(item)
        FIELDNAME_ORDER.append(item)

    def __init__(self, feed):
        self.time = strftime("%H:%M:%S", now())
        #FIXME Parse feed message?
        self.fields = []
        self.fields.append(self.time)
        for f in feed:
            self.fields.append(f)

    def get_head_row(self):
        to_ret = []
        for fn in LogItem.FIELDNAME_ORDER_HEADER:
            to_ret.append(fn)
        return to_ret

    def get_row(self):
        to_ret = {}
        # Numeric keys keep the columns sorted (jsonify sorts keys)
        for item in range(len(LogItem.FIELDNAME_ORDER)):
            try:
                to_ret[item] = self.fields[item]
            except IndexError:  # received item has fewer fields than expected
                to_ret[item] = ''
        return to_ret


class EventMessage():
    # Suppose the event message is a json with the format {name: 'feedName', log: 'logData'}
    def __init__(self, msg):
        msg = msg.decode('utf8')
        try:
            jsonMsg = json.loads(msg)
        except json.JSONDecodeError:
            print('json decode error')
            # Wrap the raw text so the field accesses below still work
            jsonMsg = { 'name': "undefined", 'zmqName': "undefined", 'log': json.dumps([msg]) }

        self.feedName = jsonMsg['name']
        self.zmqName = jsonMsg['zmqName']
        self.feed = json.loads(jsonMsg['log'])
        self.feed = LogItem(self.feed).get_row()

    def to_json(self):
        to_ret = { 'log': self.feed, 'feedName': self.feedName, 'zmqName': self.zmqName }
        return 'data: {}\n\n'.format(json.dumps(to_ret))
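
# to_json() emits one Server-Sent-Events frame, e.g. (illustrative values):
#   data: {"log": {"0": "12:00:00", "1": "..."}, "feedName": "misp_json", "zmqName": "misp"}
# followed by a blank line, as required by the SSE wire format.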


''' GENERAL '''

def getZrange(keyCateg, date, topNum, endSubkey=""):
    date_str = util.getDateStrFormat(date)
    keyname = "{}:{}{}".format(keyCateg, date_str, endSubkey)
    data = serv_redis_db.zrange(keyname, 0, topNum-1, desc=True, withscores=True)
    data = [ [record[0].decode('utf8'), record[1]] for record in data ]
    return data
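
# Example (hypothetical data): getZrange('GEO_COUNTRY', date, 2) might return
#   [['FR', 42.0], ['DE', 17.0]]
# With topNum=0 the slice 0..-1 spans the whole sorted set.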


###########
## ROUTE ##
###########

''' MAIN ROUTE '''

@app.route("/")
def index():
    ratioCorrection = 88
    pannelSize = [
            "{:.0f}".format(cfg.getint('Dashboard', 'size_openStreet_pannel_perc')/100*ratioCorrection),
            "{:.0f}".format((100-cfg.getint('Dashboard', 'size_openStreet_pannel_perc'))/100*ratioCorrection),
            "{:.0f}".format(cfg.getint('Dashboard', 'size_world_pannel_perc')/100*ratioCorrection),
            "{:.0f}".format((100-cfg.getint('Dashboard', 'size_world_pannel_perc'))/100*ratioCorrection)
            ]
    return render_template('index.html',
            pannelSize=pannelSize,
            size_dashboard_width=[cfg.getint('Dashboard', 'size_dashboard_left_width'), 12-cfg.getint('Dashboard', 'size_dashboard_left_width')],
            itemToPlot=cfg.get('Dashboard', 'item_to_plot'),
            graph_log_refresh_rate=cfg.getint('Dashboard', 'graph_log_refresh_rate'),
            char_separator=cfg.get('Log', 'char_separator'),
            rotation_wait_time=cfg.getint('Dashboard', 'rotation_wait_time'),
            max_img_rotation=cfg.getint('Dashboard', 'max_img_rotation'),
            hours_spanned=cfg.getint('Dashboard', 'hours_spanned'),
            zoomlevel=cfg.getint('Dashboard', 'zoomlevel')
            )


@app.route("/geo")
def geo():
    return render_template('geo.html',
            zoomlevel=cfg.getint('GEO', 'zoomlevel'),
            default_updateFrequency=cfg.getint('GEO', 'updateFrequency')
            )


@app.route("/contrib")
def contrib():
    categ_list = contributor_helper.categories_in_datatable
    categ_list_str = [ s[0].upper() + s[1:].replace('_', ' ') for s in categ_list]
    categ_list_points = [contributor_helper.DICO_PNTS_REWARD[categ] for categ in categ_list]

    org_rank = contributor_helper.org_rank
    org_rank_requirement_pnts = contributor_helper.org_rank_requirement_pnts
    org_rank_requirement_text = contributor_helper.org_rank_requirement_text
    org_rank_list = [[rank, title, org_rank_requirement_pnts[rank], org_rank_requirement_text[rank]] for rank, title in org_rank.items()]
    org_rank_list.sort(key=lambda x: x[0])
    org_rank_additional_text = contributor_helper.org_rank_additional_info

    org_honor_badge_title = contributor_helper.org_honor_badge_title
    org_honor_badge_title_list = [ [num, text] for num, text in contributor_helper.org_honor_badge_title.items()]
    org_honor_badge_title_list.sort(key=lambda x: x[0])

    trophy_categ_list = contributor_helper.categories_in_trophy
    trophy_categ_list_str = [ s[0].upper() + s[1:].replace('_', ' ') for s in trophy_categ_list]
    trophy_title = contributor_helper.trophy_title

    currOrg = request.args.get('org')
    if currOrg is None:
        currOrg = ""
    return render_template('contrib.html',
            currOrg=currOrg,
            rankMultiplier=contributor_helper.rankMultiplier,
            default_pnts_per_contribution=contributor_helper.default_pnts_per_contribution,
            additional_help_text=json.loads(cfg.get('CONTRIB', 'additional_help_text')),
            categ_list=json.dumps(categ_list),
            categ_list_str=categ_list_str,
            categ_list_points=categ_list_points,
            org_rank_json=json.dumps(org_rank),
            org_rank_list=org_rank_list,
            org_rank_additional_text=org_rank_additional_text,
            org_honor_badge_title=json.dumps(org_honor_badge_title),
            org_honor_badge_title_list=org_honor_badge_title_list,
            trophy_categ_list=json.dumps(trophy_categ_list),
            trophy_categ_list_id=trophy_categ_list,
            trophy_categ_list_str=trophy_categ_list_str,
            trophy_title=json.dumps(trophy_title),
            min_between_reload=cfg.getint('CONTRIB', 'min_between_reload')
            )
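
# The optional ?org= query parameter is forwarded to the template as currOrg.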


@app.route("/users")
def users():
    return render_template('users.html')


@app.route("/trendings")
def trendings():
    maxNum = request.args.get('maxNum')
    try:
        maxNum = int(maxNum)
    except (TypeError, ValueError):  # missing or non-numeric parameter
        maxNum = 15

    return render_template('trendings.html',
            maxNum=maxNum
            )


''' INDEX '''

@app.route("/_logs")
def logs():
    return Response(event_stream_log(), mimetype="text/event-stream")

@app.route("/_maps")
def maps():
    return Response(event_stream_maps(), mimetype="text/event-stream")

@app.route("/_get_log_head")
def getLogHead():
    return json.dumps(LogItem('').get_head_row())
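
# LogItem('') is instantiated only to reach the class-level header list; an
# empty feed adds nothing beyond the timestamp field.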

def event_stream_log():
    for msg in subscriber_log.listen():
        content = msg['data']
        yield EventMessage(content).to_json()

def event_stream_maps():
    for msg in subscriber_map.listen():
        content = msg['data'].decode('utf8')
        yield 'data: {}\n\n'.format(content)


''' GEO '''

@app.route("/_getTopCoord")
def getTopCoord():
    try:
        date = datetime.datetime.fromtimestamp(float(request.args.get('date')))
    except (TypeError, ValueError):  # no or invalid 'date' parameter
        date = datetime.datetime.now()
    keyCateg = "GEO_COORD"
    topNum = 6  # default number of top entries
    data = getZrange(keyCateg, date, topNum)
    return jsonify(data)

@app.route("/_getHitMap")
def getHitMap():
    try:
        date = datetime.datetime.fromtimestamp(float(request.args.get('date')))
    except (TypeError, ValueError):
        date = datetime.datetime.now()
    keyCateg = "GEO_COUNTRY"
    topNum = 0  # 0 means the whole set (see getZrange)
    data = getZrange(keyCateg, date, topNum)
    return jsonify(data)

def isCloseTo(coord1, coord2):
    clusterMeter = cfg.getfloat('GEO', 'clusteringDistance')
    clusterThres = math.pow(10, len(str(abs(clusterMeter)))-7) #map meter to coord threshold (~ big approx)
    if abs(float(coord1[0]) - float(coord2[0])) <= clusterThres:
        if abs(float(coord1[1]) - float(coord2[1])) <= clusterThres:
            return True
    return False
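
# Rough worked example: clusteringDistance=100.0 gives len('100.0')-7 = -2,
# i.e. a 0.01 degree box (~1.1 km at the equator); each extra digit widens the
# threshold tenfold. Latitude distortion is ignored by this approximation.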

@app.route("/_getCoordsByRadius")
def getCoordsByRadius():
    dico_coord = {}
    to_return = []
    try:
        dateStart = datetime.datetime.fromtimestamp(float(request.args.get('dateStart')))
        dateEnd = datetime.datetime.fromtimestamp(float(request.args.get('dateEnd')))
        centerLat = request.args.get('centerLat')
        centerLon = request.args.get('centerLon')
        radius = int(math.ceil(float(request.args.get('radius'))))
    except (TypeError, ValueError):  # missing or malformed parameters
        return jsonify(to_return)

    delta = dateEnd - dateStart
    for i in range(delta.days+1):
        correctDatetime = dateStart + datetime.timedelta(days=i)
        date_str = util.getDateStrFormat(correctDatetime)
        keyCateg = 'GEO_RAD'
        keyname = "{}:{}".format(keyCateg, date_str)
        res = serv_redis_db.georadius(keyname, centerLon, centerLat, radius, unit='km', withcoord=True)

        # merge really close coordinates
        for data, coord in res:
            flag_added = False
            coord = [coord[0], coord[1]]
            # walk all known coordinates
            for dicoCoordStr in dico_coord.keys():
                dicoCoord = json.loads(dicoCoordStr)
                # if the current coordinate is close to a known one,
                # attach the data to it
                if isCloseTo(dicoCoord, coord):
                    dico_coord[dicoCoordStr].append(data)
                    flag_added = True
                    break
            # coordinate not yet in the dict
            if not flag_added:
                dico_coord[str(coord)] = [data]

    for dicoCoord, array in dico_coord.items():
        dicoCoord = json.loads(dicoCoord)
        to_return.append([array, dicoCoord])

    return jsonify(to_return)
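
# Returned shape (illustrative): [[[member1, member2], [lon, lat]], ...];
# redis GEORADIUS WITHCOORD yields (longitude, latitude) pairs, kept as-is.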


''' CONTRIB '''

@app.route("/_getLastContributors")
def getLastContributors():
    return jsonify(contributor_helper.getLastContributorsFromRedis())

@app.route("/_eventStreamLastContributor")
def getLastContributor():
    return Response(eventStreamLastContributor(), mimetype="text/event-stream")

@app.route("/_eventStreamAwards")
def getLastStreamAwards():
    return Response(eventStreamAwards(), mimetype="text/event-stream")

def eventStreamLastContributor():
    for msg in subscriber_lastContrib.listen():
        content = msg['data'].decode('utf8')
        contentJson = json.loads(content)
        lastContribJson = json.loads(contentJson['log'])
        org = lastContribJson['org']
        to_return = contributor_helper.getContributorFromRedis(org)
        epoch = lastContribJson['epoch']
        to_return['epoch'] = epoch
        yield 'data: {}\n\n'.format(json.dumps(to_return))

def eventStreamAwards():
    for msg in subscriber_lastAwards.listen():
        content = msg['data'].decode('utf8')
        contentJson = json.loads(content)
        lastAwardJson = json.loads(contentJson['log'])
        org = lastAwardJson['org']
        to_return = contributor_helper.getContributorFromRedis(org)
        epoch = lastAwardJson['epoch']
        to_return['epoch'] = epoch
        to_return['award'] = lastAwardJson['award']
        yield 'data: {}\n\n'.format(json.dumps(to_return))
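
# Both generators assume the published 'log' field is itself a JSON string,
# e.g. (illustrative): {"org": "someOrg", "epoch": 1510000000, "award": ...}
# with 'award' present only on the channelLastAwards channel.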


@app.route("/_getTopContributor")
def getTopContributor(suppliedDate=None):
    if suppliedDate is None:
        try:
            date = datetime.datetime.fromtimestamp(float(request.args.get('date')))
        except (TypeError, ValueError):
            date = datetime.datetime.now()
    else:
        date = suppliedDate

    data = contributor_helper.getTopContributorFromRedis(date)
    return jsonify(data)

@app.route("/_getFameContributor")
def getFameContributor():
    try:
        date = datetime.datetime.fromtimestamp(float(request.args.get('date')))
    except (TypeError, ValueError):
        today = datetime.datetime.now()
        # fall back to the last day of the previous month
        date = (datetime.datetime(today.year, today.month, 1) - datetime.timedelta(days=1))
    return getTopContributor(suppliedDate=date)

@app.route("/_getFameQualContributor")
def getFameQualContributor():
    try:
        date = datetime.datetime.fromtimestamp(float(request.args.get('date')))
    except (TypeError, ValueError):
        today = datetime.datetime.now()
        # fall back to the last day of the previous month
        date = (datetime.datetime(today.year, today.month, 1) - datetime.timedelta(days=1))
    return getTopContributor(suppliedDate=date)

@app.route("/_getTop5Overtime")
def getTop5Overtime():
    return jsonify(contributor_helper.getTop5OvertimeFromRedis())

@app.route("/_getOrgOvertime")
def getOrgOvertime():
    org = request.args.get('org', '')
    return jsonify(contributor_helper.getOrgOvertime(org))

@app.route("/_getCategPerContrib")
def getCategPerContrib():
    try:
        date = datetime.datetime.fromtimestamp(float(request.args.get('date')))
    except (TypeError, ValueError):
        date = datetime.datetime.now()

    return jsonify(contributor_helper.getCategPerContribFromRedis(date))

@app.route("/_getLatestAwards")
def getLatestAwards():
    try:
        date = datetime.datetime.fromtimestamp(float(request.args.get('date')))
    except (TypeError, ValueError):
        date = datetime.datetime.now()

    # 'date' is parsed for symmetry with the other routes but the helper
    # currently ignores it
    return jsonify(contributor_helper.getLastAwardsFromRedis())

@app.route("/_getAllOrg")
def getAllOrg():
    return jsonify(contributor_helper.getAllOrgFromRedis())

@app.route("/_getOrgRank")
def getOrgRank():
    org = request.args.get('org', '')
    return jsonify(contributor_helper.getCurrentOrgRankFromRedis(org))

@app.route("/_getContributionOrgStatus")
def getContributionOrgStatus():
    org = request.args.get('org', '')
    return jsonify(contributor_helper.getCurrentContributionStatus(org))

@app.route("/_getHonorBadges")
def getHonorBadges():
    org = request.args.get('org', '')
    return jsonify(contributor_helper.getOrgHonorBadges(org))

@app.route("/_getTrophies")
def getTrophies():
    org = request.args.get('org', '')
    return jsonify(contributor_helper.getOrgTrophies(org))


''' USERS '''

@app.route("/_getUserLogins")
def getUserLogins():
    try:
        date = datetime.datetime.fromtimestamp(float(request.args.get('date')))
    except (TypeError, ValueError):
        date = datetime.datetime.now()

    data = users_helper.getUserLoginsForPunchCard(date)
    return jsonify(data)

@app.route("/_getUserLoginsOvertime")
def getUserLoginsOvertime():
    try:
        date = datetime.datetime.fromtimestamp(float(request.args.get('date')))
    except (TypeError, ValueError):
        date = datetime.datetime.now()

    data = users_helper.getUserLoginsOvertime(date)
    return jsonify(data)

@app.route("/_getTopOrglogin")
def getTopOrglogin():
    try:
        date = datetime.datetime.fromtimestamp(float(request.args.get('date')))
    except (TypeError, ValueError):
        date = datetime.datetime.now()

    data = users_helper.getTopOrglogin(date, maxNum=12)
    return jsonify(data)

@app.route("/_getLoginVSCOntribution")
def getLoginVSCOntribution():
    try:
        date = datetime.datetime.fromtimestamp(float(request.args.get('date')))
    except (TypeError, ValueError):
        date = datetime.datetime.now()

    data = users_helper.getLoginVSCOntribution(date)
    return jsonify(data)

@app.route("/_getUserLoginsAndContribOvertime")
def getUserLoginsAndContribOvertime():
    try:
        date = datetime.datetime.fromtimestamp(float(request.args.get('date')))
    except (TypeError, ValueError):
        date = datetime.datetime.now()

    data = users_helper.getUserLoginsAndContribOvertime(date)
    return jsonify(data)


''' TRENDINGS '''

@app.route("/_getTrendingEvents")
def getTrendingEvents():
    try:
        dateS = datetime.datetime.fromtimestamp(float(request.args.get('dateS')))
        dateE = datetime.datetime.fromtimestamp(float(request.args.get('dateE')))
    except (TypeError, ValueError):
        dateS = datetime.datetime.now() - datetime.timedelta(days=7)
        dateE = datetime.datetime.now()

    specificLabel = request.args.get('specificLabel')
    data = trendings_helper.getTrendingEvents(dateS, dateE, specificLabel)
    return jsonify(data)

@app.route("/_getTrendingCategs")
def getTrendingCategs():
    try:
        dateS = datetime.datetime.fromtimestamp(float(request.args.get('dateS')))
        dateE = datetime.datetime.fromtimestamp(float(request.args.get('dateE')))
    except (TypeError, ValueError):
        dateS = datetime.datetime.now() - datetime.timedelta(days=7)
        dateE = datetime.datetime.now()

    data = trendings_helper.getTrendingCategs(dateS, dateE)
    return jsonify(data)

@app.route("/_getTrendingTags")
def getTrendingTags():
    try:
        dateS = datetime.datetime.fromtimestamp(float(request.args.get('dateS')))
        dateE = datetime.datetime.fromtimestamp(float(request.args.get('dateE')))
    except (TypeError, ValueError):
        dateS = datetime.datetime.now() - datetime.timedelta(days=7)
        dateE = datetime.datetime.now()

    data = trendings_helper.getTrendingTags(dateS, dateE)
    return jsonify(data)

@app.route("/_getTrendingSightings")
def getTrendingSightings():
    try:
        dateS = datetime.datetime.fromtimestamp(float(request.args.get('dateS')))
        dateE = datetime.datetime.fromtimestamp(float(request.args.get('dateE')))
    except (TypeError, ValueError):
        dateS = datetime.datetime.now() - datetime.timedelta(days=7)
        dateE = datetime.datetime.now()

    data = trendings_helper.getTrendingSightings(dateS, dateE)
    return jsonify(data)

@app.route("/_getTrendingDisc")
def getTrendingDisc():
    try:
        dateS = datetime.datetime.fromtimestamp(float(request.args.get('dateS')))
        dateE = datetime.datetime.fromtimestamp(float(request.args.get('dateE')))
    except (TypeError, ValueError):
        dateS = datetime.datetime.now() - datetime.timedelta(days=7)
        dateE = datetime.datetime.now()

    data = trendings_helper.getTrendingDisc(dateS, dateE)
    return jsonify(data)

@app.route("/_getTypeaheadData")
def getTypeaheadData():
    try:
        dateS = datetime.datetime.fromtimestamp(float(request.args.get('dateS')))
        dateE = datetime.datetime.fromtimestamp(float(request.args.get('dateE')))
    except (TypeError, ValueError):
        dateS = datetime.datetime.now() - datetime.timedelta(days=7)
        dateE = datetime.datetime.now()

    data = trendings_helper.getTypeaheadData(dateS, dateE)
    return jsonify(data)


if __name__ == '__main__':
    # threaded=True so the long-lived SSE endpoints don't block other requests
    app.run(host='localhost', port=8001, threaded=True)