2018-05-04 13:53:29 +02:00
|
|
|
#!/usr/bin/env python3
|
2016-12-09 08:46:37 +01:00
|
|
|
# -*-coding:UTF-8 -*
|
|
|
|
|
|
|
|
'''
|
|
|
|
Flask functions and routes for the trending modules page
|
2018-02-28 09:19:27 +01:00
|
|
|
|
2017-07-20 10:24:48 +02:00
|
|
|
note: The matching of credential against supplied credential is done using Levenshtein distance
|
2016-12-09 08:46:37 +01:00
|
|
|
'''
|
|
|
|
import redis
|
|
|
|
import datetime
|
|
|
|
import calendar
|
|
|
|
import flask
|
2018-11-07 15:37:25 +01:00
|
|
|
from flask import Flask, render_template, jsonify, request, Blueprint, url_for, redirect
|
2017-03-28 17:42:44 +02:00
|
|
|
import re
|
2016-12-09 08:46:37 +01:00
|
|
|
import Paste
|
2017-07-18 16:57:15 +02:00
|
|
|
from pprint import pprint
|
2017-07-19 11:52:06 +02:00
|
|
|
import Levenshtein
|
2016-12-09 08:46:37 +01:00
|
|
|
|
|
|
|
# ============ VARIABLES ============
|
|
|
|
import Flask_config
|
|
|
|
|
|
|
|
# Shared Flask application objects and Redis handles, provided by Flask_config.
app = Flask_config.app
cfg = Flask_config.cfg
baseUrl = Flask_config.baseUrl
r_serv_term = Flask_config.r_serv_term   # term-tracking Redis DB
r_serv_cred = Flask_config.r_serv_cred   # credentials Redis DB
r_serv_db = Flask_config.r_serv_db       # main application Redis DB
bootstrap_label = Flask_config.bootstrap_label

terms = Blueprint('terms', __name__, template_folder='templates')

'''TERM'''
# Minimum Levenshtein match percentage applied to tracked sets when the
# user does not supply an explicit "[NN]" percentage.
DEFAULT_MATCH_PERCENT = 50

#tracked
TrackedTermsSet_Name = "TrackedSetTermSet"
TrackedTermsDate_Name = "TrackedTermDate"
#black
BlackListTermsDate_Name = "BlackListTermDate"
BlackListTermsSet_Name = "BlackListSetTermSet"
#regex
TrackedRegexSet_Name = "TrackedRegexSet"
TrackedRegexDate_Name = "TrackedRegexDate"
#set
TrackedSetSet_Name = "TrackedSetSet"
TrackedSetDate_Name = "TrackedSetDate"

# notifications enabled/disabled
# same value as in `bin/NotificationHelper.py`
TrackedTermsNotificationEnabled_Name = "TrackedNotifications"

# associated notification email addresses for a specific term`
# same value as in `bin/NotificationHelper.py`
# Keys will be e.g. TrackedNotificationEmails_<TERMNAME>
TrackedTermsNotificationEmailsPrefix_Name = "TrackedNotificationEmails_"
TrackedTermsNotificationTagsPrefix_Name = "TrackedNotificationTags_"

'''CRED'''
# Tokenizer used to split credentials into comparable words.
REGEX_CRED = '[a-z]+|[A-Z]{3,}|[A-Z]{1,2}[a-z]+|[0-9]+'
REDIS_KEY_NUM_USERNAME = 'uniqNumForUsername'
# NOTE(review): same value as REDIS_KEY_NUM_USERNAME — looks like a
# copy/paste slip (perhaps intended 'uniqNumForPath'), but existing Redis
# data may depend on this key; confirm before changing.
REDIS_KEY_NUM_PATH = 'uniqNumForUsername'
REDIS_KEY_ALL_CRED_SET = 'AllCredentials'
REDIS_KEY_ALL_CRED_SET_REV = 'AllCredentialsRev'
REDIS_KEY_ALL_PATH_SET = 'AllPath'
REDIS_KEY_ALL_PATH_SET_REV = 'AllPathRev'
REDIS_KEY_MAP_CRED_TO_PATH = 'CredToPathMapping'
|
|
|
|
|
|
|
|
|
|
|
|
|
2016-12-09 08:46:37 +01:00
|
|
|
# ============ FUNCTIONS ============
|
|
|
|
|
2017-02-28 15:54:39 +01:00
|
|
|
def Term_getValueOverRange(word, startDate, num_day, per_paste=""):
    """Return cumulative hit counts of *word* over several day windows.

    Walks backwards one day at a time from ``startDate`` (a UTC midnight
    timestamp) over ``max(num_day)`` days, summing the per-day counters
    stored in the ``<per_paste><timestamp>`` Redis hashes, and records the
    running total each time a milestone in ``num_day`` is reached.

    :param word: term whose counters are read
    :param startDate: epoch timestamp of the most recent day
    :param num_day: list of window sizes in days, e.g. [1, 7, 31]
    :param per_paste: "" for raw counts, "per_paste_" for per-paste counts
    :return: list of cumulative counts, one per entry in ``num_day``
    """
    DAY_SECONDS = 60 * 60 * 24
    milestones = []
    running_total = 0
    days_elapsed = 0

    for day_ts in range(startDate, startDate - max(num_day) * DAY_SECONDS, -DAY_SECONDS):
        count = r_serv_term.hget(per_paste + str(day_ts), word)
        if count is not None:
            running_total += int(count)
        # Record the cumulative total at each requested window boundary.
        for window in num_day:
            if days_elapsed == window - 1:
                milestones.append(running_total)
        days_elapsed += 1

    return milestones
|
|
|
|
|
2017-07-20 10:50:24 +02:00
|
|
|
#Mix suplied username, if extensive is set, slice username(s) with different windows
|
2017-07-20 10:04:30 +02:00
|
|
|
def mixUserName(supplied, extensive=False):
    """Generate plausible username variants of a supplied name.

    Builds case and concatenation permutations of (at most) the first two
    whitespace-separated parts of ``supplied`` (e.g. "John Smith" ->
    "john", "JohnSmith", "jsmith", ...). When ``extensive`` is True, also
    slices the space-stripped input with every window of size >= 3 and
    filters the result to variants longer than 2 characters.

    :param supplied: the username / full name to expand
    :param extensive: also generate sliding-window substrings
    :return: list of candidate usernames (unfiltered when not extensive)
    """
    #e.g.: John Smith
    parts = supplied.split()[:2]
    usernames = []
    if len(parts) == 1:
        # Pad with a space so all two-part permutations below stay valid.
        parts.append(' ')

    #john, smith, John, Smith, JOHN, SMITH
    usernames += [parts[0].lower()]
    usernames += [parts[1].lower()]
    usernames += [parts[0][0].upper() + parts[0][1:].lower()]
    usernames += [parts[1][0].upper() + parts[1][1:].lower()]
    usernames += [parts[0].upper()]
    usernames += [parts[1].upper()]

    #johnsmith, smithjohn, JOHNsmith, johnSMITH, SMITHjohn, smithJOHN
    usernames += [(parts[0].lower() + parts[1].lower()).strip()]
    usernames += [(parts[1].lower() + parts[0].lower()).strip()]
    usernames += [(parts[0].upper() + parts[1].lower()).strip()]
    usernames += [(parts[0].lower() + parts[1].upper()).strip()]
    usernames += [(parts[1].upper() + parts[0].lower()).strip()]
    usernames += [(parts[1].lower() + parts[0].upper()).strip()]

    #Jsmith, JSmith, jsmith, jSmith, johnS, Js, JohnSmith, Johnsmith, johnSmith
    usernames += [(parts[0][0].upper() + parts[1][0].lower() + parts[1][1:].lower()).strip()]
    usernames += [(parts[0][0].upper() + parts[1][0].upper() + parts[1][1:].lower()).strip()]
    usernames += [(parts[0][0].lower() + parts[1][0].lower() + parts[1][1:].lower()).strip()]
    usernames += [(parts[0][0].lower() + parts[1][0].upper() + parts[1][1:].lower()).strip()]
    usernames += [(parts[0].lower() + parts[1][0].upper()).strip()]
    usernames += [(parts[0].upper() + parts[1][0].lower()).strip()]
    usernames += [(parts[0][0].upper() + parts[0][1:].lower() + parts[1][0].upper() + parts[1][1:].lower()).strip()]
    usernames += [(parts[0][0].upper() + parts[0][1:].lower() + parts[1][0].lower() + parts[1][1:].lower()).strip()]
    usernames += [(parts[0][0].lower() + parts[0][1:].lower() + parts[1][0].upper() + parts[1][1:].lower()).strip()]

    if not extensive:
        return usernames

    #Slice the supplied username(s)
    mixedSupplied = supplied.replace(' ', '')
    # Fixed off-by-one: range(0, len-winSize) dropped the last window of
    # every size (e.g. "ith" from "JohnSmith"); +1 includes it.
    # (Also removed the old `minWindow` local: it was computed but never
    # used, and was meaningless under Python 3 float division anyway.)
    for winSize in range(3, len(mixedSupplied)):
        for startIndex in range(0, len(mixedSupplied) - winSize + 1):
            usernames += [mixedSupplied[startIndex:startIndex + winSize]]

    # Keep only variants long enough to be meaningful search keys.
    return [usr for usr in usernames if len(usr) > 2]
|
2018-02-28 09:19:27 +01:00
|
|
|
|
2018-11-06 16:08:58 +01:00
|
|
|
def save_tag_to_auto_push(list_tag):
    """Register each distinct tag in the exportable-tags Redis set.

    Tags longer than 49 characters are truncated before being stored.

    :param list_tag: iterable of tag strings (duplicates are ignored)
    """
    for raw_tag in set(list_tag):
        #limit tag length
        trimmed = raw_tag if len(raw_tag) <= 49 else raw_tag[0:48]
        r_serv_db.sadd('list_export_tags', trimmed)
|
2016-12-09 08:46:37 +01:00
|
|
|
|
|
|
|
# ============ ROUTES ============
|
|
|
|
|
2017-04-19 11:02:03 +02:00
|
|
|
@terms.route("/terms_management/")
def terms_management():
    """Render the term-tracking management page.

    For every tracked regex, tracked set and plain tracked term, collects:
    hit counts over the last 1/7/31 days, the date it was added, the number
    of matching pastes, whether notifications are enabled, and the
    associated notification e-mails and tags. Also lists blacklisted terms.

    Fix vs previous revision: removed a leftover no-op
    ``tracked_set = tracked_set`` statement in the set loop.
    """
    # "per_paste" mode counts one hit per paste instead of per occurrence.
    per_paste = request.args.get('per_paste')
    if per_paste == "1" or per_paste is None:
        per_paste_text = "per_paste_"
        per_paste = 1
    else:
        per_paste_text = ""
        per_paste = 0

    today = datetime.datetime.now()
    today = today.replace(hour=0, minute=0, second=0, microsecond=0)
    today_timestamp = calendar.timegm(today.timetuple())

    # Map tracking if notifications are enabled for a specific term
    notificationEnabledDict = {}
    # Maps a specific term to the associated email addresses / tags
    notificationEMailTermMapping = {}
    notificationTagsTermMapping = {}

    #Regex
    trackReg_list = []
    trackReg_list_values = []
    trackReg_list_num_of_paste = []
    for tracked_regex in r_serv_term.smembers(TrackedRegexSet_Name):
        notificationEMailTermMapping[tracked_regex] = r_serv_term.smembers(TrackedTermsNotificationEmailsPrefix_Name + tracked_regex)
        notificationTagsTermMapping[tracked_regex] = r_serv_term.smembers(TrackedTermsNotificationTagsPrefix_Name + tracked_regex)

        if tracked_regex not in notificationEnabledDict:
            notificationEnabledDict[tracked_regex] = False

        trackReg_list.append(tracked_regex)
        value_range = Term_getValueOverRange(tracked_regex, today_timestamp, [1, 7, 31], per_paste=per_paste_text)

        term_date = r_serv_term.hget(TrackedRegexDate_Name, tracked_regex)

        set_paste_name = "regex_" + tracked_regex
        trackReg_list_num_of_paste.append(r_serv_term.scard(set_paste_name))
        term_date = datetime.datetime.utcfromtimestamp(int(term_date)) if term_date is not None else "No date recorded"
        value_range.append(term_date)
        trackReg_list_values.append(value_range)

        if tracked_regex in r_serv_term.smembers(TrackedTermsNotificationEnabled_Name):
            notificationEnabledDict[tracked_regex] = True

    #Set
    trackSet_list = []
    trackSet_list_values = []
    trackSet_list_num_of_paste = []
    for tracked_set in r_serv_term.smembers(TrackedSetSet_Name):
        notificationEMailTermMapping[tracked_set] = r_serv_term.smembers(TrackedTermsNotificationEmailsPrefix_Name + tracked_set)
        notificationTagsTermMapping[tracked_set] = r_serv_term.smembers(TrackedTermsNotificationTagsPrefix_Name + tracked_set)

        if tracked_set not in notificationEnabledDict:
            notificationEnabledDict[tracked_set] = False

        trackSet_list.append(tracked_set)
        value_range = Term_getValueOverRange(tracked_set, today_timestamp, [1, 7, 31], per_paste=per_paste_text)

        term_date = r_serv_term.hget(TrackedSetDate_Name, tracked_set)

        set_paste_name = "set_" + tracked_set
        trackSet_list_num_of_paste.append(r_serv_term.scard(set_paste_name))
        term_date = datetime.datetime.utcfromtimestamp(int(term_date)) if term_date is not None else "No date recorded"
        value_range.append(term_date)
        trackSet_list_values.append(value_range)

        if tracked_set in r_serv_term.smembers(TrackedTermsNotificationEnabled_Name):
            notificationEnabledDict[tracked_set] = True

    #Tracked terms
    track_list = []
    track_list_values = []
    track_list_num_of_paste = []
    for tracked_term in r_serv_term.smembers(TrackedTermsSet_Name):
        notificationEMailTermMapping[tracked_term] = r_serv_term.smembers(TrackedTermsNotificationEmailsPrefix_Name + tracked_term)
        notificationTagsTermMapping[tracked_term] = r_serv_term.smembers(TrackedTermsNotificationTagsPrefix_Name + tracked_term)

        if tracked_term not in notificationEnabledDict:
            notificationEnabledDict[tracked_term] = False

        track_list.append(tracked_term)
        value_range = Term_getValueOverRange(tracked_term, today_timestamp, [1, 7, 31], per_paste=per_paste_text)

        term_date = r_serv_term.hget(TrackedTermsDate_Name, tracked_term)

        set_paste_name = "tracked_" + tracked_term
        track_list_num_of_paste.append(r_serv_term.scard(set_paste_name))

        term_date = datetime.datetime.utcfromtimestamp(int(term_date)) if term_date is not None else "No date recorded"
        value_range.append(term_date)
        track_list_values.append(value_range)

        if tracked_term in r_serv_term.smembers(TrackedTermsNotificationEnabled_Name):
            notificationEnabledDict[tracked_term] = True

    #blacklist terms
    black_list = []
    for blacked_term in r_serv_term.smembers(BlackListTermsSet_Name):
        term_date = r_serv_term.hget(BlackListTermsDate_Name, blacked_term)
        term_date = datetime.datetime.utcfromtimestamp(int(term_date)) if term_date is not None else "No date recorded"
        black_list.append([blacked_term, term_date])

    return render_template("terms_management.html",
            black_list=black_list, track_list=track_list, trackReg_list=trackReg_list, trackSet_list=trackSet_list,
            track_list_values=track_list_values, track_list_num_of_paste=track_list_num_of_paste,
            trackReg_list_values=trackReg_list_values, trackReg_list_num_of_paste=trackReg_list_num_of_paste,
            trackSet_list_values=trackSet_list_values, trackSet_list_num_of_paste=trackSet_list_num_of_paste,
            per_paste=per_paste, notificationEnabledDict=notificationEnabledDict, bootstrap_label=bootstrap_label,
            notificationEMailTermMapping=notificationEMailTermMapping, notificationTagsTermMapping=notificationTagsTermMapping)
|
2016-12-09 08:46:37 +01:00
|
|
|
|
|
|
|
|
2017-04-19 11:02:03 +02:00
|
|
|
@terms.route("/terms_management_query_paste/")
def terms_management_query_paste():
    """Return JSON metadata for every paste matching a tracked term.

    The ``term`` query argument may be a regex (``/.../``), a set
    (``\\...\\``) or a plain term; each kind stores its matching paste
    paths under a differently prefixed Redis set.
    """
    term = request.args.get('term')
    paste_info = []

    # check if regex or not
    if term.startswith('/') and term.endswith('/'):
        set_prefix = "regex_"
    elif term.startswith('\\') and term.endswith('\\'):
        set_prefix = "set_"
    else:
        set_prefix = "tracked_"
    track_list_path = r_serv_term.smembers(set_prefix + term)

    for path in track_list_path:
        paste = Paste.Paste(path)
        raw_date = str(paste._get_p_date())
        # YYYYMMDD -> YYYY/MM/DD
        formatted_date = raw_date[0:4] + '/' + raw_date[4:6] + '/' + raw_date[6:8]
        source = paste.p_source
        size = paste.p_size
        mime = paste.p_mime
        lineinfo = paste.get_lines_info()
        content = paste.get_p_content()
        if content != 0:
            # Preview only: cap at 400 characters.
            content = content[0:400]
        paste_info.append({"path": path, "date": formatted_date, "source": source,
                           "size": size, "mime": mime, "lineinfo": lineinfo,
                           "content": content})

    return jsonify(paste_info)
|
|
|
|
|
|
|
|
|
2017-04-19 11:02:03 +02:00
|
|
|
@terms.route("/terms_management_query/")
def terms_management_query():
    """Return [1-day, 7-day, 31-day counts, date-added] for one term as JSON.

    ``section`` selects which date hash to consult ("followTerm" or
    "blacklistTerm").

    Fixes vs previous revision:
    - ``term_date`` was unbound (NameError) when ``section`` was neither
      "followTerm" nor "blacklistTerm"; it now defaults to None and yields
      "No date recorded".
    - Removed local re-declarations of ``TrackedTermsDate_Name`` and
      ``BlackListTermsDate_Name`` that shadowed the identical module-level
      constants.
    """
    term = request.args.get('term')
    section = request.args.get('section')

    today = datetime.datetime.now()
    today = today.replace(hour=0, minute=0, second=0, microsecond=0)
    today_timestamp = calendar.timegm(today.timetuple())
    value_range = Term_getValueOverRange(term, today_timestamp, [1, 7, 31])

    term_date = None
    if section == "followTerm":
        term_date = r_serv_term.hget(TrackedTermsDate_Name, term)
    elif section == "blacklistTerm":
        term_date = r_serv_term.hget(BlackListTermsDate_Name, term)

    term_date = datetime.datetime.utcfromtimestamp(int(term_date)) if term_date is not None else "No date recorded"

    value_range.append(str(term_date))
    return jsonify(value_range)
|
|
|
|
|
|
|
|
|
2017-04-19 11:02:03 +02:00
|
|
|
@terms.route("/terms_management_action/", methods=['GET'])
def terms_management_action():
    """Add, delete or toggle notifications for a tracked/blacklisted term.

    Query args: ``section`` ("followTerm" | "blacklistTerm"), ``action``
    ("add", "toggleEMailNotification", or anything else = delete),
    ``term``, ``emailAddresses`` (whitespace-separated), ``tags``
    (whitespace-separated). Returns the echoed parameters as JSON, or the
    string "None" on missing/unknown input.

    Fix vs previous revision: blacklist "add" stored the date under the
    original-case ``term`` while the set stored ``term.lower()``, so the
    management page could never resolve the date; both now use
    ``term.lower()``.
    """
    today = datetime.datetime.now()
    today = today.replace(microsecond=0)
    today_timestamp = calendar.timegm(today.timetuple())

    section = request.args.get('section')
    action = request.args.get('action')
    term = request.args.get('term')
    notificationEmailsParam = request.args.get('emailAddresses')
    input_tags = request.args.get('tags')

    if action is None or term is None or notificationEmailsParam is None:
        return "None"
    else:
        if section == "followTerm":
            if action == "add":
                # Make a list of all passed email addresses
                notificationEmails = notificationEmailsParam.split()

                validNotificationEmails = []
                # check for valid email addresses
                for email in notificationEmails:
                    # Really basic validation:
                    # has exactly one @ sign, and at least one . in the part after the @
                    if re.match(r"[^@]+@[^@]+\.[^@]+", email):
                        validNotificationEmails.append(email)

                # create tags list
                list_tags = input_tags.split()

                # check if regex/set or simple term
                #regex
                if term.startswith('/') and term.endswith('/'):
                    r_serv_term.sadd(TrackedRegexSet_Name, term)
                    r_serv_term.hset(TrackedRegexDate_Name, term, today_timestamp)
                    # add all valid emails to the set
                    for email in validNotificationEmails:
                        r_serv_term.sadd(TrackedTermsNotificationEmailsPrefix_Name + term, email)
                    # enable notifications by default
                    r_serv_term.sadd(TrackedTermsNotificationEnabled_Name, term)
                    # add tags list
                    for tag in list_tags:
                        r_serv_term.sadd(TrackedTermsNotificationTagsPrefix_Name + term, tag)
                    save_tag_to_auto_push(list_tags)

                #set
                elif term.startswith('\\') and term.endswith('\\'):
                    tab_term = term[1:-1]
                    # optional trailing "[NN]" = explicit match percentage
                    perc_finder = re.compile(r"\[[0-9]{1,3}\]").search(tab_term)
                    if perc_finder is not None:
                        match_percent = perc_finder.group(0)[1:-1]
                        set_to_add = term
                    else:
                        match_percent = DEFAULT_MATCH_PERCENT
                        set_to_add = "\\" + tab_term[:-1] + ", [{}]]\\".format(match_percent)
                    r_serv_term.sadd(TrackedSetSet_Name, set_to_add)
                    r_serv_term.hset(TrackedSetDate_Name, set_to_add, today_timestamp)
                    # add all valid emails to the set
                    for email in validNotificationEmails:
                        r_serv_term.sadd(TrackedTermsNotificationEmailsPrefix_Name + set_to_add, email)
                    # enable notifications by default
                    r_serv_term.sadd(TrackedTermsNotificationEnabled_Name, set_to_add)
                    # add tags list
                    for tag in list_tags:
                        r_serv_term.sadd(TrackedTermsNotificationTagsPrefix_Name + set_to_add, tag)
                    save_tag_to_auto_push(list_tags)

                #simple term
                else:
                    r_serv_term.sadd(TrackedTermsSet_Name, term.lower())
                    r_serv_term.hset(TrackedTermsDate_Name, term.lower(), today_timestamp)
                    # add all valid emails to the set
                    for email in validNotificationEmails:
                        r_serv_term.sadd(TrackedTermsNotificationEmailsPrefix_Name + term.lower(), email)
                    # enable notifications by default
                    r_serv_term.sadd(TrackedTermsNotificationEnabled_Name, term.lower())
                    # add tags list
                    for tag in list_tags:
                        r_serv_term.sadd(TrackedTermsNotificationTagsPrefix_Name + term.lower(), tag)
                    save_tag_to_auto_push(list_tags)

            elif action == "toggleEMailNotification":
                # get the current state
                # NOTE(review): membership is tested with the raw `term` but
                # add/remove use `term.lower()` — for regex/set terms (added
                # un-lowered above) this may not round-trip; confirm intent.
                if term in r_serv_term.smembers(TrackedTermsNotificationEnabled_Name):
                    # remove it
                    r_serv_term.srem(TrackedTermsNotificationEnabled_Name, term.lower())
                else:
                    # add it
                    r_serv_term.sadd(TrackedTermsNotificationEnabled_Name, term.lower())

            #del action
            else:
                if term.startswith('/') and term.endswith('/'):
                    r_serv_term.srem(TrackedRegexSet_Name, term)
                    r_serv_term.hdel(TrackedRegexDate_Name, term)
                elif term.startswith('\\') and term.endswith('\\'):
                    r_serv_term.srem(TrackedSetSet_Name, term)
                    r_serv_term.hdel(TrackedSetDate_Name, term)
                else:
                    r_serv_term.srem(TrackedTermsSet_Name, term.lower())
                    r_serv_term.hdel(TrackedTermsDate_Name, term.lower())

                # delete the associated notification emails too
                r_serv_term.delete(TrackedTermsNotificationEmailsPrefix_Name + term)
                # delete the associated tags set
                r_serv_term.delete(TrackedTermsNotificationTagsPrefix_Name + term)

        elif section == "blacklistTerm":
            if action == "add":
                r_serv_term.sadd(BlackListTermsSet_Name, term.lower())
                # Fixed: date must be keyed the same way as the set entry.
                r_serv_term.hset(BlackListTermsDate_Name, term.lower(), today_timestamp)
            else:
                r_serv_term.srem(BlackListTermsSet_Name, term.lower())
        else:
            return "None"

    to_return = {}
    to_return["section"] = section
    to_return["action"] = action
    to_return["term"] = term
    return jsonify(to_return)
|
|
|
|
|
2018-11-07 15:37:25 +01:00
|
|
|
@terms.route("/terms_management/delete_terms_tags", methods=['POST'])
def delete_terms_tags():
    """Remove the posted tags from a term's notification-tags set,
    then redirect back to the management page."""
    term = request.form.get('term')
    tags_to_delete = request.form.getlist('tags_to_delete')

    # Guard clause: both form fields are required.
    if term is None or tags_to_delete is None:
        return 'None args', 400

    for tag in tags_to_delete:
        r_serv_term.srem(TrackedTermsNotificationTagsPrefix_Name + term, tag)
    return redirect(url_for('terms.terms_management'))
|
|
|
|
|
2018-11-07 11:42:31 +01:00
|
|
|
@terms.route("/terms_management/delete_terms_email", methods=['GET'])
def delete_terms_email():
    """Remove one e-mail address from a term's notification set,
    then redirect back to the management page."""
    term = request.args.get('term')
    email = request.args.get('email')

    # Guard clause: both query parameters are required.
    if term is None or email is None:
        return 'None args', 400

    r_serv_term.srem(TrackedTermsNotificationEmailsPrefix_Name + term, email)
    return redirect(url_for('terms.terms_management'))
|
2016-12-09 08:46:37 +01:00
|
|
|
|
|
|
|
|
2017-04-19 11:02:03 +02:00
|
|
|
@terms.route("/terms_plot_tool/")
def terms_plot_tool():
    """Render the term-plotting page, pre-filling the optional ``term``
    query argument (empty string when absent)."""
    term = request.args.get('term')
    return render_template("terms_plot_tool.html",
                           term=term if term is not None else "")
|
|
|
|
|
|
|
|
|
2017-04-19 11:02:03 +02:00
|
|
|
@terms.route("/terms_plot_tool_data/")
def terms_plot_tool_data():
    """Return day-by-day counts of ``term`` between two epoch timestamps.

    ``range_start``/``range_end`` are float epoch strings; each is snapped
    to UTC midnight. The response is a JSON list whose first element is the
    term followed by ``[timestamp, count]`` pairs.

    Fix vs previous revision: when ``range_start``/``range_end`` was
    missing, the old code assigned the int ``0`` and then called
    ``.replace(...)`` on it, raising AttributeError; the fallback now skips
    the datetime conversion entirely.
    """
    oneDay = 60 * 60 * 24

    raw_start = request.args.get('range_start')
    if raw_start is not None:
        range_start = datetime.datetime.utcfromtimestamp(int(float(raw_start)))
        range_start = range_start.replace(hour=0, minute=0, second=0, microsecond=0)
        range_start = calendar.timegm(range_start.timetuple())
    else:
        range_start = 0

    raw_end = request.args.get('range_end')
    if raw_end is not None:
        range_end = datetime.datetime.utcfromtimestamp(int(float(raw_end)))
        range_end = range_end.replace(hour=0, minute=0, second=0, microsecond=0)
        range_end = calendar.timegm(range_end.timetuple())
    else:
        range_end = 0

    term = request.args.get('term')

    per_paste = request.args.get('per_paste')
    if per_paste == "1" or per_paste is None:
        per_paste = "per_paste_"
    else:
        per_paste = ""

    if term is None:
        return "None"
    else:
        value_range = []
        for timestamp in range(range_start, range_end + oneDay, oneDay):
            value = r_serv_term.hget(per_paste + str(timestamp), term)
            curr_value_range = int(value) if value is not None else 0
            value_range.append([timestamp, curr_value_range])
        value_range.insert(0, term)
        return jsonify(value_range)
|
|
|
|
|
|
|
|
|
2017-04-19 11:02:03 +02:00
|
|
|
@terms.route("/terms_plot_top/")
def terms_plot_top():
    """Render the top-terms plot page; ``per_paste`` defaults to 1."""
    per_paste = request.args.get('per_paste')
    if per_paste is None:
        per_paste = 1
    return render_template("terms_plot_top.html", per_paste=per_paste)
|
2016-12-09 08:46:37 +01:00
|
|
|
|
|
|
|
|
2017-04-19 11:02:03 +02:00
|
|
|
@terms.route("/terms_plot_top_data/")
def terms_plot_top_data():
    """Return the top-20 terms of a frequency sorted set with their
    day/week/month ranks and per-day history over ``num_day`` days.

    Query args: ``set`` (sorted-set suffix, e.g. "TopTermFreq_set_day"),
    ``num_day`` (history length), ``per_paste``.

    Fix vs previous revision: ``per_paste + request.args.get('set')``
    raised TypeError when ``set`` was missing, making the old
    ``if the_set is None`` check unreachable; the argument is now checked
    before concatenation. Also dropped stray semicolons.
    """
    oneDay = 60 * 60 * 24
    today = datetime.datetime.now()
    today = today.replace(hour=0, minute=0, second=0, microsecond=0)
    today_timestamp = calendar.timegm(today.timetuple())

    per_paste = request.args.get('per_paste')
    if per_paste == "1" or per_paste is None:
        per_paste = "per_paste_"
    else:
        per_paste = ""

    set_day = per_paste + "TopTermFreq_set_day_" + str(today_timestamp)
    set_week = per_paste + "TopTermFreq_set_week"
    set_month = per_paste + "TopTermFreq_set_month"

    requested_set = request.args.get('set')
    if requested_set is None:
        return "None"
    the_set = per_paste + requested_set
    num_day = int(request.args.get('num_day'))

    to_return = []
    # The daily set is suffixed with today's timestamp.
    if "TopTermFreq_set_day" in the_set:
        the_set += "_" + str(today_timestamp)

    for term, tot_value in r_serv_term.zrevrangebyscore(the_set, '+inf', '-inf', withscores=True, start=0, num=20):
        position = {}
        position['day'] = r_serv_term.zrevrank(set_day, term)
        position['day'] = position['day'] + 1 if position['day'] is not None else "<20"
        position['week'] = r_serv_term.zrevrank(set_week, term)
        position['week'] = position['week'] + 1 if position['week'] is not None else "<20"
        position['month'] = r_serv_term.zrevrank(set_month, term)
        position['month'] = position['month'] + 1 if position['month'] is not None else "<20"

        value_range = []
        for timestamp in range(today_timestamp, today_timestamp - num_day * oneDay, -oneDay):
            value = r_serv_term.hget(per_paste + str(timestamp), term)
            curr_value_range = int(value) if value is not None else 0
            value_range.append([timestamp, curr_value_range])

        to_return.append([term, value_range, tot_value, position])

    return jsonify(to_return)
|
|
|
|
|
|
|
|
|
2017-07-17 17:26:19 +02:00
|
|
|
@terms.route("/credentials_tracker/")
def credentials_tracker():
    """Render the credentials-tracker search page."""
    return render_template("credentials_tracker.html")
|
|
|
|
|
2017-07-20 10:04:30 +02:00
|
|
|
@terms.route("/credentials_management_query_paste/", methods=['GET', 'POST'])
def credentials_management_query_paste():
    """Return JSON metadata for the pastes referenced by ``allPath``.

    ``allPath`` (JSON body) is a list of paste-path numbers; each is
    resolved to its real path through the reverse-path Redis hash.
    """
    cred = request.args.get('cred')
    allPath = request.json['allPath']

    paste_info = []
    for pathNum in allPath:
        path = r_serv_cred.hget(REDIS_KEY_ALL_PATH_SET_REV, pathNum)
        paste = Paste.Paste(path)
        raw_date = str(paste._get_p_date())
        # YYYYMMDD -> YYYY/MM/DD
        formatted_date = raw_date[0:4] + '/' + raw_date[4:6] + '/' + raw_date[6:8]
        source = paste.p_source
        encoding = paste._get_p_encoding()
        size = paste.p_size
        mime = paste.p_mime
        lineinfo = paste.get_lines_info()
        content = paste.get_p_content()
        if content != 0:
            # Preview only: cap at 400 characters.
            content = content[0:400]
        paste_info.append({"path": path, "date": formatted_date, "source": source,
                           "encoding": encoding, "size": size, "mime": mime,
                           "lineinfo": lineinfo, "content": content})

    return jsonify(paste_info)
|
2017-07-18 16:57:15 +02:00
|
|
|
|
2017-07-17 17:26:19 +02:00
|
|
|
@terms.route("/credentials_management_action/", methods=['GET'])
def cred_management_action():
    """Search stored credentials for usernames similar to a supplied name.

    Expands the supplied name with :func:`mixUserName`, resolves each
    candidate to credential numbers via Redis, and (in ``extensive`` mode)
    also substring-scans every stored username. Returns, per match: the
    username, its paste-path numbers, paste count, and a Levenshtein
    similarity string.

    Fix vs previous revision: the progress-print condition
    ``iter_num % int(tot_iter/20)`` raised ZeroDivisionError whenever
    fewer than 20 (username, candidate) pairs existed; the step is now
    clamped to at least 1.
    """
    supplied = request.args.get('term')
    action = request.args.get('action')
    section = request.args.get('section')
    extensive = request.args.get('extensive')
    extensive = extensive == "true"

    if extensive:
        #collectDico
        AllUsernameInRedis = r_serv_cred.hgetall(REDIS_KEY_ALL_CRED_SET).keys()
    uniq_num_set = set()
    if action == "seek":
        possibilities = mixUserName(supplied, extensive)
        for poss in possibilities:
            num = r_serv_cred.hget(REDIS_KEY_ALL_CRED_SET, poss)
            if num is not None:
                uniq_num_set.add(num)
            for num in r_serv_cred.smembers(poss):
                uniq_num_set.add(num)
        #Extensive /!\
        if extensive:
            iter_num = 0
            tot_iter = len(AllUsernameInRedis) * len(possibilities)
            # Clamp to >= 1: avoids modulo-by-zero for small workloads.
            progress_step = max(1, tot_iter // 20)
            for tempUsername in AllUsernameInRedis:
                for poss in possibilities:
                    #FIXME print progress
                    if iter_num % progress_step == 0:
                        print("searching: {}% done".format(float(iter_num) / float(tot_iter) * 100))
                    iter_num += 1

                    if poss in tempUsername:
                        num = r_serv_cred.hget(REDIS_KEY_ALL_CRED_SET, tempUsername)
                        if num is not None:
                            uniq_num_set.add(num)
                        for num in r_serv_cred.smembers(tempUsername):
                            uniq_num_set.add(num)

    data = {'usr': [], 'path': [], 'numPaste': [], 'simil': []}
    # Hoisted out of the loop below: these depend only on `supplied`.
    supp_splitted = supplied.split()
    supp_mixed = supplied.replace(' ', '')
    supp_splitted.append(supp_mixed)
    for Unum in uniq_num_set:
        levenRatio = 2.0
        username = r_serv_cred.hget(REDIS_KEY_ALL_CRED_SET_REV, Unum)

        # Calculate Levenshtein distance, ignore negative ratio
        # NOTE(review): this keeps the *last* ratio, not the best one —
        # looks like `max` may have been intended; confirm before changing.
        for indiv_supplied in supp_splitted:
            levenRatio = float(Levenshtein.ratio(indiv_supplied, username))
            levenRatioStr = "{:.1%}".format(levenRatio)

        data['usr'].append(username)

        allPathNum = list(r_serv_cred.smembers(REDIS_KEY_MAP_CRED_TO_PATH + '_' + Unum))

        data['path'].append(allPathNum)
        data['numPaste'].append(len(allPathNum))
        data['simil'].append(levenRatioStr)

    to_return = {}
    to_return["section"] = section
    to_return["action"] = action
    to_return["term"] = supplied
    to_return["data"] = data

    return jsonify(to_return)
|
|
|
|
|
2017-07-17 17:26:19 +02:00
|
|
|
|
2017-04-19 11:02:03 +02:00
|
|
|
# ========= REGISTRATION =========
# Attach every route declared on this blueprint to the shared app under
# the configured base URL prefix.
app.register_blueprint(terms, url_prefix=baseUrl)
|