2018-05-04 13:53:29 +02:00
|
|
|
#!/usr/bin/env python3
|
2016-12-09 08:46:37 +01:00
|
|
|
# -*-coding:UTF-8 -*
|
|
|
|
|
|
|
|
'''
|
|
|
|
Flask functions and routes for the trending modules page
|
2018-02-28 09:19:27 +01:00
|
|
|
|
2017-07-20 10:24:48 +02:00
|
|
|
note: The matching of credential against supplied credential is done using Levenshtein distance
|
2016-12-09 08:46:37 +01:00
|
|
|
'''
|
2019-08-14 09:44:49 +02:00
|
|
|
import json
|
2016-12-09 08:46:37 +01:00
|
|
|
import redis
|
|
|
|
import datetime
|
|
|
|
import calendar
|
|
|
|
import flask
|
2019-08-14 09:44:49 +02:00
|
|
|
from flask import Flask, render_template, jsonify, request, Blueprint, url_for, redirect, Response
|
2019-06-19 17:02:09 +02:00
|
|
|
|
2019-11-20 16:15:08 +01:00
|
|
|
from Role_Manager import login_admin, login_analyst, login_user_no_api, login_read_only
|
2019-08-14 09:44:49 +02:00
|
|
|
from flask_login import login_required, current_user
|
2019-05-02 17:31:14 +02:00
|
|
|
|
2017-03-28 17:42:44 +02:00
|
|
|
import re
|
2017-07-18 16:57:15 +02:00
|
|
|
from pprint import pprint
|
2017-07-19 11:52:06 +02:00
|
|
|
import Levenshtein
|
2016-12-09 08:46:37 +01:00
|
|
|
|
2019-08-14 09:44:49 +02:00
|
|
|
# ---------------------------------------------------------------
|
|
|
|
|
|
|
|
import Paste
|
|
|
|
import Term
|
|
|
|
|
2016-12-09 08:46:37 +01:00
|
|
|
# ============ VARIABLES ============
|
|
|
|
import Flask_config
|
|
|
|
|
|
|
|
# Shared Flask application object and URL prefix, configured centrally
# in Flask_config and reused by every blueprint.
app = Flask_config.app
baseUrl = Flask_config.baseUrl
# Redis connections: tracked-term counters, harvested credentials,
# and the main AIL database.
r_serv_term = Flask_config.r_serv_term
r_serv_cred = Flask_config.r_serv_cred
r_serv_db = Flask_config.r_serv_db
# Bootstrap label CSS classes used by the templates.
bootstrap_label = Flask_config.bootstrap_label

# Blueprint holding all routes of this module; registered on `app` at
# the bottom of the file.
terms = Blueprint('terms', __name__, template_folder='templates')
|
|
|
|
|
2017-07-20 10:04:30 +02:00
|
|
|
'''TERM'''

# Default Levenshtein similarity threshold (percent) for a match.
DEFAULT_MATCH_PERCENT = 50

# Redis key names for the term-tracking feature.
#tracked
TrackedTermsSet_Name = "TrackedSetTermSet"
TrackedTermsDate_Name = "TrackedTermDate"
#black
BlackListTermsDate_Name = "BlackListTermDate"
BlackListTermsSet_Name = "BlackListSetTermSet"
#regex
TrackedRegexSet_Name = "TrackedRegexSet"
TrackedRegexDate_Name = "TrackedRegexDate"
#set
TrackedSetSet_Name = "TrackedSetSet"
TrackedSetDate_Name = "TrackedSetDate"

# notifications enabled/disabled
# same value as in `bin/NotificationHelper.py`
TrackedTermsNotificationEnabled_Name = "TrackedNotifications"

# associated notification email addresses for a specific term`
# same value as in `bin/NotificationHelper.py`
# Keys will be e.g. TrackedNotificationEmails_<TERMNAME>
TrackedTermsNotificationEmailsPrefix_Name = "TrackedNotificationEmails_"
TrackedTermsNotificationTagsPrefix_Name = "TrackedNotificationTags_"

'''CRED'''

# Pattern splitting a credential into lowercase / uppercase / mixed-case
# / digit runs.
REGEX_CRED = '[a-z]+|[A-Z]{3,}|[A-Z]{1,2}[a-z]+|[0-9]+'
REDIS_KEY_NUM_USERNAME = 'uniqNumForUsername'
# NOTE(review): same value as REDIS_KEY_NUM_USERNAME above — this looks
# like a copy-paste slip (expected 'uniqNumForPath'?); confirm against
# the writer side (bin/Credential.py) before changing, as it names a
# live redis key.
REDIS_KEY_NUM_PATH = 'uniqNumForUsername'
REDIS_KEY_ALL_CRED_SET = 'AllCredentials'
REDIS_KEY_ALL_CRED_SET_REV = 'AllCredentialsRev'
REDIS_KEY_ALL_PATH_SET = 'AllPath'
REDIS_KEY_ALL_PATH_SET_REV = 'AllPathRev'
REDIS_KEY_MAP_CRED_TO_PATH = 'CredToPathMapping'
|
|
|
|
|
|
|
|
|
|
|
|
|
2016-12-09 08:46:37 +01:00
|
|
|
# ============ FUNCTIONS ============
|
|
|
|
|
2017-02-28 15:54:39 +01:00
|
|
|
def Term_getValueOverRange(word, startDate, num_day, per_paste=""):
    """Return cumulative occurrence counts of `word` over several windows.

    Walks backwards day by day from `startDate` (epoch seconds, midnight)
    over max(num_day) days, summing the per-day counters kept in redis
    hashes keyed by `per_paste + <timestamp>`. Each time the number of
    days walked equals one of the window sizes in `num_day`, the running
    total is recorded, yielding one cumulative count per window
    (e.g. 1 / 7 / 31 days).
    """
    SECONDS_PER_DAY = 60 * 60 * 24
    window_totals = []
    running_total = 0
    day_index = 0

    stop_ts = startDate - max(num_day) * SECONDS_PER_DAY
    for day_ts in range(startDate, stop_ts, -SECONDS_PER_DAY):
        daily_count = r_serv_term.hget(per_paste + str(day_ts), word)
        if daily_count is not None:
            running_total += int(daily_count)
        # Snapshot the running total for every window ending on this day.
        for window in num_day:
            if day_index == window - 1:
                window_totals.append(running_total)
        day_index += 1

    return window_totals
|
|
|
|
|
2017-07-20 10:50:24 +02:00
|
|
|
#Mix supplied username, if extensive is set, slice username(s) with different windows
def mixUserName(supplied, extensive=False):
    """Generate case/order permutations of a supplied (user)name.

    Given up to two whitespace-separated name parts (e.g. "John Smith"),
    build the usual username candidates: each part in lower/Title/UPPER
    case, both parts concatenated in either order and mixed cases, and
    initial+surname style combinations (Jsmith, jSmith, johnS, ...).

    When `extensive` is True, additionally emit every sliding-window
    substring (sizes 3 up to len-1) of the space-stripped input, and
    filter the final list down to entries longer than 2 characters.

    :param supplied: raw name string; only the first two tokens are used
    :param extensive: also generate sliced substrings and drop short entries
    :return: list of candidate usernames (may contain duplicates);
             empty list for an empty/whitespace-only input
    """
    #e.g.: John Smith
    terms = supplied.split()[:2]
    # Guard: an empty supplied string would otherwise raise IndexError below.
    if not terms:
        return []

    usernames = []
    if len(terms) == 1:
        # Pad with a single space so the two-part combinations below still
        # work for a one-word input; .strip() removes the padding again.
        terms.append(' ')

    #john, smith, John, Smith, JOHN, SMITH
    usernames += [terms[0].lower()]
    usernames += [terms[1].lower()]
    usernames += [terms[0][0].upper() + terms[0][1:].lower()]
    usernames += [terms[1][0].upper() + terms[1][1:].lower()]
    usernames += [terms[0].upper()]
    usernames += [terms[1].upper()]

    #johnsmith, smithjohn, JOHNsmith, johnSMITH, SMITHjohn, smithJOHN
    usernames += [(terms[0].lower() + terms[1].lower()).strip()]
    usernames += [(terms[1].lower() + terms[0].lower()).strip()]
    usernames += [(terms[0].upper() + terms[1].lower()).strip()]
    usernames += [(terms[0].lower() + terms[1].upper()).strip()]
    usernames += [(terms[1].upper() + terms[0].lower()).strip()]
    usernames += [(terms[1].lower() + terms[0].upper()).strip()]

    #Jsmith, JSmith, jsmith, jSmith, johnS, Js, JohnSmith, Johnsmith, johnSmith
    usernames += [(terms[0][0].upper() + terms[1][0].lower() + terms[1][1:].lower()).strip()]
    usernames += [(terms[0][0].upper() + terms[1][0].upper() + terms[1][1:].lower()).strip()]
    usernames += [(terms[0][0].lower() + terms[1][0].lower() + terms[1][1:].lower()).strip()]
    usernames += [(terms[0][0].lower() + terms[1][0].upper() + terms[1][1:].lower()).strip()]
    usernames += [(terms[0].lower() + terms[1][0].upper()).strip()]
    usernames += [(terms[0].upper() + terms[1][0].lower()).strip()]
    usernames += [(terms[0][0].upper() + terms[0][1:].lower() + terms[1][0].upper() + terms[1][1:].lower()).strip()]
    usernames += [(terms[0][0].upper() + terms[0][1:].lower() + terms[1][0].lower() + terms[1][1:].lower()).strip()]
    usernames += [(terms[0][0].lower() + terms[0][1:].lower() + terms[1][0].upper() + terms[1][1:].lower()).strip()]

    if not extensive:
        return usernames

    #Slice the supplied username(s)
    mixedSupplied = supplied.replace(' ', '')
    # NOTE(review): a `minWindow` lower bound used to be computed here but
    # was never applied (dead local, removed); the window size deliberately
    # starts at 3 below, matching the historical behavior.
    for winSize in range(3, len(mixedSupplied)):
        for startIndex in range(0, len(mixedSupplied) - winSize):
            usernames += [mixedSupplied[startIndex:startIndex + winSize]]

    # Drop entries too short to be meaningful usernames.
    return [usr for usr in usernames if len(usr) > 2]
|
2018-02-28 09:19:27 +01:00
|
|
|
|
2018-11-06 16:08:58 +01:00
|
|
|
def save_tag_to_auto_push(list_tag):
    """Register each unique tag in the redis set of exportable tags.

    Tags longer than 49 characters are truncated before being stored.
    """
    for current_tag in set(list_tag):
        # Keep stored tags short; over-long ones are cut down.
        stored_tag = current_tag if len(current_tag) <= 49 else current_tag[0:48]
        r_serv_db.sadd('list_export_tags', stored_tag)
|
2016-12-09 08:46:37 +01:00
|
|
|
|
|
|
|
# ============ ROUTES ============
|
|
|
|
|
2020-01-09 11:12:58 +01:00
|
|
|
# TODO: remove + clean
|
|
|
|
|
|
|
|
# @terms.route("/terms_plot_tool/")
|
|
|
|
# @login_required
|
|
|
|
# @login_read_only
|
|
|
|
# def terms_plot_tool():
|
|
|
|
# term = request.args.get('term')
|
|
|
|
# if term is not None:
|
|
|
|
# return render_template("terms_plot_tool.html", term=term)
|
|
|
|
# else:
|
|
|
|
# return render_template("terms_plot_tool.html", term="")
|
|
|
|
#
|
|
|
|
#
|
|
|
|
# @terms.route("/terms_plot_tool_data/")
|
|
|
|
# @login_required
|
|
|
|
# @login_read_only
|
|
|
|
# def terms_plot_tool_data():
|
|
|
|
# oneDay = 60*60*24
|
|
|
|
# range_start = datetime.datetime.utcfromtimestamp(int(float(request.args.get('range_start')))) if request.args.get('range_start') is not None else 0;
|
|
|
|
# range_start = range_start.replace(hour=0, minute=0, second=0, microsecond=0)
|
|
|
|
# range_start = calendar.timegm(range_start.timetuple())
|
|
|
|
# range_end = datetime.datetime.utcfromtimestamp(int(float(request.args.get('range_end')))) if request.args.get('range_end') is not None else 0;
|
|
|
|
# range_end = range_end.replace(hour=0, minute=0, second=0, microsecond=0)
|
|
|
|
# range_end = calendar.timegm(range_end.timetuple())
|
|
|
|
# term = request.args.get('term')
|
|
|
|
#
|
|
|
|
# per_paste = request.args.get('per_paste')
|
|
|
|
# if per_paste == "1" or per_paste is None:
|
|
|
|
# per_paste = "per_paste_"
|
|
|
|
# else:
|
|
|
|
# per_paste = ""
|
|
|
|
#
|
|
|
|
# if term is None:
|
|
|
|
# return "None"
|
|
|
|
#
|
|
|
|
# else:
|
|
|
|
# value_range = []
|
|
|
|
# for timestamp in range(range_start, range_end+oneDay, oneDay):
|
|
|
|
# value = r_serv_term.hget(per_paste+str(timestamp), term)
|
|
|
|
# curr_value_range = int(value) if value is not None else 0
|
|
|
|
# value_range.append([timestamp, curr_value_range])
|
|
|
|
# value_range.insert(0,term)
|
|
|
|
# return jsonify(value_range)
|
|
|
|
#
|
|
|
|
|
|
|
|
# @terms.route("/terms_plot_top/"
|
|
|
|
# @login_required
|
|
|
|
# @login_read_only
|
|
|
|
# def terms_plot_top():
|
|
|
|
# per_paste = request.args.get('per_paste')
|
|
|
|
# per_paste = per_paste if per_paste is not None else 1
|
|
|
|
# return render_template("terms_plot_top.html", per_paste=per_paste)
|
|
|
|
#
|
|
|
|
#
|
|
|
|
# @terms.route("/terms_plot_top_data/")
|
|
|
|
# @login_required
|
|
|
|
# @login_read_only
|
|
|
|
# def terms_plot_top_data():
|
|
|
|
# oneDay = 60*60*24
|
|
|
|
# today = datetime.datetime.now()
|
|
|
|
# today = today.replace(hour=0, minute=0, second=0, microsecond=0)
|
|
|
|
# today_timestamp = calendar.timegm(today.timetuple())
|
|
|
|
#
|
|
|
|
# per_paste = request.args.get('per_paste')
|
|
|
|
# if per_paste == "1" or per_paste is None:
|
|
|
|
# per_paste = "per_paste_"
|
|
|
|
# else:
|
|
|
|
# per_paste = ""
|
|
|
|
#
|
|
|
|
# set_day = per_paste + "TopTermFreq_set_day_" + str(today_timestamp)
|
|
|
|
# set_week = per_paste + "TopTermFreq_set_week";
|
|
|
|
# set_month = per_paste + "TopTermFreq_set_month";
|
|
|
|
#
|
|
|
|
# the_set = per_paste + request.args.get('set')
|
|
|
|
# num_day = int(request.args.get('num_day'))
|
|
|
|
#
|
|
|
|
# if the_set is None:
|
|
|
|
# return "None"
|
|
|
|
# else:
|
|
|
|
# to_return = []
|
|
|
|
# if "TopTermFreq_set_day" in the_set:
|
|
|
|
# the_set += "_" + str(today_timestamp)
|
|
|
|
#
|
|
|
|
# for term, tot_value in r_serv_term.zrevrangebyscore(the_set, '+inf', '-inf', withscores=True, start=0, num=20):
|
|
|
|
# position = {}
|
|
|
|
# position['day'] = r_serv_term.zrevrank(set_day, term)
|
|
|
|
# position['day'] = position['day']+1 if position['day'] is not None else "<20"
|
|
|
|
# position['week'] = r_serv_term.zrevrank(set_week, term)
|
|
|
|
# position['week'] = position['week']+1 if position['week'] is not None else "<20"
|
|
|
|
# position['month'] = r_serv_term.zrevrank(set_month, term)
|
|
|
|
# position['month'] = position['month']+1 if position['month'] is not None else "<20"
|
|
|
|
# value_range = []
|
|
|
|
# for timestamp in range(today_timestamp, today_timestamp - num_day*oneDay, -oneDay):
|
|
|
|
# value = r_serv_term.hget(per_paste+str(timestamp), term)
|
|
|
|
# curr_value_range = int(value) if value is not None else 0
|
|
|
|
# value_range.append([timestamp, curr_value_range])
|
|
|
|
#
|
|
|
|
# to_return.append([term, value_range, tot_value, position])
|
|
|
|
#
|
|
|
|
# return jsonify(to_return)
|
2016-12-09 08:46:37 +01:00
|
|
|
|
|
|
|
|
2017-07-17 17:26:19 +02:00
|
|
|
@terms.route("/credentials_tracker/")
@login_required
@login_read_only
def credentials_tracker():
    """Serve the credential-tracking search page."""
    template_name = "credentials_tracker.html"
    return render_template(template_name)
|
|
|
|
|
2017-07-20 10:04:30 +02:00
|
|
|
@terms.route("/credentials_management_query_paste/", methods=['GET', 'POST'])
@login_required
@login_user_no_api
def credentials_management_query_paste():
    """Return metadata and a 400-char content preview for a list of pastes.

    Expects a JSON body containing an 'allPath' list of paste-path
    numbers; each number is resolved to a real paste path through redis
    before the paste is loaded. Responds with a JSON list of dicts
    (path, date, source, encoding, size, mime, lineinfo, content).
    """
    # NOTE(review): the former `cred = request.args.get('cred')` lookup was
    # unused (dead local) and has been removed; the endpoint only consumes
    # the JSON body.
    allPath = request.json['allPath']

    paste_info = []
    for pathNum in allPath:
        path = r_serv_cred.hget(REDIS_KEY_ALL_PATH_SET_REV, pathNum)
        paste = Paste.Paste(path)
        # The paste date is stored as YYYYMMDD; reformat to YYYY/MM/DD.
        p_date = str(paste._get_p_date())
        p_date = p_date[0:4]+'/'+p_date[4:6]+'/'+p_date[6:8]
        p_source = paste.p_source
        p_encoding = paste._get_p_encoding()
        p_size = paste.p_size
        p_mime = paste.p_mime
        p_lineinfo = paste.get_lines_info()
        p_content = paste.get_p_content()
        # Truncate the content preview; the `!= 0` guard presumably covers
        # a sentinel returned for unreadable pastes — TODO confirm.
        if p_content != 0:
            p_content = p_content[0:400]
        paste_info.append({"path": path, "date": p_date, "source": p_source, "encoding": p_encoding, "size": p_size, "mime": p_mime, "lineinfo": p_lineinfo, "content": p_content})

    return jsonify(paste_info)
|
2017-07-18 16:57:15 +02:00
|
|
|
|
2017-07-17 17:26:19 +02:00
|
|
|
@terms.route("/credentials_management_action/", methods=['GET'])
@login_required
@login_user_no_api
def cred_management_action():
    """Seek stored credentials matching the supplied term; return JSON.

    Query parameters:
        term      -- the (user)name to look for
        action    -- only "seek" is handled here
        section   -- echoed back unchanged to the caller
        extensive -- "true" enables substring matching over every stored
                     username (expensive: full scan of the credential hash)

    Returns JSON echoing section/action/term plus a `data` dict of
    parallel lists: matched usernames, their paste-path numbers, paste
    counts, and a Levenshtein similarity ratio against the term.
    """
    supplied = request.args.get('term')
    action = request.args.get('action')
    section = request.args.get('section')
    extensive = request.args.get('extensive')
    extensive = True if extensive == "true" else False

    if extensive:
        #collectDico
        # Full dump of every known username (keys of the cred hash).
        AllUsernameInRedis = r_serv_cred.hgetall(REDIS_KEY_ALL_CRED_SET).keys()
    uniq_num_set = set()
    if action == "seek":
        # Exact-match pass over all case/order permutations of the term.
        possibilities = mixUserName(supplied, extensive)
        for poss in possibilities:
            num = r_serv_cred.hget(REDIS_KEY_ALL_CRED_SET, poss)
            if num is not None:
                uniq_num_set.add(num)
            for num in r_serv_cred.smembers(poss):
                uniq_num_set.add(num)
        #Extensive /!\
        # Substring pass: scan every stored username for each permutation.
        if extensive:
            iter_num = 0
            tot_iter = len(AllUsernameInRedis)*len(possibilities)
            for tempUsername in AllUsernameInRedis:
                for poss in possibilities:
                    #FIXME print progress
                    # NOTE(review): raises ZeroDivisionError when
                    # tot_iter < 20 (int(tot_iter/20) == 0) — confirm.
                    if(iter_num % int(tot_iter/20) == 0):
                        #print("searching: {}% done".format(int(iter_num/tot_iter*100)), sep=' ', end='\r', flush=True)
                        print("searching: {}% done".format(float(iter_num)/float(tot_iter)*100))
                    iter_num += 1

                    if poss in tempUsername:
                        num = (r_serv_cred.hget(REDIS_KEY_ALL_CRED_SET, tempUsername))
                        if num is not None:
                            uniq_num_set.add(num)
                        for num in r_serv_cred.smembers(tempUsername):
                            uniq_num_set.add(num)

    data = {'usr': [], 'path': [], 'numPaste': [], 'simil': []}
    for Unum in uniq_num_set:
        levenRatio = 2.0
        username = (r_serv_cred.hget(REDIS_KEY_ALL_CRED_SET_REV, Unum))

        # Calculate Levenshtein distance, ignore negative ratio
        supp_splitted = supplied.split()
        supp_mixed = supplied.replace(' ','')
        supp_splitted.append(supp_mixed)
        for indiv_supplied in supp_splitted:
            levenRatio = float(Levenshtein.ratio(indiv_supplied, username))
            # NOTE(review): only the ratio of the LAST token survives this
            # loop; a max() over all tokens may have been intended — confirm.
            levenRatioStr = "{:.1%}".format(levenRatio)

        data['usr'].append(username)

        # Every paste-path number this credential appeared in.
        allPathNum = list(r_serv_cred.smembers(REDIS_KEY_MAP_CRED_TO_PATH+'_'+Unum))

        data['path'].append(allPathNum)
        data['numPaste'].append(len(allPathNum))
        data['simil'].append(levenRatioStr)

    to_return = {}
    to_return["section"] = section
    to_return["action"] = action
    to_return["term"] = supplied
    to_return["data"] = data

    return jsonify(to_return)
|
|
|
|
|
2017-07-17 17:26:19 +02:00
|
|
|
|
2017-04-19 11:02:03 +02:00
|
|
|
# ========= REGISTRATION =========
# Attach all routes defined above to the main application under baseUrl.
app.register_blueprint(terms, url_prefix=baseUrl)
|