#!/usr/bin/env python3
# -*-coding:UTF-8 -*
'''
    Flask functions and routes for the search page
'''
import redis
import json
import os
import datetime
import flask
from flask import Flask, render_template, jsonify, request, Blueprint

from Role_Manager import login_admin, login_analyst
from flask_login import login_required

import Paste
from whoosh import index
from whoosh.fields import Schema, TEXT, ID
from whoosh.qparser import QueryParser

import time

# ============ VARIABLES ============
import Flask_config

app = Flask_config.app
config_loader = Flask_config.config_loader
baseUrl = Flask_config.baseUrl
r_serv_pasteName = Flask_config.r_serv_pasteName
r_serv_metadata = Flask_config.r_serv_metadata
max_preview_char = Flask_config.max_preview_char
max_preview_modal = Flask_config.max_preview_modal
bootstrap_label = Flask_config.bootstrap_label
PASTES_FOLDER = Flask_config.PASTES_FOLDER

baseindexpath = os.path.join(os.environ['AIL_HOME'], config_loader.get_config_str("Indexer", "path"))
indexRegister_path = os.path.join(os.environ['AIL_HOME'], config_loader.get_config_str("Indexer", "register"))

searches = Blueprint('searches', __name__, template_folder='templates')

# ============ FUNCTIONS ============

def get_current_index():
    '''Return the path of the most recent Whoosh index listed in the register file.'''
    with open(indexRegister_path, "r") as f:
        allIndex = f.read()
        allIndex = allIndex.split() # format [time1\ntime2]
        allIndex.sort()

        try:
            indexname = allIndex[-1].strip('\n\r')
        except IndexError:
            indexname = "no-index"

        indexpath = os.path.join(baseindexpath, indexname)
    return indexpath

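# Illustrative sketch (not part of the module's behaviour): the register file is
# assumed to hold one epoch-timestamp index name per line. Because the names are
# fixed-width epoch strings, a lexicographic sort is also chronological, so e.g.
# a register containing "1489571495\n1489577495" resolves to
# os.path.join(baseindexpath, "1489577495").
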
def get_index_list(selected_index=""):
|
2017-03-15 14:05:13 +01:00
|
|
|
temp = []
|
2017-03-15 09:39:48 +01:00
|
|
|
index_list = []
|
|
|
|
for dirs in os.listdir(baseindexpath):
|
|
|
|
if os.path.isdir(os.path.join(baseindexpath, dirs)):
|
2017-03-15 10:07:46 +01:00
|
|
|
value = dirs
|
2017-03-15 11:51:35 +01:00
|
|
|
name = to_iso_date(dirs) + " - " + \
|
2018-11-05 10:13:48 +01:00
|
|
|
str(get_dir_size(dirs) / (1000*1000)) + " Mb " #+ \
|
|
|
|
#"(" + str(get_item_count(dirs))''' + " Items" + ")"
|
2017-03-15 10:07:46 +01:00
|
|
|
flag = dirs==selected_index.split('/')[-1]
|
2017-03-15 14:05:13 +01:00
|
|
|
if dirs == "old_index":
|
|
|
|
temp = [value, name, flag]
|
|
|
|
else:
|
|
|
|
index_list.append([value, name, flag])
|
2017-03-15 11:51:35 +01:00
|
|
|
|
2017-03-15 14:05:13 +01:00
|
|
|
index_list.sort(reverse=True, key=lambda x: x[0])
|
|
|
|
if len(temp) != 0:
|
|
|
|
index_list.append(temp)
|
2018-11-05 10:13:48 +01:00
|
|
|
|
2017-03-15 09:39:48 +01:00
|
|
|
return index_list
|
|
|
|
|
|
|
|
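# Shape of the returned list (illustrative values, assuming two dated indexes
# plus the legacy "old_index" directory, with the first one selected):
#   [['1489577495', '2017-03-15 - 12.582912 MB ', True],
#    ['1489571495', '2017-03-15 - 8.126464 MB ', False],
#    ['old_index', 'old_index - 42.0 MB ', False]]
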
def get_dir_size(directory):
    '''Return the total size in bytes of every file under the given index directory.'''
    cur_sum = 0
    # avoid shadowing the 'directory' argument while walking the tree
    for dirpath, subdirs, files in os.walk(os.path.join(baseindexpath, directory)):
        try:
            cur_sum += sum(os.path.getsize(os.path.join(dirpath, name)) for name in files)
        except OSError: # file disappeared between the walk and the stat
            pass
    return cur_sum

def get_item_count(dirs):
    '''Return the number of documents stored in the given Whoosh index.'''
    ix = index.open_dir(os.path.join(baseindexpath, dirs))
    return ix.doc_count_all()

def to_iso_date(timestamp):
    '''Convert an epoch-timestamp index name to a YYYY-MM-DD date string.'''
    if timestamp == "old_index":
        return "old_index"
    return str(datetime.datetime.fromtimestamp(int(timestamp))).split()[0]

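# Illustrative examples (the date depends on the local timezone, since
# datetime.datetime.fromtimestamp() converts to local time):
#   to_iso_date("1489571495")  ->  '2017-03-15'
#   to_iso_date("old_index")   ->  'old_index'
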
# ============ ROUTES ============

@searches.route("/search", methods=['POST'])
@login_required
@login_analyst
def search():
    query = request.form['query']
    q = []
    q.append(query)
    r = [] #complete path
    c = [] #preview of the paste content
    paste_date = []
    paste_size = []
    paste_tags = []
    index_name = request.form['index_name']
    num_elem_to_get = 50

    # select correct index
    if index_name is None or index_name == "0":
        selected_index = get_current_index()
    else:
        selected_index = os.path.join(baseindexpath, index_name)

    ''' temporarily disabled

    # Search filename
    for path in r_serv_pasteName.smembers(q[0]):
        r.append(path)
        paste = Paste.Paste(path)
        content = paste.get_p_content()
        content_range = max_preview_char if len(content)>max_preview_char else len(content)
        c.append(content[0:content_range])
        curr_date = str(paste._get_p_date())
        curr_date = curr_date[0:4]+'/'+curr_date[4:6]+'/'+curr_date[6:]
        paste_date.append(curr_date)
        paste_size.append(paste._get_p_size())
    '''

    # Search full line
    schema = Schema(title=TEXT(stored=True), path=ID(stored=True), content=TEXT)

    ix = index.open_dir(selected_index)
    with ix.searcher() as searcher:
        query = QueryParser("content", ix.schema).parse(" ".join(q))
        results = searcher.search_page(query, 1, pagelen=num_elem_to_get)
        for x in results:
            # compute the relative item path once, then reuse it
            path = x.items()[0][1].replace(PASTES_FOLDER, '', 1)
            r.append(path)
            paste = Paste.Paste(path)
            content = paste.get_p_content()
            content_range = max_preview_char if len(content)>max_preview_char else len(content)
            c.append(content[0:content_range])
            curr_date = str(paste._get_p_date())
            curr_date = curr_date[0:4]+'/'+curr_date[4:6]+'/'+curr_date[6:]
            paste_date.append(curr_date)
            paste_size.append(paste._get_p_size())
            # a tag looks like 'infoleak:automatic-detection="credential"';
            # keep the full tag and extract the quoted value for display
            p_tags = r_serv_metadata.smembers('tag:'+path)
            l_tags = []
            for tag in p_tags:
                complete_tag = tag
                tag = tag.split('=')
                if len(tag) > 1:
                    if tag[1] != '':
                        tag = tag[1][1:-1]
                    # no value
                    else:
                        tag = tag[0][1:-1]
                # use for custom tags
                else:
                    tag = tag[0]

                l_tags.append( (tag, complete_tag) )

            paste_tags.append(l_tags)

        # run the query again without pagination to get the total hit count
        results = searcher.search(query)
        num_res = len(results)

    index_list = get_index_list()

    index_min = 1
    index_max = len(index_list)

    return render_template("search.html", r=r, c=c,
                           query=request.form['query'], paste_date=paste_date,
                           paste_size=paste_size, char_to_display=max_preview_modal,
                           num_res=num_res, index_min=index_min, index_max=index_max,
                           bootstrap_label=bootstrap_label,
                           paste_tags=paste_tags,
                           index_list=index_list
                           )

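# Example request (illustrative; assumes a logged-in session cookie, the default
# AIL web port, and an empty baseUrl prefix):
#   curl -sk -b cookies.txt -X POST https://127.0.0.1:7000/search \
#        -d 'query=bitcoin' -d 'index_name=0'
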
@searches.route("/get_more_search_result", methods=['POST'])
|
2019-05-02 17:31:14 +02:00
|
|
|
@login_required
|
2019-06-19 17:02:09 +02:00
|
|
|
@login_analyst
|
2016-12-09 08:46:37 +01:00
|
|
|
def get_more_search_result():
    query = request.form['query']
    q = []
    q.append(query)
    page_offset = int(request.form['page_offset'])
    index_name = request.form['index_name']
    num_elem_to_get = 50

    # select correct index
    if index_name is None or index_name == "0":
        selected_index = get_current_index()
    else:
        selected_index = os.path.join(baseindexpath, index_name)

    path_array = []
    preview_array = []
    date_array = []
    size_array = []
    list_tags = []

    schema = Schema(title=TEXT(stored=True), path=ID(stored=True), content=TEXT)

    ix = index.open_dir(selected_index)
    with ix.searcher() as searcher:
        query = QueryParser("content", ix.schema).parse(" ".join(q))
        results = searcher.search_page(query, page_offset, num_elem_to_get)
        for x in results:
            path = x.items()[0][1]
            path = path.replace(PASTES_FOLDER, '', 1)
            path_array.append(path)
            paste = Paste.Paste(path)
            content = paste.get_p_content()
            content_range = max_preview_char if len(content)>max_preview_char else len(content)
            preview_array.append(content[0:content_range])
            curr_date = str(paste._get_p_date())
            curr_date = curr_date[0:4]+'/'+curr_date[4:6]+'/'+curr_date[6:]
            date_array.append(curr_date)
            size_array.append(paste._get_p_size())
            p_tags = r_serv_metadata.smembers('tag:'+path)
            l_tags = []
            for tag in p_tags:
                complete_tag = tag
                tag = tag.split('=')
                if len(tag) > 1:
                    if tag[1] != '':
                        tag = tag[1][1:-1]
                    # no value
                    else:
                        tag = tag[0][1:-1]
                # use for custom tags
                else:
                    tag = tag[0]

                l_tags.append( (tag, complete_tag) )

            list_tags.append(l_tags)

    to_return = {}
    to_return["path_array"] = path_array
    to_return["preview_array"] = preview_array
    to_return["date_array"] = date_array
    to_return["size_array"] = size_array
    to_return["list_tags"] = list_tags
    to_return["bootstrap_label"] = bootstrap_label
    if len(path_array) < num_elem_to_get: #pagelength
        to_return["moreData"] = False
    else:
        to_return["moreData"] = True

    return jsonify(to_return)

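# Example pagination request and response shape (illustrative; same assumptions
# as the /search example above):
#   curl -sk -b cookies.txt -X POST https://127.0.0.1:7000/get_more_search_result \
#        -d 'query=bitcoin' -d 'index_name=0' -d 'page_offset=2'
# returns JSON such as:
#   {"path_array": [...], "preview_array": [...], "date_array": [...],
#    "size_array": [...], "list_tags": [...], "bootstrap_label": [...],
#    "moreData": true}
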
# ========= REGISTRATION =========
app.register_blueprint(searches, url_prefix=baseUrl)