#!/usr/bin/env python
# -*-coding:UTF-8 -*
"""
The ZMQ_Sub_Indexer Module
============================

The ZMQ_Sub_Indexer module fetches the list of files to be processed
and indexes each file with a full-text indexer (currently Whoosh).
"""
import time
from packages import Paste
from pubsublogger import publisher

from whoosh.index import create_in, exists_in, open_dir
from whoosh.fields import Schema, TEXT, ID
import shutil
import os
from os.path import join, getsize

from Helper import Process

# Config variable
TIME_WAIT = 60*15  # sec


# return the index size in bytes
def check_index_size(baseindexpath, indexname):
    the_index_name = join(baseindexpath, indexname)
    cur_sum = 0
    for root, dirs, files in os.walk(the_index_name):
        cur_sum += sum(getsize(join(root, name)) for name in files)
    return cur_sum


def move_index_into_old_index_folder(baseindexpath):
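    # archive everything currently under baseindexpath into the 'old_index'
    # subfolder (created first if it does not exist)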
    if not os.path.isdir(join(baseindexpath, "old_index")):
        os.mkdir(join(baseindexpath, "old_index"))
    for cur_file in os.listdir(baseindexpath):
        if not cur_file == "old_index":
            shutil.move(join(baseindexpath, cur_file), join(join(baseindexpath, "old_index"), cur_file))


if __name__ == "__main__":
    publisher.port = 6380
    publisher.channel = "Script"

    config_section = 'Indexer'

    p = Process(config_section)

    # Indexer configuration - index dir and schema setup
    baseindexpath = join(os.environ['AIL_HOME'],
                         p.config.get("Indexer", "path"))
    indexRegister_path = join(os.environ['AIL_HOME'],
                              p.config.get("Indexer", "register"))
    indexertype = p.config.get("Indexer", "type")
    INDEX_SIZE_THRESHOLD = int(p.config.get("Indexer", "index_max_size"))

    if indexertype == "whoosh":
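        # Whoosh schema: 'title' and 'path' are stored ('path' is the unique key),
        # 'content' is indexed but not stored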
        schema = Schema(title=TEXT(stored=True), path=ID(stored=True,
                                                          unique=True),
                        content=TEXT)
        if not os.path.exists(baseindexpath):
            os.mkdir(baseindexpath)

        # create the index register if not present
        time_now = int(time.time())
        if not os.path.isfile(indexRegister_path):  # indexes are not organised yet
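            # first run with a register: archive the existing layout and start a new index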
            print("Indexes are not organized")
            print("Moving all files into the 'old_index' folder")
            # move all files to the old_index folder
            move_index_into_old_index_folder(baseindexpath)
            print("Creating new index")
            # create all_index.txt
            with open(indexRegister_path, 'w') as f:
                f.write(str(time_now))
            # create the directory for the new index
            os.mkdir(join(baseindexpath, str(time_now)))
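        # the register holds one index-creation timestamp per line;
        # the most recent timestamp is the active index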
        with open(indexRegister_path, "r") as f:
            allIndex = f.read()
        allIndex = allIndex.split()  # format: [time1\ntime2]
        allIndex.sort()

        try:
            indexname = allIndex[-1].strip('\n\r')
        except IndexError:
            # empty register: fall back to the timestamp computed above
            indexname = str(time_now)

        indexpath = join(baseindexpath, str(indexname))
        if not exists_in(indexpath):
            ix = create_in(indexpath, schema)
        else:
            ix = open_dir(indexpath)

        last_refresh = time_now
    # LOGGING #
    publisher.info("ZMQ Indexer is Running")

    while True:
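        # each iteration indexes one paste pulled from this module's input queue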
        try:
            message = p.get_from_set()

            if message is not None:
                PST = Paste.Paste(message)
            else:
                publisher.debug("Script Indexer is idling 1s")
                time.sleep(1)
                continue

            docpath = message.split(" ", -1)[-1]
            paste = PST.get_p_content()
            print "Indexing - " + indexname + " :", docpath
            if time.time() - last_refresh > TIME_WAIT:  # avoid calculating the index's size at each message
                last_refresh = time.time()
                if check_index_size(baseindexpath, indexname) >= INDEX_SIZE_THRESHOLD*(1000*1000):
                    timestamp = int(time.time())
                    print("Creating new index", timestamp)
                    indexpath = join(baseindexpath, str(timestamp))
                    indexname = str(timestamp)
                    # update all_index
                    with open(indexRegister_path, "a") as f:
                        f.write('\n'+str(timestamp))
                    # create new dir
                    os.mkdir(indexpath)
                    ix = create_in(indexpath, schema)
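            # index the paste; update_document() replaces any existing entry with the same unique 'path'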
            if indexertype == "whoosh":
                indexwriter = ix.writer()
                indexwriter.update_document(
                    title=unicode(docpath, errors='ignore'),
                    path=unicode(docpath, errors='ignore'),
                    content=unicode(paste, errors='ignore'))
                indexwriter.commit()
        except IOError:
            print "CRC Checksum Failed on :", PST.p_path
            publisher.error('Duplicate;{};{};{};CRC Checksum Failed'.format(
                PST.p_source, PST.p_date, PST.p_name))