#!/usr/bin/env python2
# -*-coding:UTF-8 -*
"""
|
|
|
|
The ZMQ_Sub_Indexer Module
|
|
|
|
============================
|
|
|
|
|
|
|
|
The ZMQ_Sub_Indexer modules is fetching the list of files to be processed
|
|
|
|
and index each file with a full-text indexer (Whoosh until now).
|
|
|
|
|
|
|
|
"""
|
import redis
import ConfigParser
import time
from packages import Paste
from packages import ZMQ_PubSub
from pubsublogger import publisher

from whoosh.index import create_in, exists_in, open_dir
from whoosh.fields import Schema, TEXT, ID

import os
# Path to the shared configuration file read by main() and passed to the
# ZMQ subscriber so both use the same settings.
configfile = './packages/config.cfg'
def main():
|
|
|
|
"""Main Function"""
|
|
|
|
|
|
|
|
# CONFIG #
|
|
|
|
cfg = ConfigParser.ConfigParser()
|
|
|
|
cfg.read(configfile)
|
|
|
|
|
|
|
|
# Redis
|
|
|
|
r_serv1 = redis.StrictRedis(
|
2014-08-14 17:55:18 +02:00
|
|
|
host=cfg.get("Redis_Queues", "host"),
|
|
|
|
port=cfg.getint("Redis_Queues", "port"),
|
|
|
|
db=cfg.getint("Redis_Queues", "db"))
|
2014-08-11 11:04:09 +02:00
|
|
|
|
|
|
|
# Indexer configuration - index dir and schema setup
|
|
|
|
indexpath = cfg.get("Indexer", "path")
|
|
|
|
indexertype = cfg.get("Indexer", "type")
|
|
|
|
if indexertype == "whoosh":
|
2014-08-14 17:55:18 +02:00
|
|
|
schema = Schema(title=TEXT(stored=True), path=ID(stored=True, unique=True), content=TEXT)
|
2014-08-11 11:04:09 +02:00
|
|
|
if not os.path.exists(indexpath):
|
|
|
|
os.mkdir(indexpath)
|
|
|
|
if not exists_in(indexpath):
|
|
|
|
ix = create_in(indexpath, schema)
|
|
|
|
else:
|
|
|
|
ix = open_dir(indexpath)
|
|
|
|
|
|
|
|
# LOGGING #
|
|
|
|
publisher.channel = "Script"
|
|
|
|
|
|
|
|
# ZMQ #
|
2014-08-14 17:55:18 +02:00
|
|
|
# Subscriber
|
2014-08-11 11:04:09 +02:00
|
|
|
channel = cfg.get("PubSub_Global", "channel")
|
|
|
|
subscriber_name = "indexer"
|
|
|
|
subscriber_config_section = "PubSub_Global"
|
|
|
|
|
2014-08-14 17:55:18 +02:00
|
|
|
sub = ZMQ_PubSub.ZMQSub(configfile, subscriber_config_section, channel, subscriber_name)
|
2014-08-11 11:04:09 +02:00
|
|
|
|
|
|
|
# FUNCTIONS #
|
|
|
|
publisher.info("""ZMQ Indexer is Running""")
|
|
|
|
|
|
|
|
while True:
|
2014-08-14 17:55:18 +02:00
|
|
|
try:
|
|
|
|
message = sub.get_msg_from_queue(r_serv1)
|
2014-08-11 11:04:09 +02:00
|
|
|
|
2014-08-14 17:55:18 +02:00
|
|
|
if message is not None:
|
|
|
|
PST = Paste.Paste(message.split(" ", -1)[-1])
|
2014-08-11 11:04:09 +02:00
|
|
|
else:
|
|
|
|
if r_serv1.sismember("SHUTDOWN_FLAGS", "Indexer"):
|
|
|
|
r_serv1.srem("SHUTDOWN_FLAGS", "Indexer")
|
|
|
|
publisher.warning("Shutdown Flag Up: Terminating.")
|
|
|
|
break
|
|
|
|
publisher.debug("Script Indexer is idling 10s")
|
|
|
|
time.sleep(1)
|
|
|
|
continue
|
2014-08-14 17:55:18 +02:00
|
|
|
docpath = message.split(" ", -1)[-1]
|
2014-08-11 11:04:09 +02:00
|
|
|
paste = PST.get_p_content()
|
|
|
|
print "Indexing :", docpath
|
|
|
|
if indexertype == "whoosh":
|
|
|
|
indexwriter = ix.writer()
|
2014-08-14 17:55:18 +02:00
|
|
|
indexwriter.update_document(
|
|
|
|
title=unicode(docpath, errors='ignore'),
|
|
|
|
path=unicode(docpath, errors='ignore'),
|
|
|
|
content=unicode(paste, errors='ignore'))
|
2014-08-11 11:04:09 +02:00
|
|
|
indexwriter.commit()
|
|
|
|
except IOError:
|
2014-08-14 17:55:18 +02:00
|
|
|
print "CRC Checksum Failed on :", PST.p_path
|
|
|
|
publisher.error('Duplicate;{};{};{};CRC Checksum Failed'.format(PST.p_source, PST.p_date, PST.p_name))
|
|
|
|
pass
if __name__ == "__main__":
|
|
|
|
main()