AIL-framework/bin/ZMQ_PubSub_Tokenize.py


#!/usr/bin/env python2
# -*-coding:UTF-8 -*
"""
The ZMQ_PubSub_Tokenize Module
==============================
This module consumes the Redis list created by the ZMQ_PubSub_Tokenize_Q
module.
It tokenizes the content of the paste and publishes the result in the
following format:
channel_name+' '+/path/of/the/paste.gz+' '+tokenized_word+' '+scoring
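
Example of a published message (illustrative values only; the actual channel
name comes from the configuration file):

    channel_name /path/of/the/paste.gz token 3
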
..seealso:: Paste method (_get_top_words)
..note:: The modules ZMQ_Something_Q and ZMQ_Something are closely bound;
    always put the same subscriber name in both of them.
Requirements
------------
* Running Redis instances.
* The ZMQ_PubSub_Tokenize_Q module must be running for this module to work
  properly.
"""
import time
from packages import Paste
from pubsublogger import publisher
import Helper
if __name__ == "__main__":
    publisher.channel = "Script"
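
    # Subscriber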
    config_section = 'PubSub_Longlines'
    config_channel = 'channel_1'
    subscriber_name = 'tokenize'

    h = Helper.Redis_Queues(config_section, config_channel, subscriber_name)

    # Publisher
    pub_config_section = 'PubSub_Words'
    pub_config_channel = 'channel_0'
    h.zmq_pub(pub_config_section, pub_config_channel)

    # LOGGING #
    publisher.info("Tokeniser subscribed to channel {}".format(h.sub_channel))

    while True:
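        # Pop the next message from the Redis queue filled by ZMQ_PubSub_Tokenize_Q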
        message = h.redis_rpop()
        print message

        if message is not None:
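            # The paste path is the last whitespace-separated field of the message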
            paste = Paste.Paste(message.split(" ", -1)[-1])
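            # Publish every token of at least 4 characters together with its score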
            for word, score in paste._get_top_words().items():
                if len(word) >= 4:
                    h.zmq_pub_send('{} {} {}'.format(paste.p_path, word,
                                                     score))
        else:
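            # Empty queue: stop if the shutdown flag is set, otherwise idle before polling again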
            if h.redis_queue_shutdown():
                print "Shutdown Flag Up: Terminating"
                publisher.warning("Shutdown Flag Up: Terminating.")
                break
            publisher.debug("Tokeniser is idling 10s")
            time.sleep(10)
            print "sleeping"