#!/usr/bin/env python3
# -*-coding:UTF-8 -*
"""
|
2017-05-09 11:13:16 +02:00
|
|
|
The Tokenize Module
|
|
|
|
===================
|
2014-08-06 11:43:40 +02:00
|
|
|
|
2014-08-19 19:07:07 +02:00
|
|
|
This module is consuming the Redis-list created by the ZMQ_PubSub_Tokenize_Q
|
|
|
|
Module.
|
2014-08-06 11:43:40 +02:00
|
|
|
|
2014-08-19 19:07:07 +02:00
|
|
|
It tokenize the content of the paste and publish the result in the following
|
|
|
|
format:
|
2014-08-06 11:43:40 +02:00
|
|
|
channel_name+' '+/path/of/the/paste.gz+' '+tokenized_word+' '+scoring
|
|
|
|
|
|
|
|
..seealso:: Paste method (_get_top_words)
|
|
|
|
|
|
|
|
..note:: Module ZMQ_Something_Q and ZMQ_Something are closely bound, always put
|
|
|
|
the same Subscriber name in both of them.
|
|
|
|
|
|
|
|
Requirements
|
|
|
|
------------
|
|
|
|
|
|
|
|
*Need running Redis instances. (Redis)
|
|
|
|
*Need the ZMQ_PubSub_Tokenize_Q Module running to be able to work properly.
|
|
|
|
|
|
|
|
"""
|
2014-08-14 17:55:18 +02:00
|
|
|
import time
|
|
|
|
from packages import Paste
|
2014-08-06 11:43:40 +02:00
|
|
|
from pubsublogger import publisher
|
|
|
|
|
2014-08-29 19:37:56 +02:00
|
|
|
from Helper import Process
|
2017-01-12 08:32:55 +01:00
|
|
|
import signal
|
|
|
|
|
|
|
|
class TimeoutException(Exception):
    """Raised by the SIGALRM handler when paste processing exceeds its time budget."""


def timeout_handler(signum, frame):
    """Signal handler: convert a delivered SIGALRM into a TimeoutException.

    :param signum: signal number delivered (expected: signal.SIGALRM)
    :param frame: current stack frame at delivery time (unused)
    :raises TimeoutException: always, so callers can bound work with try/except
    """
    raise TimeoutException()


# Route SIGALRM to the handler so signal.alarm(n) can bound processing time.
signal.signal(signal.SIGALRM, timeout_handler)
|
2014-08-06 11:43:40 +02:00
|
|
|
|
2014-08-19 19:07:07 +02:00
|
|
|
if __name__ == "__main__":
|
2014-08-22 17:35:40 +02:00
|
|
|
publisher.port = 6380
|
2014-08-06 11:43:40 +02:00
|
|
|
publisher.channel = "Script"
|
|
|
|
|
2014-08-29 19:37:56 +02:00
|
|
|
config_section = 'Tokenize'
|
|
|
|
p = Process(config_section)
|
2014-08-06 11:43:40 +02:00
|
|
|
|
2014-08-19 19:07:07 +02:00
|
|
|
# LOGGING #
|
2014-08-29 19:37:56 +02:00
|
|
|
publisher.info("Tokeniser started")
|
2014-08-06 11:43:40 +02:00
|
|
|
|
|
|
|
while True:
|
2014-08-29 19:37:56 +02:00
|
|
|
message = p.get_from_set()
|
2018-04-16 14:50:04 +02:00
|
|
|
print(message)
|
2014-08-14 17:55:18 +02:00
|
|
|
if message is not None:
|
2014-08-29 19:37:56 +02:00
|
|
|
paste = Paste.Paste(message)
|
2017-01-12 08:32:55 +01:00
|
|
|
signal.alarm(5)
|
|
|
|
try:
|
|
|
|
for word, score in paste._get_top_words().items():
|
|
|
|
if len(word) >= 4:
|
|
|
|
msg = '{} {} {}'.format(paste.p_path, word, score)
|
|
|
|
p.populate_set_out(msg)
|
|
|
|
except TimeoutException:
|
2018-06-29 10:02:29 +02:00
|
|
|
p.incr_module_timeout_statistic()
|
|
|
|
print ("{0} processing timeout".format(paste.p_path))
|
|
|
|
continue
|
2017-01-12 08:32:55 +01:00
|
|
|
else:
|
|
|
|
signal.alarm(0)
|
2014-08-06 11:43:40 +02:00
|
|
|
else:
|
|
|
|
publisher.debug("Tokeniser is idling 10s")
|
|
|
|
time.sleep(10)
|
2018-04-16 14:50:04 +02:00
|
|
|
print("Sleeping")
|