#!/usr/bin/env python3
# -*- coding: utf-8 -*-

"""
The Mixer Module
================

This module consumes the Redis list created by the ZMQ_Feed_Q module.

It takes all the feeds provided in the config.

Depending on the configuration, this module will process the feed as follows:

    operation_mode 1: "Avoid any duplicate from any source"
        - The module maintains a record of the content of each paste (as a hash)
        - If the content is new, process it
        - Else, do not process it but keep track of it for duplicate statistics

    operation_mode 2: "Keep duplicates coming from different sources"
        - The module maintains a list of the names given to the paste by the feeders
        - If the name has not yet been seen, process it
        - Else, if the saved content associated with the paste is not the same, process it
        - Else, do not process it but keep track of it for duplicate statistics

    operation_mode 3: "Don't check for duplicated content"
        - Simply do not bother to check if it is a duplicate
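
A hypothetical example of an incoming message (the feeder name and paste name
below are illustrative only; the payload is the gzip64encoded content):

    feeder2>>2018/05/04/paste_example.gz H4sIA...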

Note that the hash of the content is defined as sha1(gzip64encoded).
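
The operation mode and the TTL of the deduplication keys are read from the
Module_Mixer section of the config file; an illustrative stanza (the values
are examples, not defaults):

    [Module_Mixer]
    operation_mode = 1
    ttl_duplicate = 86400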

Every chunk of data coming from a named feed can be sent to a pre-processing
module before going to the global module. The mapping is done via the variable
FEED_QUEUE_MAPPING.
"""

import base64
import configparser
import hashlib
import os
import time

import redis
from pubsublogger import publisher

from Helper import Process

# CONFIG #
refresh_time = 30
FEED_QUEUE_MAPPING = {"feeder2": "preProcess1"}  # Map a feeder name to a pre-processing module
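# e.g. every paste coming from "feeder2" is routed to the "preProcess1" queue instead of the default 'Mixer' output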

if __name__ == '__main__':
    publisher.port = 6380
    publisher.channel = 'Script'

    config_section = 'Mixer'

    p = Process(config_section)

    configfile = os.path.join(os.environ['AIL_BIN'], 'packages/config.cfg')
    if not os.path.exists(configfile):
        raise Exception('Unable to find the configuration file. '
                        'Did you set environment variables? '
                        'Or activate the virtualenv.')

    cfg = configparser.ConfigParser()
    cfg.read(configfile)

    # REDIS #
    server = redis.StrictRedis(
        host=cfg.get("Redis_Mixer_Cache", "host"),
        port=cfg.getint("Redis_Mixer_Cache", "port"),
        db=cfg.getint("Redis_Mixer_Cache", "db"),
        decode_responses=True)

    server_cache = redis.StrictRedis(
        host=cfg.get("Redis_Log_submit", "host"),
        port=cfg.getint("Redis_Log_submit", "port"),
        db=cfg.getint("Redis_Log_submit", "db"),
        decode_responses=True)

    # LOGGING #
    publisher.info("Feed Script started to receive & publish.")

    # OTHER CONFIG #
    operation_mode = cfg.getint("Module_Mixer", "operation_mode")
    ttl_key = cfg.getint("Module_Mixer", "ttl_duplicate")
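    # ttl_key is used as the Redis expiry (in seconds) for the deduplication records below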

    # STATS #
    processed_paste = 0
    processed_paste_per_feeder = {}
    duplicated_paste_per_feeder = {}
    time_1 = time.time()

    print('Operation mode ' + str(operation_mode))

    while True:
        message = p.get_from_set()
        if message is not None:
            splitted = message.split()
            if len(splitted) == 2:
                complete_paste, gzip64encoded = splitted
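
                # complete_paste is expected to look like "feeder_name>>paste_name"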
                try:
                    feeder_name, paste_name = complete_paste.split('>>')
                    feeder_name = feeder_name.replace(" ", "")
                except ValueError as e:
                    feeder_name = "unnamed_feeder"
                    paste_name = complete_paste

                # Processed paste
                processed_paste += 1
                try:
                    processed_paste_per_feeder[feeder_name] += 1
                except KeyError as e:
                    # new feeder
                    processed_paste_per_feeder[feeder_name] = 1
                    duplicated_paste_per_feeder[feeder_name] = 0

                relay_message = "{0} {1}".format(paste_name, gzip64encoded)
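
                # Deduplication key: sha1 of the gzip64encoded payload (see module docstring)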
                digest = hashlib.sha1(gzip64encoded.encode('utf8')).hexdigest()

                # Avoid any duplicate coming from any source
                if operation_mode == 1:
                    if server.exists(digest):  # Content already exists
                        # STATS
                        duplicated_paste_per_feeder[feeder_name] += 1
                    else:  # New content
                        # populate Global OR populate another set based on the feeder_name
                        if feeder_name in FEED_QUEUE_MAPPING:
                            p.populate_set_out(relay_message, FEED_QUEUE_MAPPING[feeder_name])
                        else:
                            p.populate_set_out(relay_message, 'Mixer')
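
                        # Remember which feeder(s) sent this content; the key expires after ttl_duplicate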
                        server.sadd(digest, feeder_name)
                        server.expire(digest, ttl_key)

                # Keep duplicates coming from different sources
                elif operation_mode == 2:
                    # Filter to avoid duplicates
                    content = server.get('HASH_' + paste_name)
                    if content is None:
                        # New content
                        # Store in redis for filtering
                        server.set('HASH_' + paste_name, digest)
                        server.sadd(paste_name, feeder_name)
                        server.expire(paste_name, ttl_key)
                        server.expire('HASH_' + paste_name, ttl_key)

                        # populate Global OR populate another set based on the feeder_name
                        if feeder_name in FEED_QUEUE_MAPPING:
                            p.populate_set_out(relay_message, FEED_QUEUE_MAPPING[feeder_name])
                        else:
                            p.populate_set_out(relay_message, 'Mixer')

                    else:
                        if digest != content:
|
2016-12-23 10:31:26 +01:00
|
|
|
# Same paste name but different content
|
|
|
|
#STATS
|
|
|
|
duplicated_paste_per_feeder[feeder_name] += 1
|
|
|
|
server.sadd(paste_name, feeder_name)
|
|
|
|
server.expire(paste_name, ttl_key)

                            # populate Global OR populate another set based on the feeder_name
                            if feeder_name in FEED_QUEUE_MAPPING:
                                p.populate_set_out(relay_message, FEED_QUEUE_MAPPING[feeder_name])
                            else:
                                p.populate_set_out(relay_message, 'Mixer')

                        else:
                            # Already processed
                            # Keep track of processed pastes
                            # STATS
                            duplicated_paste_per_feeder[feeder_name] += 1
                            continue

                else:
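                    # operation_mode 3: forward everything without checking for duplicates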
                    # populate Global OR populate another set based on the feeder_name
                    if feeder_name in FEED_QUEUE_MAPPING:
                        p.populate_set_out(relay_message, FEED_QUEUE_MAPPING[feeder_name])
                    else:
                        p.populate_set_out(relay_message, 'Mixer')

            else:
                # TODO Store the name of the empty paste inside a Redis-list.
                print("Empty Paste: not processed")
                publisher.debug("Empty Paste: {0} not processed".format(message))

        else:
            print("Empty Queues: Waiting...")

            if int(time.time() - time_1) > refresh_time:
                # update internal feeder
                list_feeder = server_cache.hkeys("mixer_cache:list_feeder")
                if list_feeder:
                    for feeder in list_feeder:
                        count = server_cache.hget("mixer_cache:list_feeder", feeder)
                        count = int(count) if count is not None else 0
                        processed_paste_per_feeder[feeder] = processed_paste_per_feeder.get(feeder, 0) + count
                        processed_paste = processed_paste + count

                print(processed_paste_per_feeder)
                to_print = 'Mixer; ; ; ;mixer_all All_feeders Processed {0} paste(s) in {1}sec'.format(processed_paste, refresh_time)
                print(to_print)
                publisher.info(to_print)
                processed_paste = 0

                for feeder, count in processed_paste_per_feeder.items():
                    to_print = 'Mixer; ; ; ;mixer_{0} {0} Processed {1} paste(s) in {2}sec'.format(feeder, count, refresh_time)
                    print(to_print)
                    publisher.info(to_print)
                    processed_paste_per_feeder[feeder] = 0

                for feeder, count in duplicated_paste_per_feeder.items():
                    to_print = 'Mixer; ; ; ;mixer_{0} {0} Duplicated {1} paste(s) in {2}sec'.format(feeder, count, refresh_time)
                    print(to_print)
                    publisher.info(to_print)
                    duplicated_paste_per_feeder[feeder] = 0

                time_1 = time.time()

                # delete internal feeder list
                server_cache.delete("mixer_cache:list_feeder")

            time.sleep(0.5)
            continue