mirror of https://github.com/CIRCL/AIL-framework
fix: [queues] fix Mixer queue
parent 4c5b1b668f
commit 72d774f37b
@@ -228,7 +228,7 @@ class Crawler(AbstractModule):
         gzip64encoded = crawlers.get_gzipped_b64_item(item_id, entries['html'])
         # send item to Global
         relay_message = f'crawler {item_id} {gzip64encoded}'
-        self.send_message_to_queue(relay_message, 'Mixer')
+        self.send_message_to_queue(relay_message, 'Import')
 
         # Tag
         msg = f'infoleak:submission="crawler";{item_id}'
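For context, the relay message the crawler now pushes to the Import queue is a single space-separated string: the source name, the item id, and the gzipped, base64-encoded HTML. Below is a minimal sketch of how such a payload could be built, assuming get_gzipped_b64_item simply gzips and base64-encodes the page; the helper body and item id here are illustrative, not AIL's actual implementation.

import base64
import gzip

def gzip_b64(html: str) -> str:
    # Hypothetical stand-in for crawlers.get_gzipped_b64_item: gzip the raw
    # HTML, then base64-encode it so it travels as one text field.
    return base64.standard_b64encode(gzip.compress(html.encode())).decode()

item_id = 'crawled/2023/01/01/example.onion/uuid'  # illustrative item id
relay_message = f'crawler {item_id} {gzip_b64("<html>...</html>")}'
# The module would then push this to the Import queue, e.g.:
# self.send_message_to_queue(relay_message, 'Import')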
@@ -110,6 +110,8 @@ class SubmitPaste(AbstractModule):
         if nb_submit > 0:
             try:
                 uuid = self.r_serv_db.srandmember('submitted:uuid')
+                if isinstance(uuid, list):
+                    return uuid[0]
                 # Module processing with the message from the queue
                 self.redis_logger.debug(uuid)
                 self.compute(uuid)
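The new isinstance guard handles SRANDMEMBER returning either a single member or a list, depending on how it is called. A minimal sketch with the redis-py client, assuming a local Redis instance; the key contents are illustrative:

import redis

r = redis.Redis(decode_responses=True)
r.sadd('submitted:uuid', 'uuid-a', 'uuid-b')

one = r.srandmember('submitted:uuid')      # no count: a single member, e.g. 'uuid-a'
many = r.srandmember('submitted:uuid', 1)  # with a count: a list, e.g. ['uuid-b']

# Normalising both shapes to one value, in the spirit of the new guard:
uuid = one if not isinstance(one, list) else one[0]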
@@ -294,8 +296,8 @@ class SubmitPaste(AbstractModule):
         if self.r_serv_log_submit.get(f'{uuid}:nb_end') == self.r_serv_log_submit.get(f'{uuid}:nb_total'):
             self.r_serv_log_submit.set(f'{uuid}:end', 1)
 
-        self.redis_logger.debug(f' {rel_item_path} send to Global')
-        print(f' {rel_item_path} send to Global')
+        self.redis_logger.debug(f' {rel_item_path} send to Mixer')
+        print(f' {rel_item_path} send to Mixer')
         self.r_serv_log_submit.sadd(f'{uuid}:paste_submit_link', rel_item_path)
 
         curr_date = datetime.date.today()
@@ -12,10 +12,10 @@ subscribe = Redis_Import
 publish = Redis_Mixer
 
 [Sync_importer]
-publish = Redis_Mixer,Redis_Tags
+publish = Redis_Import,Redis_Tags
 
 [Importer_Json]
-publish = Redis_Mixer,Redis_Tags
+publish = Redis_Import,Redis_Tags
 
 [Global]
 subscribe = Redis_Mixer
@@ -154,11 +154,10 @@ subscribe = Redis_Global
 publish = Redis_Tags
 
 [submit_paste]
-subscribe = Redis
-publish = Redis_Mixer
+publish = Redis_Import
 
 [Crawler]
-publish = Redis_Mixer,Redis_Tags
+publish = Redis_Import,Redis_Tags
 
 [IP]
 subscribe = Redis_Global
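The sections above follow the modules.cfg convention of one section per module with comma-separated subscribe/publish queue names. A minimal sketch of reading such a file with configparser; the file path is an assumption and the real AIL queue loader may differ:

import configparser

cfg = configparser.ConfigParser()
cfg.read('configs/modules.cfg')  # path is an assumption

for section in cfg.sections():
    subscribe = cfg.get(section, 'subscribe', fallback=None)
    publish = cfg.get(section, 'publish', fallback='')
    publish_queues = [q.strip() for q in publish.split(',') if q.strip()]
    # e.g. Crawler <- None -> ['Redis_Import', 'Redis_Tags']
    print(section, '<-', subscribe, '->', publish_queues)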
@@ -24,7 +24,6 @@ from lib import item_basic
 from lib.objects.Items import Item
 from lib.objects.Screenshots import Screenshot
 from lib import Tag
-from export import Export
 
 from lib import module_extractor
 
@@ -73,14 +72,15 @@ def showItem():  # # TODO: support post
     meta['father'] = item_basic.get_item_parent(item_id)
     ## EXPORT SECTION
     # # TODO: ADD in Export SECTION
-    meta['hive_case'] = Export.get_item_hive_cases(item_id)
+    # meta['hive_case'] = Export.get_item_hive_cases(item_id)
+    meta['hive_case'] = None
 
     extracted = module_extractor.extract(item.id, content=meta['content'])
     extracted_matches = module_extractor.get_extracted_by_match(extracted)
 
     return render_template("show_item.html", bootstrap_label=bootstrap_label,
                            modal_add_tags=Tag.get_modal_add_tags(meta['id'], object_type='item'),
-                           is_hive_connected=Export.get_item_hive_cases(item_id),
+                           is_hive_connected=False,
                            meta=meta,
                            extracted=extracted, extracted_matches=extracted_matches)
 