add: [modules] zerobinz

pull/586/head
huynenjl@gmail.com 2022-06-03 13:30:48 +00:00
parent f4102dd242
commit d816f5fa08
4 changed files with 82 additions and 1 deletions

View File

@@ -229,7 +229,8 @@ function launching_scripts {
sleep 0.1
screen -S "Script_AIL" -X screen -t "LibInjection" bash -c "cd ${AIL_BIN}/modules; ${ENV_PY} ./LibInjection.py; read x"
sleep 0.1
screen -S "Script_AIL" -X screen -t "Zerobins" bash -c "cd ${AIL_BIN}/modules; ${ENV_PY} ./Zerobins.py; read x"
sleep 0.1
##################################
# TRACKERS MODULES #

bin/modules/Zerobins.py (new executable file, 74 lines)
View File

@@ -0,0 +1,74 @@
#!/usr/bin/env python3
# -*-coding:UTF-8 -*
"""
The Zerobins Module
======================
This module spots zerobins-like services for further processing
"""
##################################
# Import External packages
##################################
import os
import sys
import time
import re
sys.path.append(os.environ['AIL_BIN'])
##################################
# Import Project packages
##################################
from modules.abstract_module import AbstractModule
from lib import crawlers


class Zerobins(AbstractModule):
    """
    Zerobins module for AIL framework
    """

    def __init__(self):
        super(Zerobins, self).__init__()

        binz = [
            r'^https:\/\/(zerobin|privatebin)\..*$',  # historical ones
        ]

        self.regex = re.compile('|'.join(binz))

        # Pending time between two computations (computeNone) in seconds
        self.pending_seconds = 10

        # Send module state to logs
        self.redis_logger.info(f'Module {self.module_name} initialized')

    def computeNone(self):
        """
        Compute when no message in queue
        """
        self.redis_logger.debug("No message in queue")

    def compute(self, message):
        """
        Compute a message in queue
        """
        url, id = message.split()

        # Extract zerobins addresses
        matching_binz = self.regex_findall(self.regex, id, url)

        if len(matching_binz) > 0:
            for bin in matching_binz:
                print("send {} to crawler".format(bin))
                crawlers.create_crawler_task(bin, screenshot=False, har=False, depth_limit=1, max_pages=1,
                                             auto_crawler=False, crawler_delta=3600, crawler_type=None,
                                             cookiejar_uuid=None, user_agent=None)

        self.redis_logger.debug("Compute message in queue")


if __name__ == '__main__':
    module = Zerobins()
    module.run()
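
A quick standalone sketch of the filtering step above, for reference: it reuses the binz pattern and the "<url> <item_id>" message format from compute(), with made-up sample messages, and plain re.match() in place of the framework's regex_findall() helper.

import re

# Same pattern as in Zerobins.__init__
binz = [r'^https:\/\/(zerobin|privatebin)\..*$']
regex = re.compile('|'.join(binz))

# Hypothetical messages in the '<url> <item_id>' format expected by compute()
messages = [
    'https://zerobin.example.org/?abcdef item/2022/06/03/sample.gz',
    'https://example.com/index.html item/2022/06/03/other.gz',
]

for message in messages:
    url, item_id = message.split()
    # The anchored pattern only matches bin-like hostnames
    if regex.match(url):
        print('would send {} to crawler (item {})'.format(url, item_id))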

View File

@@ -46,6 +46,9 @@ class AbstractModule(ABC):
        # If provided could be a namespaced channel like script:<ModuleName>
        self.redis_logger.channel = logger_channel

        # Cache key
        self.redis_cache_key = regex_helper.generate_redis_cache_key(self.module_name)
        self.max_execution_time = 30

        # Run module endlessly
        self.proceed = True
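
The two new attributes back the self.regex_findall(...) call used in Zerobins.compute() above. A rough sketch of such a wrapper method on AbstractModule, assuming a regex_helper.regex_findall(module_name, redis_cache_key, regex, obj_id, content, max_time=...) signature; that signature is an assumption, not something shown in this diff.

    # Hypothetical wrapper; the regex_helper signature used here is assumed
    def regex_findall(self, regex, obj_id, content):
        return regex_helper.regex_findall(self.module_name, self.redis_cache_key,
                                          regex, obj_id, content,
                                          max_time=self.max_execution_time)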

View File

@@ -165,3 +165,6 @@ publish = Redis_Mixer,Redis_Tags
[IP]
subscribe = Redis_Global
publish = Redis_Duplicate,Redis_Tags

[Zerobins]
subscribe = Redis_Url
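
This section wires the module into the queueing configuration: Zerobins subscribes to the Redis_Url queue and publishes nothing. A minimal, hypothetical way to inspect such a section with the standard library, purely to illustrate the file format (this is not AIL's actual config loader, and the path is an assumption):

import configparser

config = configparser.ConfigParser()
config.read('configs/modules.cfg')  # assumed location of the config file

section = config['Zerobins']
print(section.get('subscribe'))                # Redis_Url
print(section.get('publish', fallback=None))   # None: this module publishes nothing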