Merge pull request #404 from WimpyMan/master

Added: IP matching module
pull/417/head
Thirion Aurélien 2019-10-04 13:50:53 +02:00 committed by GitHub
commit dc25cd98ff
4 changed files with 92 additions and 0 deletions

81 bin/IPAddress.py Executable file

@@ -0,0 +1,81 @@
#!/usr/bin/env python3
# -*-coding:UTF-8 -*
"""
The IP Module
======================
This module consumes the global channel.
It first runs a regex to find IPv4 addresses and then matches those IPs
against the configured IP ranges.
The list of IP ranges is expected to be in CIDR format (e.g. 192.168.0.0/16)
and should be defined in the config.cfg file, under the [IP] section.
"""
import time
import re
from ipaddress import IPv4Network, IPv4Address

from pubsublogger import publisher
from packages import Paste
from Helper import Process


def search_ip(message):
    # Note: relies on the global Process instance `p` created in __main__
    paste = Paste.Paste(message)
    content = paste.get_p_content()

    # Regex to find IPv4 addresses; the ^ anchor combined with re.MULTILINE
    # only matches addresses located at the beginning of a line
    reg_ip = re.compile(r'^(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)', flags=re.MULTILINE)

    # List of the regex results in the Paste; may be empty
    results = reg_ip.findall(content)
    matching_ips = []

    for res in results:
        address = IPv4Address(res)
        for network in ip_networks:
            if address in network:
                matching_ips.append(address)

    if len(matching_ips) > 0:
        print('{} contains {} IPs'.format(paste.p_name, len(matching_ips)))
        publisher.warning('{} contains {} IPs'.format(paste.p_name, len(matching_ips)))

        # Tag the message with the IP detection taxonomy
        msg = 'infoleak:automatic-detection="ip";{}'.format(message)
        p.populate_set_out(msg, 'Tags')

        # Send to duplicate detection
        p.populate_set_out(message, 'Duplicate')


if __name__ == '__main__':
    # If you wish to use another port or channel, do not forget to run a
    # subscriber accordingly (see launch_logs.sh)

    # Port of the Redis instance used by pubsublogger
    publisher.port = 6380
    # Script is the default channel used for the modules
    publisher.channel = 'Script'

    # Section name in bin/packages/modules.cfg
    config_section = 'IP'

    # Setup the I/O queues
    p = Process(config_section)

    # Parse the configured CIDR ranges, skipping empty entries so that an
    # unset "networks" option does not crash the module at startup
    ip_networks = []
    for network in p.config.get("IP", "networks").split(","):
        network = network.strip()
        if network:
            ip_networks.append(IPv4Network(network))

    # Send a description of the module to the logger
    publisher.info("Run IP module")

    # Endless loop getting messages from the input queue
    while True:
        # Get one message from the input queue
        message = p.get_from_set()

        if message is None:
            publisher.debug("{} queue is empty, waiting".format(config_section))
            time.sleep(1)
            continue

        # Do something with the message from the queue
        search_ip(message)
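
The core of the module is the containment test `address in network` from the
standard-library ipaddress module. A minimal standalone sketch of the same
matching logic, using made-up networks and sample paste content:

# Sketch only: same regex and containment check as search_ip(),
# with illustrative networks and content.
import re
from ipaddress import IPv4Network, IPv4Address

reg_ip = re.compile(r'^(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}'
                    r'(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)', flags=re.MULTILINE)
ip_networks = [IPv4Network('192.168.0.0/16'), IPv4Network('10.0.0.0/8')]
content = '192.168.34.12 leaked\n8.8.8.8 is public\n10.1.2.3 internal'

for res in reg_ip.findall(content):
    address = IPv4Address(res)
    if any(address in network for network in ip_networks):
        print('match:', address)
# Prints 192.168.34.12 and 10.1.2.3; 8.8.8.8 is outside both ranges.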

bin/LAUNCH.sh

@@ -213,6 +213,8 @@ function launching_scripts {
screen -S "Script_AIL" -X screen -t "UpdateBackground" bash -c "cd ${AIL_BIN}; ${ENV_PY} ./update-background.py; read x"
sleep 0.1
screen -S "Script_AIL" -X screen -t "SubmitPaste" bash -c "cd ${AIL_BIN}; ${ENV_PY} ./submit_paste.py; read x"
sleep 0.1
screen -S "Script_AIL" -X screen -t "IPAddress" bash -c "cd ${AIL_BIN}; ${ENV_PY} ./IPAddress.py; read x"
}

bin/packages/config.cfg

@@ -266,3 +266,8 @@ default_crawler_closespider_pagecount = 50
default_crawler_user_agent = Mozilla/5.0 (Windows NT 6.1; rv:60.0) Gecko/20100101 Firefox/60.0
splash_url = http://127.0.0.1
splash_port = 8050-8052

[IP]
# Comma-separated list of CIDR ranges that you wish to be alerted on, e.g.:
#networks = 192.168.34.0/24,10.0.0.0/8,192.168.33.0/24
networks =
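
Since IPv4Network() raises ValueError on malformed input, a candidate
"networks" value can be sanity-checked before restarting the module. A quick
sketch, reusing the example CIDRs from the comment above:

# Sketch only: verify that every entry in a candidate "networks" value parses.
from ipaddress import IPv4Network

networks = '192.168.34.0/24,10.0.0.0/8,192.168.33.0/24'
for cidr in networks.split(','):
    print(IPv4Network(cidr.strip()))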

bin/packages/modules.cfg

@@ -132,3 +132,7 @@ publish = Redis_Mixer
[Crawler]
subscribe = Redis_Crawler
publish = Redis_Mixer,Redis_Tags

[IP]
subscribe = Redis_Global
publish = Redis_Duplicate,Redis_Tags
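
For context on the Redis_Tags output: search_ip() publishes messages of the
form 'infoleak:automatic-detection="ip";<paste_path>'. A sketch of how a
downstream consumer could split such a message (the helper and paste path
below are illustrative, not part of this PR):

# Illustrative only: split a Tags-queue message into tag and paste path.
def split_tag_message(msg):
    tag, paste_path = msg.split(';', 1)
    return tag, paste_path

tag, paste = split_tag_message('infoleak:automatic-detection="ip";/some/paste/path')
print(tag)    # infoleak:automatic-detection="ip"
print(paste)  # /some/paste/path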