mirror of https://github.com/CIRCL/AIL-framework
commit af8736f698
@@ -99,8 +99,8 @@ if __name__ == "__main__":
publisher.warning(to_print)
#Send to duplicate
p.populate_set_out(filepath, 'Duplicate')
#Send to BrowseWarningPaste
p.populate_set_out('credential;{}'.format(filepath), 'BrowseWarningPaste')
#Send to alertHandler
p.populate_set_out('credential;{}'.format(filepath), 'alertHandler')

#Put in form, count occurences, then send to moduleStats
creds_sites = {}

@@ -79,7 +79,7 @@ if __name__ == "__main__":
#Send to duplicate
p.populate_set_out(filename, 'Duplicate')
#send to Browse_warning_paste
p.populate_set_out('creditcard;{}'.format(filename), 'BrowseWarningPaste')
p.populate_set_out('creditcard;{}'.format(filename), 'alertHandler')
else:
publisher.info('{}CreditCard related;{}'.format(to_print, paste.p_path))
else:

@@ -32,7 +32,7 @@ def search_cve(message):
publisher.warning('{} contains CVEs'.format(paste.p_name))

#send to Browse_warning_paste
p.populate_set_out('cve;{}'.format(filepath), 'BrowseWarningPaste')
p.populate_set_out('cve;{}'.format(filepath), 'alertHandler')
#Send to duplicate
p.populate_set_out(filepath, 'Duplicate')

@@ -12,7 +12,11 @@ the same Subscriber name in both of them.

"""
import redis
import ConfigParser
try: # dirty to support python3
    import ConfigParser
except:
    import configparser
    ConfigParser = configparser
import os
import zmq
import time

@@ -26,7 +26,7 @@ def search_gpg(message):
#Send to duplicate
p.populate_set_out(message, 'Duplicate')
#send to Browse_warning_paste
p.populate_set_out('keys;{}'.format(message), 'BrowseWarningPaste')
p.populate_set_out('keys;{}'.format(message), 'alertHandler')


if __name__ == '__main__':

@@ -170,7 +170,7 @@ function launching_scripts {
sleep 0.1
screen -S "Script_AIL" -X screen -t "SQLInjectionDetection" bash -c './SQLInjectionDetection.py; read x'
sleep 0.1
screen -S "Script_AIL" -X screen -t "BrowseWarningPaste" bash -c './BrowseWarningPaste.py; read x'
screen -S "Script_AIL" -X screen -t "alertHandler" bash -c './alertHandler.py; read x'
sleep 0.1
screen -S "Script_AIL" -X screen -t "SentimentAnalysis" bash -c './SentimentAnalysis.py; read x'

@@ -72,7 +72,7 @@ if __name__ == "__main__":
publisher.warning(to_print)
#Send to duplicate
p.populate_set_out(filename, 'Duplicate')
p.populate_set_out('mail;{}'.format(filename), 'BrowseWarningPaste')
p.populate_set_out('mail;{}'.format(filename), 'alertHandler')

else:
publisher.info(to_print)

@@ -145,7 +145,7 @@ if __name__ == "__main__":
PST.p_name)
for url in fetch(p, r_cache, urls, domains_list, path):
publisher.warning('{}Checked {};{}'.format(to_print, url, PST.p_path))
p.populate_set_out('onion;{}'.format(PST.p_path), 'BrowseWarningPaste')
p.populate_set_out('onion;{}'.format(PST.p_path), 'alertHandler')
else:
publisher.info('{}Onion related;{}'.format(to_print, PST.p_path))

@@ -33,7 +33,7 @@ def search_phone(message):
print results
publisher.warning('{} contains PID (phone numbers)'.format(paste.p_name))
#send to Browse_warning_paste
p.populate_set_out('phone;{}'.format(message), 'BrowseWarningPaste')
p.populate_set_out('phone;{}'.format(message), 'alertHandler')
#Send to duplicate
p.populate_set_out(message, 'Duplicate')
stats = {}

@@ -81,7 +81,7 @@ def analyse(url, path):
#Send to duplicate
p.populate_set_out(path, 'Duplicate')
#send to Browse_warning_paste
p.populate_set_out('sqlinjection;{}'.format(path), 'BrowseWarningPaste')
p.populate_set_out('sqlinjection;{}'.format(path), 'alertHandler')
else:
print "Potential SQL injection:"
print urllib2.unquote(url)

@@ -0,0 +1,131 @@
#!/usr/bin/env python3.5
# -*-coding:UTF-8 -*

from pymisp.tools.abstractgenerator import AbstractMISPObjectGenerator
import configparser
from packages import Paste
import datetime
import json
from io import BytesIO

class AilleakObject(AbstractMISPObjectGenerator):
    def __init__(self, moduleName, p_source, p_date, p_content, p_duplicate, p_duplicate_number):
        super(AbstractMISPObjectGenerator, self).__init__('ail-leak')
        self._moduleName = moduleName
        self._p_source = p_source.split('/')[-5:]
        self._p_source = '/'.join(self._p_source)[:-3] # -3 removes .gz
        self._p_date = p_date
        self._p_content = p_content.encode('utf8')
        self._p_duplicate = p_duplicate
        self._p_duplicate_number = p_duplicate_number
        self.generate_attributes()

    def generate_attributes(self):
        self.add_attribute('type', value=self._moduleName)
        self.add_attribute('origin', value=self._p_source, type='text')
        self.add_attribute('last-seen', value=self._p_date)
        if self._p_duplicate_number > 0:
            self.add_attribute('duplicate', value=self._p_duplicate, type='text')
            self.add_attribute('duplicate_number', value=self._p_duplicate_number, type='counter')
        self._pseudofile = BytesIO(self._p_content)
        self.add_attribute('raw-data', value=self._p_source, data=self._pseudofile, type="attachment")

class ObjectWrapper:
    def __init__(self, pymisp):
        self.pymisp = pymisp
        self.currentID_date = None
        self.eventID_to_push = self.get_daily_event_id()
        cfg = configparser.ConfigParser()
        cfg.read('./packages/config.cfg')
        self.maxDuplicateToPushToMISP = cfg.getint("ailleakObject", "maxDuplicateToPushToMISP")

    def add_new_object(self, moduleName, path):
        self.moduleName = moduleName
        self.path = path
        self.paste = Paste.Paste(path)
        self.p_date = self.date_to_str(self.paste.p_date)
        self.p_source = self.paste.p_path
        self.p_content = self.paste.get_p_content().decode('utf8')

        temp = self.paste._get_p_duplicate()
        try:
            temp = temp.decode('utf8')
        except AttributeError:
            pass
        #beautifier
        temp = json.loads(temp)
        self.p_duplicate_number = len(temp) if len(temp) >= 0 else 0
        to_ret = ""
        for dup in temp[:self.maxDuplicateToPushToMISP]:
            algo = dup[0]
            path = dup[1].split('/')[-5:]
            path = '/'.join(path)[:-3] # -3 removes .gz
            perc = dup[2]
            to_ret += "{}: {} [{}%]\n".format(path, algo, perc)
        self.p_duplicate = to_ret

        self.mispObject = AilleakObject(self.moduleName, self.p_source, self.p_date, self.p_content, self.p_duplicate, self.p_duplicate_number)

    def date_to_str(self, date):
        return "{0}-{1}-{2}".format(date.year, date.month, date.day)

    def get_all_related_events(self):
        to_search = "Daily AIL-leaks"
        result = self.pymisp.search_all(to_search)
        events = []
        for e in result['response']:
            events.append({'id': e['Event']['id'], 'org_id': e['Event']['org_id'], 'info': e['Event']['info']})
        return events

    def get_daily_event_id(self):
        to_match = "Daily AIL-leaks {}".format(datetime.date.today())
        events = self.get_all_related_events()
        for dic in events:
            info = dic['info']
            e_id = dic['id']
            if info == to_match:
                print('Found: ', info, '->', e_id)
                self.currentID_date = datetime.date.today()
                return e_id
        created_event = self.create_daily_event()['Event']
        new_id = created_event['id']
        print('New event created:', new_id)
        self.currentID_date = datetime.date.today()
        return new_id


    def create_daily_event(self):
        today = datetime.date.today()
        # [0-3]
        distribution = 0
        info = "Daily AIL-leaks {}".format(today)
        # [0-2]
        analysis = 0
        # [1-4]
        threat = 3
        published = False
        org_id = None
        orgc_id = None
        sharing_group_id = None
        date = None
        event = self.pymisp.new_event(distribution, threat,
                                      analysis, info, date,
                                      published, orgc_id, org_id, sharing_group_id)
        return event

    # Publish object to MISP
    def pushToMISP(self):
        if self.currentID_date != datetime.date.today(): #refresh id
            self.eventID_to_push = self.get_daily_event_id()

        mispTYPE = 'ail-leak'
        try:
            templateID = [x['ObjectTemplate']['id'] for x in self.pymisp.get_object_templates_list() if x['ObjectTemplate']['name'] == mispTYPE][0]
        except IndexError:
            valid_types = ", ".join([x['ObjectTemplate']['name'] for x in self.pymisp.get_object_templates_list()])
            print ("Template for type %s not found! Valid types are: %s" % (mispTYPE, valid_types))
        r = self.pymisp.add_object(self.eventID_to_push, templateID, self.mispObject)
        if 'errors' in r:
            print(r)
        else:
            print('Pushed:', self.moduleName, '->', self.p_source)

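For orientation, a condensed usage sketch of this new wrapper, mirroring how the alertHandler hunks further down drive it; the module name and paste path passed to add_new_object are hypothetical placeholders, and the sketch assumes mispKEYS.py (see the sample file at the end of this diff) has been filled in and that the 'ail-leak' object template exists on the MISP instance:

from pymisp import PyMISP
from mispKEYS import misp_url, misp_key, misp_verifycert
import ailleakObject

pymisp = PyMISP(misp_url, misp_key, misp_verifycert)
wrapper = ailleakObject.ObjectWrapper(pymisp)        # finds or creates today's "Daily AIL-leaks" event
wrapper.add_new_object('credential', '/hypothetical/path/to/paste.gz')  # hypothetical module name and paste path
wrapper.pushToMISP()                                 # attaches the ail-leak object to the daily event
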
@@ -1,4 +1,4 @@
#!/usr/bin/env python2
#!/usr/bin/env python3.5
# -*-coding:UTF-8 -*

"""

@@ -20,13 +20,34 @@ from packages import Paste
from pubsublogger import publisher
from Helper import Process

from pymisp import PyMISP
import ailleakObject
import sys
sys.path.append('../')
try:
    from mispKEYS import misp_url, misp_key, misp_verifycert
    flag_misp = True
except:
    print('Misp keys not present')
    flag_misp = False

if __name__ == "__main__":
    publisher.port = 6380
    publisher.channel = "Script"

    config_section = 'BrowseWarningPaste'
    config_section = 'alertHandler'

    p = Process(config_section)
    if flag_misp:
        try:
            pymisp = PyMISP(misp_url, misp_key, misp_verifycert)
            print('Connected to MISP:', misp_url)
        except:
            flag_misp = False
            print('Not connected to MISP')

    if flag_misp:
        wrapper = ailleakObject.ObjectWrapper(pymisp)

    # port generated automatically depending on the date
    curYear = datetime.now().year

@@ -41,6 +62,7 @@ if __name__ == "__main__":
while True:
    message = p.get_from_set()
    if message is not None:
        message = message.decode('utf8') #decode because of pyhton3
        module_name, p_path = message.split(';')
        #PST = Paste.Paste(p_path)
    else:

@@ -48,12 +70,18 @@ if __name__ == "__main__":
        time.sleep(10)
        continue

    # Add in redis
    # Add in redis for browseWarningPaste
    # Format in set: WARNING_moduleName -> p_path
    key = "WARNING_" + module_name
    print key + ' -> ' + p_path
    server.sadd(key, p_path)

    publisher.info('Saved in warning paste {}'.format(p_path))
    #print 'Saved in warning paste {}'.format(p_path)
    publisher.info('Saved warning paste {}'.format(p_path))

    # Create MISP AIL-leak object and push it
    if flag_misp:
        allowed_modules = ['credential', 'phone', 'creditcards']
        if module_name in allowed_modules:
            wrapper.add_new_object(module_name, p_path)
            wrapper.pushToMISP()
        else:
            print('not pushing to MISP:', module_name, p_path)

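The Redis side of this handler stores flagged pastes in per-module sets named WARNING_<module_name>; a minimal read-back sketch of how another component could query them (the connection parameters are assumptions, not taken from this diff; the real values come from packages/config.cfg):

import redis

# Assumed host/port/db; adjust to the values in packages/config.cfg.
r = redis.StrictRedis(host='localhost', port=6379, db=0)

# Keys follow the "WARNING_" + module_name format used by alertHandler above.
for p_path in r.smembers('WARNING_credential'):
    print(p_path)
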
@@ -53,6 +53,7 @@ if __name__ == "__main__":
parser.add_argument('-p', '--port', type=int, default=5556, help='Zero MQ port')
parser.add_argument('-c', '--channel', type=str, default='102', help='Zero MQ channel')
parser.add_argument('-n', '--name', type=str, default='import_dir', help='Name of the feeder')
parser.add_argument('-s', '--seconds', type=float, default=0.2, help='Second between pastes')
parser.add_argument('--hierarchy', type=int, default=1, help='Number of parent directory forming the name')

args = parser.parse_args()

@@ -90,4 +91,4 @@ if __name__ == "__main__":
print(args.name+'>'+wanted_path)
path_to_send = args.name + '>' + wanted_path
socket.send('{} {} {}'.format(args.channel, path_to_send, base64.b64encode(messagedata)))
time.sleep(.2)
time.sleep(args.seconds)

@@ -72,6 +72,6 @@ screen -S "Script" -X screen -t "ModuleStats" bash -c './ModuleStats.py; read x'
sleep 0.1
screen -S "Script" -X screen -t "SQLInjectionDetection" bash -c './SQLInjectionDetection.py; read x'
sleep 0.1
screen -S "Script" -X screen -t "BrowseWarningPaste" bash -c './BrowseWarningPaste.py; read x'
screen -S "Script" -X screen -t "alertHandler" bash -c './alertHandler.py; read x'
sleep 0.1
screen -S "Script" -X screen -t "SentimentAnalysis" bash -c './SentimentAnalysis.py; read x'

@@ -32,10 +32,10 @@ class Date(object):
self.day = day

def substract_day(self, numDay):
import datetime
computed_date = datetime.date(int(self.year), int(self.month), int(self.day)) - datetime.timedelta(numDay)
comp_year = str(computed_date.year)
import datetime
computed_date = datetime.date(int(self.year), int(self.month), int(self.day)) - datetime.timedelta(numDay)
comp_year = str(computed_date.year)
comp_month = str(computed_date.month).zfill(2)
comp_day = str(computed_date.day).zfill(2)
return comp_year + comp_month + comp_day
return comp_year + comp_month + comp_day

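As a quick illustration of the method touched in that hunk, substract_day returns a compact YYYYMMDD string built from the zero-padded components; a minimal sketch, with made-up date values:

from Date import Date  # packages/Date.py

d = Date('2017', '07', '04')   # hypothetical year, month, day strings
print(d.substract_day(10))     # -> '20170624': year + zero-padded month + zero-padded day
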
@@ -24,8 +24,17 @@ import operator
import string
import re
import json
import ConfigParser
import cStringIO
try: # dirty to support python3
    import ConfigParser
except:
    import configparser
    ConfigParser = configparser
try: # dirty to support python3
    import cStringIO
except:
    from io import StringIO as cStringIO
import sys
sys.path.append(os.path.join(os.environ['AIL_BIN'], 'packages/'))
from Date import Date
from Hash import Hash

@@ -84,6 +93,7 @@ class Paste(object):
var = self.p_path.split('/')
self.p_date = Date(var[-4], var[-3], var[-2])
self.p_source = var[-5]
self.supposed_url = 'https://{}/{}'.format(self.p_source.replace('_pro', ''), var[-1].split('.gz')[0])

self.p_encoding = None
self.p_hash_kind = {}

@@ -130,6 +130,9 @@ register = indexdir/all_index.txt
#size in Mb
index_max_size = 2000

[ailleakObject]
maxDuplicateToPushToMISP=10

###############################################################################

# For multiple feed, add them with "," without space

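The new [ailleakObject] option is what ObjectWrapper reads at start-up to cap how many duplicate entries are pushed to MISP per paste; a minimal read-back sketch, assuming the file lives at ./packages/config.cfg as in ailleakObject.py:

import configparser

cfg = configparser.ConfigParser()
cfg.read('./packages/config.cfg')
# Same lookup as ObjectWrapper.__init__ in ailleakObject.py
max_dup = cfg.getint("ailleakObject", "maxDuplicateToPushToMISP")
print(max_dup)  # 10 with the value added in this commit
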
@@ -49,16 +49,16 @@ publish = Redis_CreditCards,Redis_Mail,Redis_Onion,Redis_Web,Redis_Credential,Re

[CreditCards]
subscribe = Redis_CreditCards
publish = Redis_Duplicate,Redis_ModuleStats,Redis_BrowseWarningPaste
publish = Redis_Duplicate,Redis_ModuleStats,Redis_alertHandler

[Mail]
subscribe = Redis_Mail
publish = Redis_Duplicate,Redis_ModuleStats,Redis_BrowseWarningPaste
publish = Redis_Duplicate,Redis_ModuleStats,Redis_alertHandler

[Onion]
subscribe = Redis_Onion
publish = Redis_ValidOnion,ZMQ_FetchedOnion,Redis_BrowseWarningPaste
#publish = Redis_Global,Redis_ValidOnion,ZMQ_FetchedOnion,Redis_BrowseWarningPaste
publish = Redis_ValidOnion,ZMQ_FetchedOnion,Redis_alertHandler
#publish = Redis_Global,Redis_ValidOnion,ZMQ_FetchedOnion,Redis_alertHandler

[DumpValidOnion]
subscribe = Redis_ValidOnion

@@ -72,17 +72,17 @@ subscribe = Redis_Url

[SQLInjectionDetection]
subscribe = Redis_Url
publish = Redis_BrowseWarningPaste,Redis_Duplicate
publish = Redis_alertHandler,Redis_Duplicate

[ModuleStats]
subscribe = Redis_ModuleStats

[BrowseWarningPaste]
subscribe = Redis_BrowseWarningPaste
[alertHandler]
subscribe = Redis_alertHandler

#[send_to_queue]
#subscribe = Redis_Cve
#publish = Redis_BrowseWarningPaste
#publish = Redis_alertHandler

[SentimentAnalysis]
subscribe = Redis_Global

@@ -92,16 +92,16 @@ subscribe = Redis_Global

[Credential]
subscribe = Redis_Credential
publish = Redis_Duplicate,Redis_ModuleStats,Redis_BrowseWarningPaste
publish = Redis_Duplicate,Redis_ModuleStats,Redis_alertHandler

[Cve]
subscribe = Redis_Cve
publish = Redis_BrowseWarningPaste,Redis_Duplicate
publish = Redis_alertHandler,Redis_Duplicate

[Phone]
subscribe = Redis_Global
publish = Redis_Duplicate,Redis_BrowseWarningPaste
publish = Redis_Duplicate,Redis_alertHandler

[Keys]
subscribe = Redis_Global
publish = Redis_Duplicate,Redis_BrowseWarningPaste
publish = Redis_Duplicate,Redis_alertHandler

@@ -11,6 +11,10 @@ sudo apt-get install python-pip python-virtualenv python-dev libfreetype6-dev \
#Needed for bloom filters
sudo apt-get install libssl-dev libfreetype6-dev python-numpy -y

#pyMISP
sudo apt-get -y install python3-pip
sudo pip3 install pymisp

# DNS deps
sudo apt-get install libadns1 libadns1-dev -y

@@ -0,0 +1,6 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-

misp_url = ''
misp_key = '' # The MISP auth key can be found on the MISP web interface under the automation section
misp_verifycert = True