Mirror of https://github.com/MISP/PyMISP
Creation of the generator object, which permits easily adding attributes
and objects to daily events stored as a MISP feed. Also adds the script
fromredis, which pops queue elements from redis and puts them into the
feed. (pull/204/head)
parent
61ce67cd1c
commit
fdaa4c790c
|
@ -0,0 +1,152 @@
|
|||
#!/usr/bin/env python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
import sys
|
||||
import json
|
||||
import argparse
|
||||
import datetime
|
||||
import time
|
||||
import redis
|
||||
|
||||
import settings
|
||||
|
||||
from generator import FeedGenerator
|
||||
|
||||
|
||||
def beautyful_sleep(sleep, additional):
    """Sleep for *sleep* seconds while drawing a textual progress bar.

    The bar is redrawn in place (carriage return) in 20 steps, with the
    *additional* status text appended after it.
    """
    steps = 20
    pause = float(sleep) / float(steps)
    for step in range(steps):
        bar = '|' * step + ' ' * (steps - step - 1)
        print('sleeping [{}]\t{}'.format(bar, additional), end='\r', sep='')
        sys.stdout.flush()
        time.sleep(pause)
|
||||
|
||||
|
||||
class RedisToMISPFeed:
    """Pop items from redis lists and feed them to a MISP feed generator.

    Each configured key name in ``settings.keyname_pop`` is watched with
    three suffixes (sighting, attribute, object).  Popped JSON items are
    dispatched to the :class:`FeedGenerator`, and the in-memory daily
    event is periodically flushed to disk.
    """

    SUFFIX_SIGH = '_sighting'
    SUFFIX_ATTR = '_attribute'
    SUFFIX_OBJ = '_object'
    SUFFIX_LIST = [SUFFIX_SIGH, SUFFIX_ATTR, SUFFIX_OBJ]

    def __init__(self):
        # Redis connection parameters come from settings.py
        self.host = settings.host
        self.port = settings.port
        self.db = settings.db
        self.serv = redis.StrictRedis(self.host, self.port, self.db,
                                      decode_responses=True)

        # Next time the in-memory event should be written to disk
        self.flushing_interval = settings.flushing_interval
        self.flushing_next = time.time() + self.flushing_interval

        self.generator = FeedGenerator()

        # One redis list per (keyname, suffix) pair
        self.keynames = [k + s
                         for k in settings.keyname_pop
                         for s in self.SUFFIX_LIST]

        self.keynameError = settings.keyname_error

        self.last_flush = datetime.datetime.now()
        self.update_last_action("Init system")

    def consume(self):
        """Main loop: drain every watched redis list, then flush/sleep."""
        self.update_last_action("Started consuming redis")
        while True:
            flag_empty = True
            for key in self.keynames:
                while True:
                    data = self.pop(key)
                    if data is None:
                        break
                    try:
                        self.perform_action(key, data)
                    except Exception as error:
                        self.save_error_to_redis(error, data)
                    flag_empty = False

            # Write the event on disk only when idle, the flushing
            # interval has elapsed, and something happened since the
            # previous flush.
            if (flag_empty and self.flushing_next <= time.time()
                    and self.last_action_time > self.last_flush):
                self.update_last_action('Flushed on disk')
                self.generator.flush_event()
                self.flushing_next = time.time() + self.flushing_interval
                self.last_flush = datetime.datetime.now()

            beautyful_sleep(5, self.format_last_action())

    def pop(self, key):
        """Pop one item from the redis list *key* and JSON-decode it.

        Returns None when the list is empty.  On a decode error the raw
        payload is stored in the error list and returned as-is.
        """
        popped = self.serv.rpop(key)
        if popped is None:
            return None
        try:
            popped = json.loads(popped)
        except ValueError as error:
            # BUG FIX: the original had the identical except clause twice;
            # the second was unreachable and has been removed.
            self.save_error_to_redis(error, popped)
        return popped

    def perform_action(self, key, data):
        """Dispatch *data* to the generator based on the key suffix."""
        # sighting
        if key.endswith(self.SUFFIX_SIGH):
            # BUG FIX: add_sighting_on_attribute requires two positional
            # arguments; calling it with none always raised TypeError.
            # Assumes items carry 'type'/'uuid' keys — TODO confirm with
            # the producer side.
            sight_type = data.pop('type', None)
            attr_uuid = data.pop('uuid', None)
            if self.generator.add_sighting_on_attribute(sight_type,
                                                        attr_uuid, **data):
                self.update_last_action("Added sighting")
            else:
                self.update_last_action("Error while adding sighting")

        # attribute
        elif key.endswith(self.SUFFIX_ATTR):
            attr_type = data.pop('type')
            attr_value = data.pop('value')
            if self.generator.add_attribute_to_event(attr_type,
                                                     attr_value, **data):
                self.update_last_action("Added attribute")
            else:
                self.update_last_action("Error while adding attribute")

        # object
        elif key.endswith(self.SUFFIX_OBJ):
            # create the MISP object
            obj_name = data.pop('name')
            if self.generator.add_object_to_event(obj_name, **data):
                self.update_last_action("Added object")
            else:
                self.update_last_action("Error while adding object")

        else:
            # Suffix not valid
            self.update_last_action("Redis key suffix not supported")

    # OTHERS
    def update_last_action(self, action):
        """Record the latest action and its timestamp (for display)."""
        self.last_action = action
        self.last_action_time = datetime.datetime.now()

    def format_last_action(self):
        """Return a one-line, human-readable status string."""
        since_flush = datetime.datetime.now() - self.last_flush
        return "Last action: [{}] @ {}.\tLast flush: {} ago".format(
            self.last_action,
            self.last_action_time.isoformat().replace('T', ' '),
            str(since_flush).split('.')[0]  # drop microseconds
        )

    def get_buffer_state(self):
        """Return the number of pending redis items per item category."""
        buffer_state = {'attribute': 0, 'object': 0, 'sighting': 0}
        for k in self.keynames:
            _, suffix = k.rsplit('_', 1)
            buffer_state[suffix] += self.serv.llen(k)
        return buffer_state

    def save_error_to_redis(self, error, item):
        """Push a failed item together with its error on the error list."""
        to_push = {'error': str(error), 'item': str(item)}
        print('Error:', str(error), '\nOn adding:', item)
        # BUG FIX: redis cannot store a dict directly (redis-py raises
        # DataError); serialize to JSON first.
        self.serv.lpush(self.keynameError, json.dumps(to_push))
|
||||
|
||||
if __name__ == '__main__':
    # Command line is parsed only so that --help prints the description;
    # all actual configuration lives in settings.py.
    parser = argparse.ArgumentParser(
        description="Pop item fom redis and add it to the MISP feed. "
                    "By default, each action are pushed into a daily "
                    "named event. Configuration taken from the file "
                    "settings.py.")
    parser.parse_args()

    feeder = RedisToMISPFeed()
    feeder.consume()
|
|
@ -1,346 +0,0 @@
|
|||
#!/usr/bin/env python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
import sys
|
||||
import json
|
||||
import os
|
||||
import hashlib
|
||||
import argparse
|
||||
import datetime, time
|
||||
import uuid
|
||||
import threading
|
||||
import redis
|
||||
|
||||
from redis import StrictRedis as Redis
|
||||
import settings
|
||||
|
||||
from pymisp import MISPEvent, MISPAttribute
|
||||
from pymisp.tools import GenericObjectGenerator
|
||||
|
||||
evtObj=thr=None # animation thread
|
||||
|
||||
def get_system_templates():
    """Collect every MISP-Object template definition shipped with pymisp.

    Walks the ``misp-objects/objects`` directory bundled with the
    installed pymisp package and returns a dict mapping each object name
    to its parsed JSON definition.
    """
    pymisp_dir = os.path.abspath(os.path.dirname(sys.modules['pymisp'].__file__))
    misp_objects_path = os.path.join(pymisp_dir, 'data', 'misp-objects', 'objects')

    templates = {}
    for root, _dirs, files in os.walk(misp_objects_path, topdown=False):
        # the directory name is the object name
        obj_name = root.split('/')[-1]
        for def_file in files:
            with open(os.path.join(root, def_file), 'r') as fh:
                templates[obj_name] = json.load(fh)
    return templates
|
||||
|
||||
|
||||
def gen_uuid():
    """Generate a random UUID4 and return its string representation."""
    return '{}'.format(uuid.uuid4())
|
||||
|
||||
def processing_animation(evtObj, buffer_state, refresh_rate=5):
    """Draw a spinner with the remaining buffer sizes on one terminal line.

    Runs until *evtObj* (a threading.Event) is set, redrawing
    ``refresh_rate`` times per second, then erases the line and returns.
    """
    state_str = 'attributes: {}, objects: {}, sightings: {}'.format(
        buffer_state['attribute'], buffer_state['object'],
        buffer_state['sighting'])
    spinner = "/-\\|"
    tick = 0
    while not evtObj.is_set():
        tick += 1
        print("Remaining: { %s }\t" % state_str + spinner[tick % 4],
              end="\r", sep="")
        sys.stdout.flush()
        time.sleep(1.0 / float(refresh_rate))
    # overwrite the last characters before leaving
    print(" " * (len(state_str) + 20), end="\r", sep="")
    sys.stdout.flush()
|
||||
|
||||
def beautyful_sleep(sleep):
    """Sleep for *sleep* seconds while drawing a 20-step progress bar
    redrawn in place on the current terminal line."""
    steps = 20
    pause = float(sleep) / float(steps)
    for step in range(steps):
        bar = '|' * step + ' ' * (steps - step - 1)
        print('sleeping [{}]'.format(bar), end='\r', sep='')
        sys.stdout.flush()
        time.sleep(pause)
|
||||
|
||||
|
||||
|
||||
|
||||
class RedisToMISPFeed:
    """Pop items from redis lists and accumulate them into a daily MISP
    event, stored on disk as a MISP feed (manifest + per-event JSON +
    quick hash lookup file)."""

    SUFFIX_SIGH = '_sighting'
    SUFFIX_ATTR = '_attribute'
    SUFFIX_OBJ = '_object'
    SUFFIX_LIST = [SUFFIX_SIGH, SUFFIX_ATTR, SUFFIX_OBJ]

    def __init__(self):
        # Redis connection, from settings.py
        self.host = settings.host
        self.port = settings.port
        self.db = settings.db
        self.serv = redis.StrictRedis(self.host, self.port, self.db, decode_responses=True)

        # One redis list per (keyname, suffix) pair
        self.keynames = []
        for k in settings.keyname_pop:
            for s in self.SUFFIX_LIST:
                self.keynames.append(k + s)

        # get all templates
        self.sys_templates = get_system_templates()

        self.sleep = settings.sleep
        self.flushing_interval = settings.flushing_interval
        self.flushing_next = time.time() + self.flushing_interval

        self.manifest = {}
        self.attributeHashes = []

        self.keynameError = settings.keyname_error
        self.allow_animation = settings.allow_animation

        self.daily_event_name = settings.daily_event_name + ' {}'
        _, self.current_event_uuid, self.event_name = self.get_last_event_from_manifest()
        self.current_date = datetime.date.today()
        self.current_event = self.get_event_from_id(self.current_event_uuid)

        # Animation thread handles are shared with the module level so the
        # main entry point can stop them on exit.
        global evtObj, thr
        self.evtObj = evtObj
        self.thr = thr

    def consume(self):
        """Main loop: drain every watched list, then flush and sleep."""
        while True:
            flag_empty = True
            for key in self.keynames:
                while True:
                    data = self.pop(key)
                    if data is None:
                        break
                    try:
                        self.perform_action(key, data)
                    except Exception as error:
                        self.save_error_to_redis(error, data)
                    flag_empty = False

            if flag_empty and self.flushing_next <= time.time():
                self.flush_event()
                # BUG FIX: was assigned to a local variable, so the
                # flushing deadline never advanced and the event was
                # re-flushed on every idle iteration.
                self.flushing_next = time.time() + self.flushing_interval

            beautyful_sleep(5)

    def pop(self, key):
        """Pop one item from *key* and JSON-decode it; None when empty."""
        popped = self.serv.rpop(key)
        if popped is None:
            return None
        try:
            popped = json.loads(popped)
        except ValueError as error:
            # BUG FIX: a duplicate (unreachable) except clause was removed.
            self.save_error_to_redis(error, popped)
        return popped

    def perform_action(self, key, data):
        """Add *data* to the current event depending on the key suffix."""
        self.update_daily_event_id()

        # sighting (not implemented yet)
        if key.endswith(self.SUFFIX_SIGH):
            pass

        # attribute
        elif key.endswith(self.SUFFIX_ATTR):
            attr_type = data.pop('type')
            attr_value = data.pop('value')
            self.current_event.add_attribute(attr_type, attr_value, **data)
            self.add_hash(attr_type, attr_value)

        # object
        elif key.endswith(self.SUFFIX_OBJ):
            # create the MISP object
            obj_name = data.pop('name')
            misp_object = GenericObjectGenerator(obj_name)
            for k, v in data.items():
                if k not in self.sys_templates[obj_name]['attributes']:
                    # attribute not in the object template definition:
                    # add it with type text
                    misp_object.add_attribute(k, **{'value': v, 'type': 'text'})
                else:
                    misp_object.add_attribute(k, **{'value': v})

            self.current_event.add_object(misp_object)
            for attr_type, attr_value in data.items():
                self.add_hash(attr_type, attr_value)

        else:
            # BUG FIX: NoValidKey was never defined anywhere, so raising
            # it produced a NameError; use a builtin exception instead
            # (consume() catches Exception either way).
            raise ValueError("Can't define action to perform")

    def add_hash(self, attr_type, attr_value):
        """Index the attribute value(s) into the quick-lookup hash list."""
        if ('|' in attr_type or attr_type == 'malware-sample'):
            # composite attribute: hash both parts separately
            split = attr_value.split('|')
            self.attributeHashes.append([hashlib.md5(str(split[0]).encode("utf-8")).hexdigest(), self.current_event_uuid])
            self.attributeHashes.append([hashlib.md5(str(split[1]).encode("utf-8")).hexdigest(), self.current_event_uuid])
        else:
            self.attributeHashes.append([hashlib.md5(str(attr_value).encode("utf-8")).hexdigest(), self.current_event_uuid])

    # Manifest
    def init_manifest(self):
        """Create an empty manifest file, then a first daily event."""
        with open(os.path.join(settings.outputdir, 'manifest.json'), 'w'):
            pass
        # create new event and save manifest
        self.create_daily_event()

    def flush_event(self, new_event=None):
        """Write the current (or given) event to disk and save hashes."""
        print('Writting event on disk'+' '*20)
        self.print_processing()
        if new_event is not None:
            event_uuid = new_event['uuid']
            event = new_event
        else:
            event_uuid = self.current_event_uuid
            event = self.current_event

        with open(os.path.join(settings.outputdir, event_uuid + '.json'), 'w') as eventFile:
            eventFile.write(event.to_json())

        self.saveHashes()
        if self.allow_animation:
            # stop the animation thread started by print_processing()
            self.evtObj.set()
            self.thr.join()

    def saveManifest(self):
        """Persist the in-memory manifest; exit the process on failure."""
        try:
            with open(os.path.join(settings.outputdir, 'manifest.json'), 'w') as manifestFile:
                manifestFile.write(json.dumps(self.manifest))
            print('Manifest saved')
        except Exception as e:
            print(e)
            sys.exit('Could not create the manifest file.')

    def saveHashes(self):
        """Append pending attribute hashes to hashes.csv; exit on failure."""
        if len(self.attributeHashes) == 0:
            return False
        try:
            with open(os.path.join(settings.outputdir, 'hashes.csv'), 'a') as hashFile:
                for element in self.attributeHashes:
                    hashFile.write('{},{}\n'.format(element[0], element[1]))
            self.attributeHashes = []
            print('Hash saved' + ' '*30)
        except Exception as e:
            print(e)
            sys.exit('Could not create the quick hash lookup file.')

    def __addEventToManifest(self, event):
        """Return the manifest entry (dict) describing *event*."""
        event_dict = event.to_dict()['Event']
        tags = []
        for eventTag in event_dict.get('EventTag', []):
            tags.append({'name': eventTag['Tag']['name'],
                         'colour': eventTag['Tag']['colour']})
        return {
            'Orgc': event_dict.get('Orgc', []),
            'Tag': tags,
            'info': event_dict['info'],
            'date': event_dict['date'],
            'analysis': event_dict['analysis'],
            'threat_level_id': event_dict['threat_level_id'],
            'timestamp': event_dict.get('timestamp', int(time.time()))
        }

    # Retreive last event from the manifest, if the manifest doesn't exists
    # or if it is empty, initialize it.
    def get_last_event_from_manifest(self):
        try:
            with open(os.path.join(settings.outputdir, 'manifest.json'), 'r') as f:
                man = json.load(f)
                dated_events = []
                for event_uuid, event_json in man.items():
                    # add events to manifest
                    self.manifest[event_uuid] = event_json
                    dated_events.append([event_json['date'], event_uuid, event_json['info']])
                # sort by date then by event name
                dated_events.sort(key=lambda k: (k[0], k[2]), reverse=True)
                return dated_events[0]
        except FileNotFoundError:
            print('Manifest not found, generating a fresh one')
            self.init_manifest()
            return self.get_last_event_from_manifest()

    # DAILY
    def update_daily_event_id(self):
        """Roll over to a new daily event when the date has changed."""
        if self.current_date != datetime.date.today():  # create new event
            # save current event on disk
            self.flush_event()
            # BUG FIX: create_daily_event is a method; it was called as a
            # bare function, which raised NameError on day rollover.
            self.current_event = self.create_daily_event()
            self.current_event_uuid = self.current_event.get('uuid')
            self.event_name = self.current_event.info

    def get_event_from_id(self, event_uuid):
        """Load a MISPEvent from its on-disk JSON file."""
        with open(os.path.join(settings.outputdir, '%s.json' % event_uuid), 'r') as f:
            event_dict = json.load(f)['Event']
        event = MISPEvent()
        event.from_dict(**event_dict)
        return event

    def create_daily_event(self):
        """Create, persist and register today's event; return it."""
        new_uuid = gen_uuid()
        today = str(datetime.date.today())
        event_dict = {
            'uuid': new_uuid,
            'id': len(self.manifest)+1,
            'Tag': settings.Tag,
            'info': self.daily_event_name.format(today),
            'analysis': settings.analysis,  # [0-2]
            'threat_level_id': settings.threat_level_id,  # [1-4]
            'published': settings.published,
            'date': today
        }
        event = MISPEvent()
        event.from_dict(**event_dict)

        # reference org
        org_dict = {}
        org_dict['name'] = settings.org_name
        # BUG FIX: the key was misspelled 'uui', so the org UUID was
        # silently dropped from generated events.
        org_dict['uuid'] = settings.org_uuid
        event['Orgc'] = org_dict

        # save event on disk
        self.flush_event(new_event=event)
        # add event to manifest
        self.manifest[event['uuid']] = self.__addEventToManifest(event)
        self.saveManifest()
        return event

    # OTHERS
    def get_buffer_state(self):
        """Return the number of pending items per item category."""
        buffer_state = {'attribute': 0, 'object': 0, 'sighting': 0}
        for k in self.keynames:
            _, suffix = k.rsplit('_', 1)
            buffer_state[suffix] += self.serv.llen(k)
        return buffer_state

    def print_processing(self):
        """Start the processing-animation thread (when enabled)."""
        if self.allow_animation:
            buff_states = self.get_buffer_state()
            self.evtObj = threading.Event()
            self.thr = threading.Thread(name="processing-animation", target=processing_animation, args=(self.evtObj, buff_states, ))
            self.thr.start()

    def save_error_to_redis(self, error, item):
        """Push a failed item with its error message on the error list."""
        to_push = {'error': str(error), 'item': str(item)}
        print('Error:', str(error), '\nOn adding:', item)
        # BUG FIX: redis cannot store a dict directly (redis-py raises
        # DataError); serialize to JSON first.
        self.serv.lpush(self.keynameError, json.dumps(to_push))
|
||||
|
||||
|
||||
if __name__ == '__main__':
    # Command line parsed only for --help; configuration is in settings.py.
    parser = argparse.ArgumentParser(description="Pop item fom redis and add it to the MISP feed. By default, each action are pushed into a daily named event. Configuration taken from the file settings.py.")
    args = parser.parse_args()

    feeder = RedisToMISPFeed()
    try:
        feeder.consume()
    except (KeyboardInterrupt, SystemExit):
        # stop the animation thread cleanly before exiting
        if evtObj is not None:
            evtObj.set()
            thr.join()
|
|
@ -0,0 +1,236 @@
|
|||
#!/usr/bin/env python
|
||||
|
||||
import sys
|
||||
import json
|
||||
import os
|
||||
import hashlib
|
||||
import datetime
|
||||
import time
|
||||
import uuid
|
||||
|
||||
from pymisp import MISPEvent
|
||||
from pymisp.tools import GenericObjectGenerator
|
||||
|
||||
import settings
|
||||
|
||||
|
||||
def get_system_templates():
    """Fetch all MISP-Object templates present on the local system.

    Walks the ``misp-objects/objects`` directory bundled with the
    installed pymisp package.

    Returns:
        dict: maps each MISP-Object template name to its parsed JSON
        definition.
    """
    misp_objects_path = os.path.join(
        os.path.abspath(os.path.dirname(sys.modules['pymisp'].__file__)),
        'data', 'misp-objects', 'objects')

    templates = {}
    for root, dirs, files in os.walk(misp_objects_path, topdown=False):
        for def_file in files:
            # BUG FIX: use os.path.basename instead of splitting on '/',
            # which breaks on Windows path separators.
            obj_name = os.path.basename(root)
            template_path = os.path.join(root, def_file)
            with open(template_path, 'r') as f:
                templates[obj_name] = json.load(f)
    return templates
|
||||
|
||||
|
||||
def gen_uuid():
    """Generate a random UUID4 and return its string representation."""
    return '{}'.format(uuid.uuid4())
|
||||
|
||||
|
||||
class FeedGenerator:
    """Generate a MISP feed on disk from incoming attributes and objects.

    Items are appended to a daily MISP event; the event file, the feed
    manifest and a quick hash lookup file are all written under
    ``settings.outputdir``.
    """

    def __init__(self):
        self.sys_templates = get_system_templates()

        self.manifest = {}
        self.attributeHashes = []

        self.daily_event_name = settings.daily_event_name + ' {}'
        _, self.current_event_uuid, self.event_name = self.get_last_event_from_manifest()
        self.current_date = datetime.date.today()
        self.current_event = self._get_event_from_id(self.current_event_uuid)

    def add_sighting_on_attribute(self, sight_type, attr_uuid, **data):
        """Add a sighting on an attribute.

        Sighting support is not implemented yet: the daily event is
        rolled over, then False is returned.
        """
        self.update_daily_event_id()
        return False

    def add_attribute_to_event(self, attr_type, attr_value, **attr_data):
        """Add a single attribute to the current daily event.

        Returns True once the attribute is added and hashed.
        """
        self.update_daily_event_id()
        self.current_event.add_attribute(attr_type, attr_value, **attr_data)
        self._add_hash(attr_type, attr_value)
        return True

    def add_object_to_event(self, obj_name, **data):
        """Build a MISP object named *obj_name* from *data* and add it to
        the current daily event.

        Returns False when the template is unknown, True otherwise.
        """
        self.update_daily_event_id()
        if obj_name not in self.sys_templates:
            print('Unkown object template')
            return False

        # BUG FIX: create the MISP object only after the template check;
        # previously GenericObjectGenerator was constructed first, which
        # could raise before the guarded `return False` was reached.
        misp_object = GenericObjectGenerator(obj_name)
        for k, v in data.items():
            # attribute is not in the object template definition
            if k not in self.sys_templates[obj_name]['attributes']:
                # add it with type text
                misp_object.add_attribute(k, **{'value': v, 'type': 'text'})
            else:
                misp_object.add_attribute(k, **{'value': v})

        self.current_event.add_object(misp_object)
        for attr_type, attr_value in data.items():
            self._add_hash(attr_type, attr_value)

        return True

    # Cache
    def _add_hash(self, attr_type, attr_value):
        """Index the attribute value(s) into the quick-lookup hash list."""
        if ('|' in attr_type or attr_type == 'malware-sample'):
            # composite attribute: hash both parts separately
            split = attr_value.split('|')
            self.attributeHashes.append([
                hashlib.md5(str(split[0]).encode("utf-8")).hexdigest(),
                self.current_event_uuid
            ])
            self.attributeHashes.append([
                hashlib.md5(str(split[1]).encode("utf-8")).hexdigest(),
                self.current_event_uuid
            ])
        else:
            self.attributeHashes.append([
                hashlib.md5(str(attr_value).encode("utf-8")).hexdigest(),
                self.current_event_uuid
            ])

    # Manifest
    def _init_manifest(self):
        """Create an empty manifest file, then a first daily event."""
        with open(os.path.join(settings.outputdir, 'manifest.json'), 'w'):
            pass
        # create new event and save manifest
        self.create_daily_event()

    def flush_event(self, new_event=None):
        """Write the current (or given) event to disk and save hashes."""
        print('Writting event on disk'+' '*20)
        if new_event is not None:
            event_uuid = new_event['uuid']
            event = new_event
        else:
            event_uuid = self.current_event_uuid
            event = self.current_event

        with open(os.path.join(settings.outputdir, event_uuid+'.json'), 'w') as eventFile:
            eventFile.write(event.to_json())

        self.save_hashes()

    def save_manifest(self):
        """Persist the in-memory manifest; exit the process on failure."""
        try:
            with open(os.path.join(settings.outputdir, 'manifest.json'), 'w') as manifestFile:
                manifestFile.write(json.dumps(self.manifest))
            print('Manifest saved')
        except Exception as e:
            print(e)
            sys.exit('Could not create the manifest file.')

    def save_hashes(self):
        """Append pending attribute hashes to hashes.csv; exit on failure."""
        if len(self.attributeHashes) == 0:
            return False
        try:
            with open(os.path.join(settings.outputdir, 'hashes.csv'), 'a') as hashFile:
                for element in self.attributeHashes:
                    hashFile.write('{},{}\n'.format(element[0], element[1]))
            self.attributeHashes = []
            print('Hash saved' + ' '*30)
        except Exception as e:
            print(e)
            sys.exit('Could not create the quick hash lookup file.')

    def _addEventToManifest(self, event):
        """Return the manifest entry (dict) describing *event*."""
        event_dict = event.to_dict()['Event']
        tags = []
        for eventTag in event_dict.get('EventTag', []):
            tags.append({'name': eventTag['Tag']['name'],
                         'colour': eventTag['Tag']['colour']})
        return {
            'Orgc': event_dict.get('Orgc', []),
            'Tag': tags,
            'info': event_dict['info'],
            'date': event_dict['date'],
            'analysis': event_dict['analysis'],
            'threat_level_id': event_dict['threat_level_id'],
            'timestamp': event_dict.get('timestamp', int(time.time()))
        }

    def get_last_event_from_manifest(self):
        """Retreive last event from the manifest, if the manifest doesn't
        exists or if it is empty, initialize it.
        """
        try:
            with open(os.path.join(settings.outputdir, 'manifest.json'), 'r') as f:
                man = json.load(f)
                dated_events = []
                for event_uuid, event_json in man.items():
                    # add events to manifest
                    self.manifest[event_uuid] = event_json
                    dated_events.append([
                        event_json['date'],
                        event_uuid, event_json['info']
                    ])
                # Sort by date then by event name
                dated_events.sort(key=lambda k: (k[0], k[2]), reverse=True)
                return dated_events[0]
        except FileNotFoundError:
            print('Manifest not found, generating a fresh one')
            self._init_manifest()
            return self.get_last_event_from_manifest()

    # DAILY
    def update_daily_event_id(self):
        """Roll over to a new daily event when the date has changed."""
        if self.current_date != datetime.date.today():  # create new event
            # save current event on disk
            self.flush_event()
            self.current_event = self.create_daily_event()
            self.current_event_uuid = self.current_event.get('uuid')
            self.event_name = self.current_event.info

    def _get_event_from_id(self, event_uuid):
        """Load a MISPEvent from its on-disk JSON file."""
        with open(os.path.join(settings.outputdir, '%s.json' % event_uuid), 'r') as f:
            event_dict = json.load(f)['Event']
        event = MISPEvent()
        event.from_dict(**event_dict)
        return event

    def create_daily_event(self):
        """Create, persist and register today's event; return it."""
        new_uuid = gen_uuid()
        today = str(datetime.date.today())
        event_dict = {
            'uuid': new_uuid,
            'id': len(self.manifest)+1,
            'Tag': settings.Tag,
            'info': self.daily_event_name.format(today),
            'analysis': settings.analysis,  # [0-2]
            'threat_level_id': settings.threat_level_id,  # [1-4]
            'published': settings.published,
            'date': today
        }
        event = MISPEvent()
        event.from_dict(**event_dict)

        # reference org
        org_dict = {}
        org_dict['name'] = settings.org_name
        # BUG FIX: the key was misspelled 'uui', so the org UUID was
        # silently dropped from generated events.
        org_dict['uuid'] = settings.org_uuid
        event['Orgc'] = org_dict

        # save event on disk
        self.flush_event(new_event=event)
        # add event to manifest
        self.manifest[event['uuid']] = self._addEventToManifest(event)
        self.save_manifest()
        return event
|
|
@ -1,3 +1,4 @@
|
|||
""" REDIS RELATED """
|
||||
# Your redis server
|
||||
host='127.0.0.1'
|
||||
port=6379
|
||||
|
@ -6,6 +7,14 @@ db=0
|
|||
#keyname_pop='misp_feed_generator_key'
|
||||
keyname_pop=['cowrie']
|
||||
|
||||
# OTHERS
|
||||
## How frequent the event should be written on disk
|
||||
flushing_interval=5*60
|
||||
## The redis list keyname in which to put items that generated an error
|
||||
keyname_error='feed-generation-error'
|
||||
|
||||
""" FEED GENERATOR CONFIGURATION """
|
||||
|
||||
# The output dir for the feed. This will drop a lot of files, so make
|
||||
# sure that you use a directory dedicated to the feed
|
||||
outputdir = 'output'
|
||||
|
@ -16,27 +25,25 @@ outputdir = 'output'
|
|||
org_name='myOrg'
|
||||
### Your organisation UUID
|
||||
org_uuid=''
|
||||
### The daily event name to be used in MISP. (e.g. honeypot_1, will produce each day an event of the form honeypot_1 dd-mm-yyyy)
|
||||
### The daily event name to be used in MISP.
|
||||
### (e.g. honeypot_1, will produce each day an event of the form honeypot_1 dd-mm-yyyy)
|
||||
daily_event_name='PyMISP default event name'
|
||||
|
||||
## Optional
|
||||
analysis=0
|
||||
threat_level_id=3
|
||||
published=False
|
||||
Tag=[{
|
||||
Tag=[
|
||||
{
|
||||
"colour": "#ffffff",
|
||||
"name": "tlp:white"
|
||||
},
|
||||
{
|
||||
"colour": "#ff00ff",
|
||||
"name": "my:custom:feed"
|
||||
]
|
||||
}
|
||||
]
|
||||
|
||||
# Others
|
||||
## Redis pooling time
|
||||
sleep=60
|
||||
## The redis list keyname in which to put items that generated an error
|
||||
keyname_error='feed-generation-error'
|
||||
## Display an animation while adding element to MISP
|
||||
allow_animation=True
|
||||
## How frequent the event should be written on disk
|
||||
flushing_interval=5*60
|
||||
|
|
Loading…
Reference in New Issue