mirror of https://github.com/MISP/PyMISP
Rework of the feed generator
- use objects, attribute tags and object references correctly
- generate quickhashlist for fast lookups / future MISP caching mechanism
- saner structure (herp-a-derp)

pull/141/head
parent 0e123af546
commit 195cd6d7fc
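The quickhashlist mentioned in the commit message is written to hashes.csv: one `md5,event-uuid` pair per attribute value, with composite values (types containing `|`, or malware-sample) split and hashed separately, as shown in the diff below. A minimal sketch of how a feed consumer could use that file for fast lookups; the helper names and the example value are illustrative, not part of this commit:

import hashlib
from collections import defaultdict

def load_hash_index(path='hashes.csv'):
    """Build an MD5 -> [event UUID, ...] index from the generated hashes.csv."""
    index = defaultdict(list)
    with open(path) as f:
        for line in f:
            if not line.strip():
                continue
            md5, event_uuid = line.strip().split(',', 1)
            index[md5].append(event_uuid)
    return index

def lookup(index, value):
    """Return the UUIDs of feed events that contain this attribute value."""
    digest = hashlib.md5(value.encode('utf-8')).hexdigest()
    return index.get(digest, [])

# Example: check an observed IOC against the feed without parsing every event file.
index = load_hash_index()
print(lookup(index, '198.51.100.23'))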
@@ -4,28 +4,79 @@
 import sys
 import json
 import os
+import hashlib
 from pymisp import PyMISP
 from settings import url, key, ssl, outputdir, filters, valid_attribute_distribution_levels
 
-objectsToSave = {'Orgc': {'fields': ['name', 'uuid'],
-                          'multiple': False,
-                          },
-                 'Tag': {'fields': ['name', 'colour', 'exportable'],
-                         'multiple': True,
-                         },
-                 'Attribute': {'fields': ['uuid', 'value', 'category', 'type',
-                                          'comment', 'data', 'timestamp', 'to_ids'],
-                               'multiple': True,
-                               },
-                 }
-
-fieldsToSave = ['uuid', 'info', 'threat_level_id', 'analysis',
-                'timestamp', 'publish_timestamp', 'published',
-                'date']
+objectsFields = {
+    'Attribute': {
+        'uuid',
+        'value',
+        'category',
+        'type',
+        'comment',
+        'data',
+        'timestamp',
+        'to_ids'
+    },
+    'Event': {
+        'uuid',
+        'info',
+        'threat_level_id',
+        'analysis',
+        'timestamp',
+        'publish_timestamp',
+        'published',
+        'date'
+    },
+    'Object': {
+        'name',
+        'meta-category',
+        'description',
+        'template_uuid',
+        'template_version',
+        'uuid',
+        'timestamp',
+        'distribution',
+        'sharing_group_id',
+        'comment'
+    },
+    'ObjectReference': {
+        'uuid',
+        'timestamp',
+        'relationship_type',
+        'comment',
+        'object_uuid',
+        'referenced_uuid'
+    },
+    'Orgc': {
+        'name',
+        'uuid'
+    },
+    'Tag': {
+        'name',
+        'colour',
+        'exportable'
+    }
+}
+
+objectsToSave = {
+    'Orgc': {},
+    'Tag': {},
+    'Attribute': {
+        'Tag': {}
+    },
+    'Object': {
+        'Attribute': {
+            'Tag': {}
+        },
+        'ObjectReference': {}
+    }
+}
 
 valid_attribute_distributions = []
 
+attributeHashes = []
+
 
 def init():
     # If we have an old settings.py file then this variable won't exist
@@ -36,61 +87,65 @@ def init():
         valid_attribute_distributions = ['0', '1', '2', '3', '4', '5']
     return PyMISP(url, key, ssl)
 
 
+def recursiveExtract(container, containerType, leaf, eventUuid):
+    temp = {}
+    if containerType in ['Attribute', 'Object']:
+        if (__blockByDistribution(container)):
+            return False
+    for field in objectsFields[containerType]:
+        if field in container:
+            temp[field] = container[field]
+    if (containerType == 'Attribute'):
+        global attributeHashes
+        if ('|' in container['type'] or container['type'] == 'malware-sample'):
+            split = container['value'].split('|')
+            attributeHashes.append([hashlib.md5(split[0].encode("utf-8")).hexdigest(), eventUuid])
+            attributeHashes.append([hashlib.md5(split[1].encode("utf-8")).hexdigest(), eventUuid])
+        else:
+            attributeHashes.append([hashlib.md5(container['value'].encode("utf-8")).hexdigest(), eventUuid])
+    children = leaf.keys()
+    for childType in children:
+        childContainer = container.get(childType)
+        if (childContainer):
+            if (type(childContainer) is dict):
+                temp[childType] = recursiveExtract(childContainer, childType, leaf[childType], eventUuid)
+            else:
+                temp[childType] = []
+                for element in childContainer:
+                    processed = recursiveExtract(element, childType, leaf[childType], eventUuid)
+                    if (processed):
+                        temp[childType].append(processed)
+    return temp
+
+
 def saveEvent(misp, uuid):
+    result = {}
     event = misp.get_event(uuid)
     if not event.get('Event'):
         print('Error while fetching event: {}'.format(event['message']))
         sys.exit('Could not create file for event ' + uuid + '.')
-    event = __cleanUpEvent(event)
+    event['Event'] = recursiveExtract(event['Event'], 'Event', objectsToSave, event['Event']['uuid'])
     event = json.dumps(event)
     eventFile = open(os.path.join(outputdir, uuid + '.json'), 'w')
     eventFile.write(event)
     eventFile.close()
 
 
-def __cleanUpEvent(event):
-    temp = event
-    event = {'Event': {}}
-    __cleanupEventFields(event, temp)
-    __cleanupEventObjects(event, temp)
-    return event
-
-
-def __cleanupEventFields(event, temp):
-    for field in fieldsToSave:
-        if field in temp['Event'].keys():
-            event['Event'][field] = temp['Event'][field]
-    return event
-
-
-def __blockAttributeByDistribution(attribute):
-    if attribute['distribution'] not in valid_attribute_distributions:
+def __blockByDistribution(element):
+    if element['distribution'] not in valid_attribute_distributions:
         return True
     return False
 
 
-def __cleanupEventObjects(event, temp):
-    for objectType in objectsToSave.keys():
-        if objectsToSave[objectType]['multiple'] is True:
-            if objectType in temp['Event']:
-                for objectInstance in temp['Event'][objectType]:
-                    if objectType is 'Attribute':
-                        if __blockAttributeByDistribution(objectInstance):
-                            continue
-                    tempObject = {}
-                    for field in objectsToSave[objectType]['fields']:
-                        if field in objectInstance.keys():
-                            tempObject[field] = objectInstance[field]
-                    if objectType not in event['Event']:
-                        event['Event'][objectType] = []
-                    event['Event'][objectType].append(tempObject)
-        else:
-            tempObject = {}
-            for field in objectsToSave[objectType]['fields']:
-                tempObject[field] = temp['Event'][objectType][field]
-            event['Event'][objectType] = tempObject
-    return event
+def saveHashes():
+    if not attributeHashes:
+        return False
+    try:
+        hashFile = open(os.path.join(outputdir, 'hashes.csv'), 'w')
+        for element in attributeHashes:
+            hashFile.write('{},{}\n'.format(element[0], element[1]))
+        hashFile.close()
+    except Exception as e:
+        print(e)
+        sys.exit('Could not create the quick hash lookup file.')
 
 
 def saveManifest(manifest):
@@ -138,4 +193,6 @@ if __name__ == '__main__':
         print("Event " + str(counter) + "/" + str(total) + " exported.")
         counter += 1
     saveManifest(manifest)
-    print('Manifest saved. Feed creation completed.')
+    print('Manifest saved.')
+    saveHashes()
+    print('Hashes saved. Feed creation completed.')
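For reference, the import line at the top of the script expects a local settings.py providing the connection, output and filtering parameters. A minimal sketch with placeholder values; the variable names come from the import, everything else here is an assumption to be adapted to your own MISP instance:

# settings.py -- placeholder values only
url = 'https://misp.local'            # base URL of the MISP instance to export from
key = 'YOUR_AUTOMATION_KEY'           # API key of the exporting user
ssl = True                            # verify the TLS certificate
outputdir = 'output'                  # where the event JSON files, manifest and hashes.csv are written
filters = {}                          # assumed search filters, e.g. {'tag': 'tlp:white'}
valid_attribute_distribution_levels = ['0', '1', '2', '3', '4', '5']  # distributions allowed into the feed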