# -*- coding: utf-8 -*-
import json
from collections import defaultdict
from datetime import datetime
from pymisp import MISPAttribute, MISPEvent, MISPObject
from joe_mapping import (
    arch_type_mapping, domain_object_mapping,
    dropped_file_mapping, dropped_hash_mapping, elf_object_mapping,
    elf_section_flags_mapping, file_object_fields, file_object_mapping,
    file_references_mapping, network_behavior_fields,
    network_connection_object_mapping, pe_object_fields, pe_object_mapping,
    pe_section_object_mapping, process_object_fields, protocols,
    registry_references_mapping, regkey_object_mapping, signerinfo_object_mapping)


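# Converts a Joe Sandbox JSON analysis report into MISP objects, attributes and
# galaxy tags, keeping track of the relationships between the created elements.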
class JoeParser():
    def __init__(self, config):
        self.misp_event = MISPEvent()
        self.references = defaultdict(list)
        self.attributes = defaultdict(lambda: defaultdict(set))
        self.process_references = {}

        self.import_executable = config["import_executable"]
        self.create_mitre_attack = config["mitre_attack"]

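    # Entry point: store the report, parse the static information (file or URL),
    # then the behavior sections, and finally the collected attributes and tags.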
    def parse_data(self, data):
        self.data = data
        if self.analysis_type() == "file":
            self.parse_fileinfo()
        else:
            self.parse_url_analysis()

        self.parse_system_behavior()
        self.parse_network_behavior()
        self.parse_screenshot()
        self.parse_network_interactions()
        self.parse_dropped_files()

        if self.attributes:
            self.handle_attributes()

        if self.create_mitre_attack:
            self.parse_mitre_attack()

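    # Attach the references collected during parsing to their source MISP objects.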
    def build_references(self):
        for misp_object in self.misp_event.objects:
            object_uuid = misp_object.uuid
            if object_uuid in self.references:
                for reference in self.references[object_uuid]:
                    misp_object.add_reference(**reference)

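    # Turn the deduplicated attribute values into MISP attributes and queue the
    # references pointing to them.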
    def handle_attributes(self):
        for attribute_type, attribute in self.attributes.items():
            for attribute_value, references in attribute.items():
                attribute_uuid = self.create_attribute(attribute_type, attribute_value)
                for reference in references:
                    source_uuid, relationship = reference
                    self.references[source_uuid].append(
                        dict(referenced_uuid=attribute_uuid, relationship_type=relationship)
                    )

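    # Build a file object per dropped file, with its hashes and a reference from
    # the process that dropped it.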
    def parse_dropped_files(self):
        droppedinfo = self.data['droppedinfo']
        if droppedinfo:
            for droppedfile in droppedinfo['hash']:
                file_object = MISPObject('file')
                for key, mapping in dropped_file_mapping.items():
                    if droppedfile.get(key) is not None:
                        attribute = {'value': droppedfile[key], 'to_ids': False}
                        attribute.update(mapping)
                        file_object.add_attribute(**attribute)
                if droppedfile['@malicious'] == 'true':
                    file_object.add_attribute(
                        **{
                            'type': 'text',
                            'object_relation': 'state',
                            'value': 'Malicious',
                            'to_ids': False
                        }
                    )
                for h in droppedfile['value']:
                    hash_type = dropped_hash_mapping[h['@algo']]
                    file_object.add_attribute(
                        **{
                            'type': hash_type,
                            'object_relation': hash_type,
                            'value': h['$'],
                            'to_ids': False
                        }
                    )
                self.misp_event.add_object(file_object)
                reference_key = (int(droppedfile['@targetid']), droppedfile['@process'])
                if reference_key in self.process_references:
                    self.references[self.process_references[reference_key]].append(
                        {
                            'referenced_uuid': file_object.uuid,
                            'relationship_type': 'drops'
                        }
                    )

    def parse_mitre_attack(self):
        mitreattack = self.data.get('mitreattack', {})
        if mitreattack:
            for tactic in mitreattack['tactic']:
                if tactic.get('technique'):
                    for technique in tactic['technique']:
                        self.misp_event.add_tag(f'misp-galaxy:mitre-attack-pattern="{technique["name"]} - {technique["id"]}"')

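    # Group captured packets into connections: a single network-connection object per
    # connection when each protocol sits on a distinct layer, otherwise one per protocol.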
    def parse_network_behavior(self):
        network = self.data['behavior']['network']
        connections = defaultdict(lambda: defaultdict(set))
        for protocol, layer in protocols.items():
            if network.get(protocol):
                for packet in network[protocol]['packet']:
                    timestamp = datetime.strptime(self.parse_timestamp(packet['timestamp']), '%b %d, %Y %H:%M:%S.%f')
                    connections[tuple(packet.get(field) for field in network_behavior_fields)][protocol].add(timestamp)
        for connection, data in connections.items():
            attributes = self.prefetch_attributes_data(connection)
            if len(data.keys()) == len(set(protocols[protocol] for protocol in data.keys())):
                network_connection_object = MISPObject('network-connection')
                for attribute in attributes:
                    network_connection_object.add_attribute(**attribute)
                network_connection_object.add_attribute(
                    **{
                        'type': 'datetime',
                        'object_relation': 'first-packet-seen',
                        'value': min(tuple(min(timestamp) for timestamp in data.values())),
                        'to_ids': False
                    }
                )
                for protocol in data.keys():
                    network_connection_object.add_attribute(
                        **{
                            'type': 'text',
                            'object_relation': f'layer{protocols[protocol]}-protocol',
                            'value': protocol,
                            'to_ids': False
                        }
                    )
                self.misp_event.add_object(network_connection_object)
                self.references[self.analysisinfo_uuid].append(
                    dict(referenced_uuid=network_connection_object.uuid, relationship_type='initiates')
                )
            else:
                for protocol, timestamps in data.items():
                    network_connection_object = MISPObject('network-connection')
                    for attribute in attributes:
                        network_connection_object.add_attribute(**attribute)
                    network_connection_object.add_attribute(
                        **{
                            'type': 'datetime',
                            'object_relation': 'first-packet-seen',
                            'value': min(timestamps),
                            'to_ids': False
                        }
                    )
                    network_connection_object.add_attribute(
                        **{
                            'type': 'text',
                            'object_relation': f'layer{protocols[protocol]}-protocol',
                            'value': protocol,
                            'to_ids': False
                        }
                    )
                    self.misp_event.add_object(network_connection_object)
                    self.references[self.analysisinfo_uuid].append(
                        dict(referenced_uuid=network_connection_object.uuid, relationship_type='initiates')
                    )

    def parse_screenshot(self):
        if self.data['behavior'].get('screenshotdata', {}).get('interesting') is not None:
            screenshotdata = self.data['behavior']['screenshotdata']['interesting']['$']
            self.misp_event.add_attribute(
                **{
                    'type': 'attachment',
                    'value': 'screenshot.jpg',
                    'data': screenshotdata,
                    'disable_correlation': True,
                    'to_ids': False
                }
            )

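    # Create a process object for each analysed process and parse its file and
    # registry activities.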
    def parse_system_behavior(self):
        if 'system' not in self.data['behavior']:
            return
        system = self.data['behavior']['system']
        if system.get('processes'):
            process_activities = {
                'fileactivities': self.parse_fileactivities,
                'registryactivities': self.parse_registryactivities
            }
            for process in system['processes']['process']:
                general = process['general']
                process_object = MISPObject('process')
                for feature, relation in process_object_fields.items():
                    process_object.add_attribute(
                        **{
                            'type': 'text',
                            'object_relation': relation,
                            'value': general[feature],
                            'to_ids': False
                        }
                    )
                start_time = datetime.strptime(f"{general['date']} {general['time']}", '%d/%m/%Y %H:%M:%S')
                process_object.add_attribute(
                    **{
                        'type': 'datetime',
                        'object_relation': 'start-time',
                        'value': start_time,
                        'to_ids': False
                    }
                )
                self.misp_event.add_object(process_object)
                for field, to_call in process_activities.items():
                    if process.get(field):
                        to_call(process_object.uuid, process[field])
                self.references[self.analysisinfo_uuid].append(
                    dict(referenced_uuid=process_object.uuid, relationship_type='calls')
                )
                self.process_references[(general['targetid'], general['path'])] = process_object.uuid

    def parse_fileactivities(self, process_uuid, fileactivities):
        for feature, files in fileactivities.items():
            # ignore unknown features
            if feature not in file_references_mapping:
                continue

            if files:
                for call in files['call']:
                    self.attributes['filename'][call['path']].add((process_uuid, file_references_mapping[feature]))

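    # Return whether the report comes from a file or a URL analysis.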
    def analysis_type(self):
        generalinfo = self.data['generalinfo']

        if generalinfo['target']['sample']:
            return "file"
        elif generalinfo['target']['url']:
            return "url"
        else:
            raise Exception("Unknown analysis type")

    def parse_url_analysis(self):
        generalinfo = self.data["generalinfo"]

        url_object = MISPObject("url")
        self.analysisinfo_uuid = url_object.uuid
        url_object.add_attribute(
            **{
                'type': 'url',
                'object_relation': 'url',
                'value': generalinfo["target"]["url"],
                'to_ids': False
            }
        )
        self.misp_event.add_object(url_object)

    def parse_fileinfo(self):
        fileinfo = self.data['fileinfo']

        file_object = MISPObject('file')
        self.analysisinfo_uuid = file_object.uuid

        for field in file_object_fields:
            file_object.add_attribute(
                **{
                    'type': field,
                    'object_relation': field,
                    'value': fileinfo[field],
                    'to_ids': False
                }
            )
        for field, mapping in file_object_mapping.items():
            if fileinfo.get(field) is not None:
                attribute = {'value': fileinfo[field], 'to_ids': False}
                attribute.update(mapping)
                file_object.add_attribute(**attribute)
        arch = self.data['generalinfo']['arch']
        if self.import_executable and arch in arch_type_mapping:
            to_call = arch_type_mapping[arch]
            getattr(self, to_call)(fileinfo, file_object)
        else:
            self.misp_event.add_object(file_object)

    def parse_apk(self, fileinfo, file_object):
        apkinfo = fileinfo['apk']
        self.misp_event.add_object(file_object)
        permission_lists = defaultdict(list)
        for permission in apkinfo['requiredpermissions']['permission']:
            permission = permission['@name'].split('.')
            permission_lists[' '.join(permission[:-1])].append(permission[-1])
        attribute_type = 'text'
        for comment, permissions in permission_lists.items():
            permission_object = MISPObject('android-permission')
            permission_object.add_attribute(
                **{
                    'type': attribute_type,
                    'object_relation': 'comment',
                    'value': comment,
                    'to_ids': False
                }
            )
            for permission in permissions:
                permission_object.add_attribute(
                    **{
                        'type': attribute_type,
                        'object_relation': 'permission',
                        'value': permission,
                        'to_ids': False
                    }
                )
            self.misp_event.add_object(permission_object)
            self.references[file_object.uuid].append(
                dict(referenced_uuid=permission_object.uuid, relationship_type='grants')
            )

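    # Build elf and elf-section objects from the static ELF information and link
    # them to the file object.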
    def parse_elf(self, fileinfo, file_object):
        elfinfo = fileinfo['elf']
        self.misp_event.add_object(file_object)
        attribute_type = 'text'
        relationship = 'includes'
        size = 'size-in-bytes'
        for fileinfo in elfinfo['file']:
            elf_object = MISPObject('elf')
            self.references[file_object.uuid].append(
                dict(referenced_uuid=elf_object.uuid, relationship_type=relationship)
            )
            elf = fileinfo['main'][0]['header'][0]
            if elf.get('type'):
                # Haven't seen anything but EXEC yet in the files I tested
                attribute_value = "EXECUTABLE" if elf['type'] == "EXEC (Executable file)" else elf['type']
                elf_object.add_attribute(
                    **{
                        'type': attribute_type,
                        'object_relation': 'type',
                        'value': attribute_value,
                        'to_ids': False
                    }
                )
            for feature, relation in elf_object_mapping.items():
                if elf.get(feature):
                    elf_object.add_attribute(
                        **{
                            'type': attribute_type,
                            'object_relation': relation,
                            'value': elf[feature],
                            'to_ids': False
                        }
                    )
            sections_number = len(fileinfo['sections']['section'])
            elf_object.add_attribute(
                **{
                    'type': 'counter',
                    'object_relation': 'number-sections',
                    'value': sections_number,
                    'to_ids': False
                }
            )
            self.misp_event.add_object(elf_object)
            for section in fileinfo['sections']['section']:
                section_object = MISPObject('elf-section')
                for feature in ('name', 'type'):
                    if section.get(feature):
                        section_object.add_attribute(
                            **{
                                'type': attribute_type,
                                'object_relation': feature,
                                'value': section[feature],
                                'to_ids': False
                            }
                        )
                if section.get('size'):
                    section_object.add_attribute(
                        **{
                            'type': size,
                            'object_relation': size,
                            'value': int(section['size'], 16),
                            'to_ids': False
                        }
                    )
                for flag in section['flagsdesc']:
                    try:
                        attribute_value = elf_section_flags_mapping[flag]
                        section_object.add_attribute(
                            **{
                                'type': attribute_type,
                                'object_relation': 'flag',
                                'value': attribute_value,
                                'to_ids': False
                            }
                        )
                    except KeyError:
                        print(f'Unknown elf section flag: {flag}')
                        continue
                self.misp_event.add_object(section_object)
                self.references[elf_object.uuid].append(
                    dict(referenced_uuid=section_object.uuid, relationship_type=relationship)
                )

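    # Build a pe object (compilation timestamp, version info, sections, signer info)
    # linked to the file object.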
    def parse_pe(self, fileinfo, file_object):
        try:
            peinfo = fileinfo['pe']
        except KeyError:
            self.misp_event.add_object(file_object)
            return
        pe_object = MISPObject('pe')
        relationship = 'includes'
        file_object.add_reference(pe_object.uuid, relationship)
        self.misp_event.add_object(file_object)
        for field, mapping in pe_object_fields.items():
            if peinfo.get(field) is not None:
                attribute = {'value': peinfo[field], 'to_ids': False}
                attribute.update(mapping)
                pe_object.add_attribute(**attribute)
        pe_object.add_attribute(
            **{
                'type': 'datetime',
                'object_relation': 'compilation-timestamp',
                'value': int(peinfo['timestamp'].split()[0], 16),
                'to_ids': False
            }
        )
        program_name = fileinfo['filename']
        if peinfo['versions']:
            for feature in peinfo['versions']['version']:
                name = feature['name']
                if name == 'InternalName':
                    program_name = feature['value']
                if name in pe_object_mapping:
                    pe_object.add_attribute(
                        **{
                            'type': 'text',
                            'object_relation': pe_object_mapping[name],
                            'value': feature['value'],
                            'to_ids': False
                        }
                    )
        sections_number = len(peinfo['sections']['section'])
        pe_object.add_attribute(
            **{
                'type': 'counter',
                'object_relation': 'number-sections',
                'value': sections_number,
                'to_ids': False
            }
        )
        signatureinfo = peinfo['signature']
        if signatureinfo['signed']:
            signerinfo_object = MISPObject('authenticode-signerinfo')
            pe_object.add_reference(signerinfo_object.uuid, 'signed-by')
            self.misp_event.add_object(pe_object)
            signerinfo_object.add_attribute(
                **{
                    'type': 'text',
                    'object_relation': 'program-name',
                    'value': program_name,
                    'to_ids': False
                }
            )
            for feature, mapping in signerinfo_object_mapping.items():
                if signatureinfo.get(feature) is not None:
                    attribute = {'value': signatureinfo[feature], 'to_ids': False}
                    attribute.update(mapping)
                    signerinfo_object.add_attribute(**attribute)
            self.misp_event.add_object(signerinfo_object)
        else:
            self.misp_event.add_object(pe_object)
        for section in peinfo['sections']['section']:
            section_object = self.parse_pe_section(section)
            self.references[pe_object.uuid].append(
                dict(referenced_uuid=section_object.uuid, relationship_type=relationship)
            )
            self.misp_event.add_object(section_object)

    def parse_pe_section(self, section):
        section_object = MISPObject('pe-section')
        for feature, mapping in pe_section_object_mapping.items():
            if section.get(feature) is not None:
                attribute = {'value': section[feature], 'to_ids': False}
                attribute.update(mapping)
                section_object.add_attribute(**attribute)
        return section_object

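    # Parse contacted domains, IPs and URLs and reference them from the process
    # that contacted them when that process is known.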
    def parse_network_interactions(self):
        domaininfo = self.data['domaininfo']
        if domaininfo:
            for domain in domaininfo['domain']:
                if domain['@ip'] != 'unknown':
                    domain_object = MISPObject('domain-ip')
                    for key, mapping in domain_object_mapping.items():
                        if domain.get(key) is not None:
                            attribute = {'value': domain[key], 'to_ids': False}
                            attribute.update(mapping)
                            domain_object.add_attribute(**attribute)
                    self.misp_event.add_object(domain_object)
                    reference = dict(referenced_uuid=domain_object.uuid, relationship_type='contacts')
                    self.add_process_reference(domain['@targetid'], domain['@currentpath'], reference)
                else:
                    attribute = MISPAttribute()
                    attribute.from_dict(**{'type': 'domain', 'value': domain['@name'], 'to_ids': False})
                    self.misp_event.add_attribute(**attribute)
                    reference = dict(referenced_uuid=attribute.uuid, relationship_type='contacts')
                    self.add_process_reference(domain['@targetid'], domain['@currentpath'], reference)
        ipinfo = self.data['ipinfo']
        if ipinfo:
            for ip in ipinfo['ip']:
                attribute = MISPAttribute()
                attribute.from_dict(**{'type': 'ip-dst', 'value': ip['@ip'], 'to_ids': False})
                self.misp_event.add_attribute(**attribute)
                reference = dict(referenced_uuid=attribute.uuid, relationship_type='contacts')
                self.add_process_reference(ip['@targetid'], ip['@currentpath'], reference)
        urlinfo = self.data['urlinfo']
        if urlinfo:
            for url in urlinfo['url']:
                target_id = int(url['@targetid'])
                current_path = url['@currentpath']
                attribute = MISPAttribute()
                attribute_dict = {'type': 'url', 'value': url['@name'], 'to_ids': False}
                if target_id != -1 and current_path != 'unknown':
                    self.references[self.process_references[(target_id, current_path)]].append({
                        'referenced_uuid': attribute.uuid,
                        'relationship_type': 'contacts'
                    })
                else:
                    attribute_dict['comment'] = 'From Memory - Enriched via the joe_import module'
                attribute.from_dict(**attribute_dict)
                self.misp_event.add_attribute(**attribute)

    def parse_registryactivities(self, process_uuid, registryactivities):
        if registryactivities['keyCreated']:
            for call in registryactivities['keyCreated']['call']:
                self.attributes['regkey'][call['path']].add((process_uuid, 'creates'))
        for feature, relationship in registry_references_mapping.items():
            if registryactivities[feature]:
                for call in registryactivities[feature]['call']:
                    registry_key = MISPObject('registry-key')
                    for field, mapping in regkey_object_mapping.items():
                        if call.get(field) is not None:
                            attribute = {'value': call[field], 'to_ids': False}
                            attribute.update(mapping)
                            registry_key.add_attribute(**attribute)
                    registry_key.add_attribute(
                        **{
                            'type': 'text',
                            'object_relation': 'data-type',
                            'value': f"REG_{call['type'].upper()}",
                            'to_ids': False
                        }
                    )
                    self.misp_event.add_object(registry_key)
                    self.references[process_uuid].append(
                        dict(referenced_uuid=registry_key.uuid, relationship_type=relationship)
                    )

    def add_process_reference(self, target, currentpath, reference):
        try:
            self.references[self.process_references[(int(target), currentpath)]].append(reference)
        except KeyError:
            self.references[self.analysisinfo_uuid].append(reference)

    def create_attribute(self, attribute_type, attribute_value):
        attribute = MISPAttribute()
        attribute.from_dict(**{'type': attribute_type, 'value': attribute_value, 'to_ids': False})
        self.misp_event.add_attribute(**attribute)
        return attribute.uuid

    def finalize_results(self):
        if self.references:
            self.build_references()
        event = json.loads(self.misp_event.to_json())
        self.results = {key: event[key] for key in ('Attribute', 'Object', 'Tag') if (key in event and event[key])}

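    # Normalise the fractional seconds of a packet timestamp so it can be parsed
    # with the '%b %d, %Y %H:%M:%S.%f' format used in parse_network_behavior.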
    @staticmethod
    def parse_timestamp(timestamp):
        timestamp = timestamp.split(':')
        timestamp[-1] = str(round(float(timestamp[-1].split(' ')[0]), 6))
        return ':'.join(timestamp)

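    # Map the raw connection tuple onto network-connection object attributes.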
    @staticmethod
    def prefetch_attributes_data(connection):
        attributes = []
        for field, value in zip(network_behavior_fields, connection):
            attribute = {'value': value, 'to_ids': False}
            attribute.update(network_connection_object_mapping[field])
            attributes.append(attribute)
        return attributes