Merge branch 'master' of github.com:MISP/misp-modules

pull/194/head
Alexandre Dulaunoy 2018-05-29 21:59:25 +02:00
commit 2d9b0cd172
12 changed files with 430 additions and 186 deletions


@@ -45,6 +45,7 @@ For more information: [Extending MISP with Python modules](https://www.circl.lu/
### Export modules
* [CEF](misp_modules/modules/export_mod/cef_export.py) module to export Common Event Format (CEF).
* [GoAML export](misp_modules/modules/export_mod/goamlexport.py) module to export in GoAML format.
* [Lite Export](misp_modules/modules/export_mod/liteexport.py) module to export a lite event.
* [Simple PDF export](misp_modules/modules/export_mod/pdfexport.py) module to export in PDF (required: asciidoctor-pdf).
* [ThreatConnect](misp_modules/modules/export_mod/threat_connect_export.py) module to export in ThreatConnect CSV format.
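
Any of these modules can be exercised against the running misp-modules service. The snippet below is a minimal sketch only: it assumes the service listens on its default port 6666 and that the query body mirrors what the GoAML export handler reads from `request['config']` and `request['data']`; the bare placeholder event simply triggers the sanity check added in this change set.

import json, requests

# Hypothetical smoke test for the GoAML export module (6666 is the misp-modules default port).
query = {'module': 'goamlexport',
         'config': {'rentity_id': '2510'},
         'data': [{'Event': {'info': 'demo', 'Object': []}}]}
reply = requests.post('http://127.0.0.1:6666/query', data=json.dumps(query))
print(reply.json())  # expected: the "There is no object in this event." error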


@@ -1,5 +1,5 @@
import json
from ._dnsdb_query.dnsdb_query import DnsdbClient
from ._dnsdb_query.dnsdb_query import DnsdbClient, QueryError
misperrors = {'error': 'Error'}
@@ -41,26 +41,35 @@ def handler(q=False):
def lookup_name(client, name):
res = client.query_rrset(name) # RRSET = entries in the left-hand side of the domain name related labels
for item in res:
if item.get('rrtype') in ['A', 'AAAA', 'CNAME']:
for i in item.get('rdata'):
yield(i.rstrip('.'))
if item.get('rrtype') in ['SOA']:
for i in item.get('rdata'):
# grab email field and replace first dot by @ to convert to an email address
yield(i.split(' ')[1].rstrip('.').replace('.', '@', 1))
# res = client.query_rdata_name(name) # RDATA = entries on the right-hand side of the domain name related labels
# for item in res:
# if item.get('rrtype') in ['A', 'AAAA', 'CNAME']:
# yield(item.get('rrname').rstrip('.'))
try:
res = client.query_rrset(name) # RRSET = entries in the left-hand side of the domain name related labels
for item in res:
if item.get('rrtype') in ['A', 'AAAA', 'CNAME']:
for i in item.get('rdata'):
yield(i.rstrip('.'))
if item.get('rrtype') in ['SOA']:
for i in item.get('rdata'):
# grab email field and replace first dot by @ to convert to an email address
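# e.g. an SOA rdata such as 'ns0.example.com. hostmaster.example.com. 2018052901 3600 600 86400 300' (illustrative values) becomes 'hostmaster@example.com'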
yield(i.split(' ')[1].rstrip('.').replace('.', '@', 1))
except QueryError as e:
pass
try:
res = client.query_rdata_name(name) # RDATA = entries on the right-hand side of the domain name related labels
for item in res:
if item.get('rrtype') in ['A', 'AAAA', 'CNAME']:
yield(item.get('rrname').rstrip('.'))
except QueryError as e:
pass
def lookup_ip(client, ip):
res = client.query_rdata_ip(ip)
for item in res:
print(item)
yield(item['rrname'].rstrip('.'))
try:
res = client.query_rdata_ip(ip)
for item in res:
yield(item['rrname'].rstrip('.'))
except QueryError as e:
pass
def introspection():


@@ -45,7 +45,7 @@ def findAll(data, keys):
return a
def valid_email(email):
return bool(re.search(r"^[\w\.\+\-]+\@[\w]+\.[a-z]{2,3}$", email))
return bool(re.search(r"[a-zA-Z0-9!#$%&'*+\/=?^_`{|}~-]+(?:\.[a-zA-Z0-9!#$%&'*+\/=?^_`{|}~-]+)*@(?:[a-zA-Z0-9](?:[a-zA-Z0-9-]*[a-zA-Z0-9])?\.)+[a-zA-Z0-9](?:[a-zA-Z0-9-]*[a-zA-Z0-9])?", email))
def handler(q=False):
if q is False:


@@ -14,7 +14,7 @@ extensions = {"ip1": "ipr/%s",
sys.path.append('./')
misperrors = {'error': 'Error'}
mispattributes = {'input': ['ip-src','ip-dst' 'vulnerability', 'md5', 'sha1', 'sha256'],
mispattributes = {'input': ['ip-src', 'ip-dst', 'vulnerability', 'md5', 'sha1', 'sha256'],
'output': ['ip-src', 'ip-dst', 'text', 'domain']}
# possible module-types: 'expansion', 'hover' or both


@@ -5,7 +5,8 @@ from collections import defaultdict, Counter
misperrors = {'error': 'Error'}
moduleinfo = {'version': '1', 'author': 'Christian Studer',
'description': 'Export to GoAML',
'module-type': ['export']}
'module-type': ['export'],
'require_standard_format': True}
moduleconfig = ['rentity_id']
mispattributes = {'input': ['MISPEvent'], 'output': ['xml file']}
outputFileExtension = "xml"
@@ -13,30 +14,37 @@ responseType = "application/xml"
objects_to_parse = ['transaction', 'bank-account', 'person', 'entity', 'geolocation']
goAMLmapping = {'bank-account': 't_account', 'institution-code': 'institution_code', 'iban': 'iban',
'swift': 'swift', 'branch': 'branch', 'non-banking-institution': 'non_bank_institution',
'account': 'account', 'currency-code': 'currency_code', 'account-name': 'account_name',
'client-number': 'client_number', 'personal-account-type': 'personal_account_type',
'opened': 'opened', 'closed': 'closed', 'balance': 'balance', 'status-code': 'status_code',
'beneficiary': 'beneficiary', 'beneficiary-comment': 'beneficiary_comment', 'comments': 'comments',
'person': 't_person', 'text': 'comments', 'first-name': 'first_name', 'middle-name': 'middle_name',
'last-name': 'last_name', 'mothers-name': 'mothers_name', 'title': 'title', 'alias': 'alias',
'date-of-birth': 'birthdate', 'place-of-birth': 'birth_place', 'gender': 'gender',
'passport-number': 'passport_number', 'passport-country': 'passport_country',
'social-security-number': 'ssn', 'nationality': 'nationality1', 'identity-card-number': 'id_number',
'geolocation': 'location', 'city': 'city', 'region': 'state', 'country': 'country-code',
'address': 'address', 'zipcode': 'zip',
'transaction': 'transaction', 'transaction-number': 'transactionnumber', 'date': 'date_transaction',
'location': 'transaction_location', 'transmode-code': 'transmode_code', 'amount': 'amount_local',
'transmode-comment': 'transmode_comment', 'date-posting': 'date_posting', 'teller': 'teller',
'authorized': 'authorized',
'legal-entity': 'entity', 'name': 'name', 'commercial-name': 'commercial_name', 'business': 'business',
'legal-form': 'incorporation_legal_form', 'registration-number': 'incorporation_number',
'phone-number': 'phone'}
goAMLmapping = {'bank-account': {'bank-account': 't_account', 'institution-name': 'institution_name',
'institution-code': 'institution_code', 'iban': 'iban', 'swift': 'swift',
'branch': 'branch', 'non-banking-institution': 'non_bank_institution',
'account': 'account', 'currency-code': 'currency_code',
'account-name': 'account_name', 'client-number': 'client_number',
'personal-account-type': 'personal_account_type', 'opened': 'opened',
'closed': 'closed', 'balance': 'balance', 'status-code': 'status_code',
'beneficiary': 'beneficiary', 'beneficiary-comment': 'beneficiary_comment',
'comments': 'comments'},
'person': {'person': 't_person', 'text': 'comments', 'first-name': 'first_name',
'middle-name': 'middle_name', 'last-name': 'last_name', 'title': 'title',
'mothers-name': 'mothers_name', 'alias': 'alias', 'date-of-birth': 'birthdate',
'place-of-birth': 'birth_place', 'gender': 'gender', 'nationality': 'nationality1',
'passport-number': 'passport_number', 'passport-country': 'passport_country',
'social-security-number': 'ssn', 'identity-card-number': 'id_number'},
'geolocation': {'geolocation': 'location', 'city': 'city', 'region': 'state',
'country': 'country_code', 'address': 'address', 'zipcode': 'zip'},
'transaction': {'transaction': 'transaction', 'transaction-number': 'transactionnumber',
'date': 'date_transaction', 'location': 'transaction_location',
'transmode-code': 'transmode_code', 'amount': 'amount_local',
'transmode-comment': 'transmode_comment', 'date-posting': 'date_posting',
'teller': 'teller', 'authorized': 'authorized',
'text': 'transaction_description'},
'legal-entity': {'legal-entity': 'entity', 'name': 'name', 'business': 'business',
'commercial-name': 'commercial_name', 'phone-number': 'phone',
'legal-form': 'incorporation_legal_form',
'registration-number': 'incorporation_number'}}
referencesMapping = {'bank-account': {'aml_type': '{}_account', 'bracket': 't_{}'},
'person': {'transaction': {'aml_type': '{}_person', 'bracket': 't_{}'}, 'bank-account': {'aml_type': 't_person', 'bracket': 'signatory'}},
'legal-entity': {'transaction': {'aml_type': '{}_entity', 'bracket': 't_{}'}, 'bank-account': {'aml_type': 'entity'}},
'legal-entity': {'transaction': {'aml_type': '{}_entity', 'bracket': 't_{}'}, 'bank-account': {'aml_type': 't_entity'}},
'geolocation': {'aml_type': 'address', 'bracket': 'addresses'}}
class GoAmlGeneration(object):
@@ -88,7 +96,7 @@ class GoAmlGeneration(object):
obj = self.misp_event.get_object_by_uuid(uuid)
if object_type == 'transaction':
self.xml[xml_part] += "<{}>".format(aml_type)
self.fill_xml_transaction(obj.attributes, xml_part)
self.fill_xml_transaction(object_type, obj.attributes, xml_part)
self.parsed_uuids[object_type].append(uuid)
if obj.ObjectReference:
self.parseObjectReferences(object_type, xml_part, obj.ObjectReference)
@@ -104,7 +112,7 @@ class GoAmlGeneration(object):
def itterate_normal_case(self, object_type, obj, aml_type, uuid, xml_part):
self.xml[xml_part] += "<{}>".format(aml_type)
self.fill_xml(obj, xml_part)
self.fill_xml(object_type, obj, xml_part)
self.parsed_uuids[object_type].append(uuid)
if obj.ObjectReference:
self.parseObjectReferences(object_type, xml_part, obj.ObjectReference)
@@ -117,7 +125,7 @@ class GoAmlGeneration(object):
relationship_type = ref.relationship_type
self.parse_references(object_type, next_object_type, next_uuid, relationship_type, xml_part)
def fill_xml_transaction(self, attributes, xml_part):
def fill_xml_transaction(self, object_type, attributes, xml_part):
from_and_to_fields = {'from': {}, 'to': {}}
for attribute in attributes:
object_relation = attribute.object_relation
@@ -133,12 +141,12 @@ class GoAmlGeneration(object):
from_and_to_fields[relation_type][field] = attribute_value
continue
try:
self.xml[xml_part] += "<{0}>{1}</{0}>".format(goAMLmapping[object_relation], attribute_value)
self.xml[xml_part] += "<{0}>{1}</{0}>".format(goAMLmapping[object_type][object_relation], attribute_value)
except KeyError:
pass
self.from_and_to_fields = from_and_to_fields
def fill_xml(self, obj, xml_part):
def fill_xml(self, object_type, obj, xml_part):
if obj.name == 'bank-account':
for attribute in obj.attributes:
if attribute.object_relation in ('personal-account-type', 'status-code'):
@@ -146,13 +154,13 @@ class GoAmlGeneration(object):
else:
attribute_value = attribute.value
try:
self.xml[xml_part] += "<{0}>{1}</{0}>".format(goAMLmapping[attribute.object_relation], attribute_value)
self.xml[xml_part] += "<{0}>{1}</{0}>".format(goAMLmapping[object_type][attribute.object_relation], attribute_value)
except KeyError:
pass
else:
for attribute in obj.attributes:
try:
self.xml[xml_part] += "<{0}>{1}</{0}>".format(goAMLmapping[attribute.object_relation], attribute.value)
self.xml[xml_part] += "<{0}>{1}</{0}>".format(goAMLmapping[object_type][attribute.object_relation], attribute.value)
except KeyError:
pass
@@ -186,6 +194,15 @@ def handler(q=False):
config = request['config'].get('rentity_id')
export_doc = GoAmlGeneration(config)
export_doc.from_event(request['data'][0])
if not export_doc.misp_event.Object:
misperrors['error'] = "There is no object in this event."
return misperrors
types = []
for obj in export_doc.misp_event.Object:
types.append(obj.name)
if 'transaction' not in types:
misperrors['error'] = "There is no transaction object in this event."
return misperrors
export_doc.parse_objects()
export_doc.build_xml()
exp_doc = "{}{}".format(export_doc.xml.get('header'), export_doc.xml.get('data'))


@@ -15,7 +15,8 @@ misperrors = {'error': 'Error'}
moduleinfo = {'version': '1',
'author': 'Raphaël Vinot',
'description': 'Simple export to PDF',
'module-type': ['export']}
'module-type': ['export'],
'require_standard_format': True}
moduleconfig = []


@@ -1,4 +1,4 @@
from . import _vmray
__all__ = ['vmray_import', 'testimport', 'ocr', 'stiximport', 'cuckooimport',
__all__ = ['vmray_import', 'testimport', 'ocr', 'stiximport', 'cuckooimport', 'goamlimport',
'email_import', 'mispjson', 'openiocimport', 'threatanalyzer_import', 'csvimport']


@@ -1,131 +1,156 @@
# -*- coding: utf-8 -*-
import json, os
import json, os, base64
import pymisp
misperrors = {'error': 'Error'}
mispattributes = {'input': ['file'], 'output': ['MISP attributes']}
moduleinfo = {'version': '0.1', 'author': 'Christian Studer',
'description': 'Import Attributes from a csv file.',
'module-type': ['import']}
moduleconfig = ['header']
moduleconfig = []
inputSource = ['file']
userConfig = {'header': {
'type': 'String',
'message': 'Define the header of the csv file, with types (included in MISP attribute types or attribute fields) separated by commas.\nFor fields that do not match these types, please use space or simply nothing between commas.\nFor instance: ip-src,domain, ,timestamp'},
'has_header':{
'type': 'Boolean',
'message': 'Tick this box ONLY if the file starts with an uncommented header line (it will then be skipped).'
}}
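# Illustration (hypothetical values): with the header 'ip-src,domain, ,to_ids', a line such as
# '8.8.8.8,evil.example.com,ignored,1' yields one ip-src and one domain attribute, both with
# to_ids set to 1; the unnamed third column is dropped.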
duplicatedFields = {'mispType': {'mispComment': 'comment'},
'attrField': {'eventComment': 'comment'}}
'attrField': {'attrComment': 'comment'}}
attributesFields = ['type', 'value', 'category', 'to_ids', 'comment', 'distribution']
delimiters = [',', ';', '|', '/', '\t', ' ']
class CsvParser():
def __init__(self, header, has_header):
self.header = header
self.fields_number = len(header)
self.has_header = has_header
self.attributes = []
def parse_data(self, data):
return_data = []
if self.fields_number == 1:
for line in data:
l = line.split('#')[0].strip()
if l:
return_data.append(l)
self.delimiter = None
else:
self.delimiter_count = dict([(d, 0) for d in delimiters])
for line in data:
l = line.split('#')[0].strip()
if l:
self.parse_delimiter(l)
return_data.append(l)
# find which delimiter is used
self.delimiter = self.find_delimiter()
self.data = return_data[1:] if self.has_header else return_data
def parse_delimiter(self, line):
for d in delimiters:
if line.count(d) >= (self.fields_number - 1):
self.delimiter_count[d] += 1
def find_delimiter(self):
_, delimiter = max((n, v) for v, n in self.delimiter_count.items())
return delimiter
def buildAttributes(self):
# if there is only 1 field of data
if self.delimiter is None:
mispType = self.header[0]
for data in self.data:
d = data.strip()
if d:
self.attributes.append({'types': mispType, 'values': d})
else:
# split fields that should be recognized as misp attribute types from the others
list2pop, misp, head = self.findMispTypes()
# for each line of data
for data in self.data:
datamisp = []
datasplit = data.split(self.delimiter)
# in case there is an empty line or an error
if len(datasplit) != self.fields_number:
continue
# pop from the line data that matches with a misp type, using the list of indexes
for l in list2pop:
datamisp.append(datasplit.pop(l).strip())
# for each misp type, we create an attribute
for m, dm in zip(misp, datamisp):
attribute = {'types': m, 'values': dm}
for h, ds in zip(head, datasplit):
if h:
attribute[h] = ds.strip()
self.attributes.append(attribute)
def findMispTypes(self):
descFilename = os.path.join(pymisp.__path__[0], 'data/describeTypes.json')
with open(descFilename, 'r') as f:
MispTypes = json.loads(f.read())['result'].get('types')
list2pop = []
misp = []
head = []
for h in reversed(self.header):
n = self.header.index(h)
# fields that are misp attribute types
if h in MispTypes:
list2pop.append(n)
misp.append(h)
# handle confusions between misp attribute types and attribute fields
elif h in duplicatedFields['mispType']:
# fields that should be considered as misp attribute types
list2pop.append(n)
misp.append(duplicatedFields['mispType'].get(h))
elif h in duplicatedFields['attrField']:
# fields that should be considered as attribute fields
head.append(duplicatedFields['attrField'].get(h))
# or, it could be an attribute field
elif h in attributesFields:
head.append(h)
# otherwise, it is not defined
else:
head.append('')
# return list of indexes of the misp types, list of the misp types, remaining fields that will be attribute fields
return list2pop, misp, list(reversed(head))
def handler(q=False):
if q is False:
return False
request = json.loads(q)
if request.get('file'):
filename = request['file']
if request.get('data'):
data = base64.b64decode(request['data']).decode('utf-8')
else:
misperrors['error'] = "Unsupported attributes type"
return misperrors
if not request.get('config') or not request['config'].get('header'):
misperrors['error'] = "Configuration error"
return misperrors
config = request['config'].get('header')
#header = []
try:
data = readFile(filename, 'utf-8')
except:
data = readFile(filename, 'iso-8859-1')
# find which delimiter is used
delimiter, length = findDelimiter(config, data)
header = request['config'].get('header').split(',')
header = [c.strip() for c in header]
has_header = request['config'].get('has_header')
has_header = True if has_header == '1' else False
csv_parser = CsvParser(header, has_header)
csv_parser.parse_data(data.split('\n'))
# build the attributes
result = buildAttributes(config, data, delimiter, length)
r = {'results': [{'types': mispattributes['output'], 'values': result}]}
csv_parser.buildAttributes()
r = {'results': csv_parser.attributes}
return r
def readFile(filename, encoding):
data = []
with open(filename, 'r', encoding=encoding) as f:
for line in f:
# split comments from data
if '#' in line:
l = line.split('#')[0].strip()
else:
l = line.strip()
if l:
data.append(l)
return data
def findDelimiter(header, data):
n = len(header)
if n > 1:
tmpData = []
for da in data:
tmp = []
for d in (';', '|', '/', ',', '\t', ' ',):
if da.count(d) == (n-1):
tmp.append(d)
if len(tmp) == 1 and tmp == tmpData:
return tmpData[0], n
else:
tmpData = tmp
else:
return None, 1
def buildAttributes(header, dataValues, delimiter, length):
attributes = []
# if there is only 1 field of data
if delimiter is None:
mispType = header[0]
for data in dataValues:
d = data.strip()
if d:
attributes.append({'type': mispType, 'value': d})
else:
# split fields that should be recognized as misp attribute types from the others
list2pop, misp, head = findMispTypes(header)
# for each line of data
for data in dataValues:
datamisp = []
datasplit = data.split(delimiter)
# in case there is an empty line or an error
if len(datasplit) != length:
continue
# pop from the line data that matches with a misp type, using the list of indexes
for l in list2pop:
datamisp.append(datasplit.pop(l).strip())
# for each misp type, we create an attribute
for m, dm in zip(misp, datamisp):
attribute = {'type': m, 'value': dm}
for h, ds in zip(head, datasplit):
if h:
attribute[h] = ds.strip()
attributes.append(attribute)
return attributes
def findMispTypes(header):
descFilename = os.path.join(pymisp.__path__[0], 'data/describeTypes.json')
with open(descFilename, 'r') as f:
MispTypes = json.loads(f.read())['result'].get('types')
list2pop = []
misp = []
head = []
for h in reversed(header):
n = header.index(h)
# fields that are misp attribute types
if h in MispTypes:
list2pop.append(n)
misp.append(h)
# handle confusions between misp attribute types and attribute fields
elif h in duplicatedFields['mispType']:
# fields that should be considered as misp attribute types
list2pop.append(n)
misp.append(duplicatedFields['mispType'].get(h))
elif h in duplicatedFields['attrField']:
# fields that should be considered as attribute fields
head.append(duplicatedFields['attrField'].get(h))
# otherwise, it is an attribute field
else:
head.append(h)
# return list of indexes of the misp types, list of the misp types, remaining fields that will be attribute fields
return list2pop, misp, list(reversed(head))
def introspection():
return mispattributes
modulesetup = {}
try:
userConfig
modulesetup['userConfig'] = userConfig
except NameError:
pass
try:
inputSource
modulesetup['inputSource'] = inputSource
except NameError:
pass
return modulesetup
def version():
moduleinfo['config'] = moduleconfig


@@ -0,0 +1,172 @@
import json, datetime, time, base64
import xml.etree.ElementTree as ET
from collections import defaultdict
from pymisp import MISPEvent, MISPObject
misperrors = {'error': 'Error'}
moduleinfo = {'version': 1, 'author': 'Christian Studer',
'description': 'Import from GoAML',
'module-type': ['import']}
moduleconfig = []
mispattributes = {'inputSource': ['file'], 'output': ['MISP objects']}
t_from_objects = {'nodes': ['from_person', 'from_account', 'from_entity'],
'leaves': ['from_funds_code', 'from_country']}
t_to_objects = {'nodes': ['to_person', 'to_account', 'to_entity'],
'leaves': ['to_funds_code', 'to_country']}
t_person_objects = {'nodes': ['addresses'],
'leaves': ['first_name', 'middle_name', 'last_name', 'gender', 'title', 'mothers_name', 'birthdate',
'passport_number', 'passport_country', 'id_number', 'birth_place', 'alias', 'nationality1']}
t_account_objects = {'nodes': ['signatory'],
'leaves': ['institution_name', 'institution_code', 'swift', 'branch', 'non_banking_institution',
'account', 'currency_code', 'account_name', 'iban', 'client_number', 'opened', 'closed',
'personal_account_type', 'balance', 'date_balance', 'status_code', 'beneficiary',
'beneficiary_comment', 'comments']}
entity_objects = {'nodes': ['addresses'],
'leaves': ['name', 'commercial_name', 'incorporation_legal_form', 'incorporation_number', 'business', 'phone']}
goAMLobjects = {'report': {'nodes': ['reporting_person', 'location'],
'leaves': ['rentity_id', 'submission_code', 'report_code', 'submission_date', 'currency_code_local']},
'reporting_person': {'nodes': ['addresses'], 'leaves': ['first_name', 'middle_name', 'last_name', 'title']},
'location': {'nodes': [], 'leaves': ['address_type', 'address', 'city', 'zip', 'country_code', 'state']},
'transaction': {'nodes': ['t_from', 't_from_my_client', 't_to', 't_to_my_client'],
'leaves': ['transactionnumber', 'transaction_location', 'date_transaction',
'transmode_code', 'amount_local']},
't_from': t_from_objects, 't_from_my_client': t_from_objects,
't_to': t_to_objects, 't_to_my_client': t_to_objects,
'addresses': {'nodes': ['address'], 'leaves': []},
'address': {'nodes': [], 'leaves': ['address_type', 'address', 'city', 'zip', 'country_code', 'state']},
'from_person': t_person_objects, 'to_person': t_person_objects, 't_person': t_person_objects,
'from_account': t_account_objects, 'to_account': t_account_objects,
'signatory': {'nodes': ['t_person'], 'leaves': []},
'from_entity': entity_objects, 'to_entity': entity_objects,
}
t_account_mapping = {'misp_name': 'bank-account', 'institution_name': 'institution-name', 'institution_code': 'institution-code',
'iban': 'iban', 'swift': 'swift', 'branch': 'branch', 'non_banking_institution': 'non-bank-institution',
'account': 'account', 'currency_code': 'currency-code', 'account_name': 'account-name',
'client_number': 'client-number', 'personal_account_type': 'personal-account-type', 'opened': 'opened',
'closed': 'closed', 'balance': 'balance', 'status_code': 'status-code', 'beneficiary': 'beneficiary',
'beneficiary_comment': 'beneficiary-comment', 'comments': 'comments'}
t_person_mapping = {'misp_name': 'person', 'comments': 'text', 'first_name': 'first-name', 'middle_name': 'middle-name',
'last_name': 'last-name', 'title': 'title', 'mothers_name': 'mothers-name', 'alias': 'alias',
'birthdate': 'date-of-birth', 'birth_place': 'place-of-birth', 'gender': 'gender', 'nationality1': 'nationality',
'passport_number': 'passport-number', 'passport_country': 'passport-country', 'ssn': 'social-security-number',
'id_number': 'identity-card-number'}
location_mapping = {'misp_name': 'geolocation', 'city': 'city', 'state': 'region', 'country_code': 'country', 'address': 'address',
'zip': 'zipcode'}
t_entity_mapping = {'misp_name': 'legal-entity', 'name': 'name', 'business': 'business', 'commercial_name': 'commercial-name',
'phone': 'phone-number', 'incorporation_legal_form': 'legal-form', 'incorporation_number': 'registration-number'}
goAMLmapping = {'from_account': t_account_mapping, 'to_account': t_account_mapping, 't_person': t_person_mapping,
'from_person': t_person_mapping, 'to_person': t_person_mapping, 'reporting_person': t_person_mapping,
'from_entity': t_entity_mapping, 'to_entity': t_entity_mapping,
'location': location_mapping, 'address': location_mapping,
'transaction': {'misp_name': 'transaction', 'transactionnumber': 'transaction-number', 'date_transaction': 'date',
'transaction_location': 'location', 'transmode_code': 'transmode-code', 'amount_local': 'amount',
'transmode_comment': 'transmode-comment', 'date_posting': 'date-posting', 'teller': 'teller',
'authorized': 'authorized', 'transaction_description': 'text'}}
nodes_to_ignore = ['addresses', 'signatory']
relationship_to_keep = ['signatory', 't_from', 't_from_my_client', 't_to', 't_to_my_client', 'address']
class GoAmlParser():
def __init__(self):
self.misp_event = MISPEvent()
def read_xml(self, data):
self.tree = ET.fromstring(data)
def parse_xml(self):
self.first_itteration()
for t in self.tree.findall('transaction'):
self.itterate(t, 'transaction')
def first_itteration(self):
submission_date = self.tree.find('submission_date').text.split('+')[0]
self.misp_event.timestamp = int(time.mktime(time.strptime(submission_date, "%Y-%m-%dT%H:%M:%S")))
for node in goAMLobjects['report']['nodes']:
element = self.tree.find(node)
if element is not None:
self.itterate(element, element.tag)
def itterate(self, tree, aml_type, referencing_uuid=None, relationship_type=None):
objects = goAMLobjects[aml_type]
referenced_uuid = referencing_uuid
rel = relationship_type
if aml_type not in nodes_to_ignore:
try:
mapping = goAMLmapping[aml_type]
misp_object = MISPObject(name=mapping['misp_name'])
for leaf in objects['leaves']:
element = tree.find(leaf)
if element is not None:
object_relation = mapping[element.tag]
attribute = {'object_relation': object_relation, 'value': element.text}
misp_object.add_attribute(**attribute)
if aml_type == 'transaction':
for node in objects['nodes']:
element = tree.find(node)
if element is not None:
self.fill_transaction(element, element.tag, misp_object)
self.misp_event.add_object(misp_object)
last_object = self.misp_event.objects[-1]
referenced_uuid = last_object.uuid
if referencing_uuid and relationship_type:
referencing_object = self.misp_event.get_object_by_uuid(referencing_uuid)
referencing_object.add_reference(referenced_uuid, rel, None, **last_object)
except KeyError:
pass
for node in objects['nodes']:
element = tree.find(node)
if element is not None:
tag = element.tag
if tag in relationship_to_keep:
rel = tag[2:] if tag.startswith('t_') else tag
self.itterate(element, element.tag, referencing_uuid=referenced_uuid, relationship_type=rel)
@staticmethod
def fill_transaction(element, tag, misp_object):
if 't_from' in tag:
from_funds = element.find('from_funds_code').text
from_funds_attribute = {'object_relation': 'from-funds-code', 'value': from_funds}
misp_object.add_attribute(**from_funds_attribute)
from_country = element.find('from_country').text
from_country_attribute = {'object_relation': 'from-country', 'value': from_country}
misp_object.add_attribute(**from_country_attribute)
if 't_to' in tag:
to_funds = element.find('to_funds_code').text
to_funds_attribute = {'object_relation': 'to-funds-code', 'value': to_funds}
misp_object.add_attribute(**to_funds_attribute)
to_country = element.find('to_country').text
to_country_attribute = {'object_relation': 'to-country', 'value': to_country}
misp_object.add_attribute(**to_country_attribute)
def handler(q=False):
if q is False:
return False
request = json.loads(q)
if request.get('data'):
data = base64.b64decode(request['data']).decode('utf-8')
else:
misperrors['error'] = "Unsupported attributes type"
return misperrors
aml_parser = GoAmlParser()
try:
aml_parser.read_xml(data)
except:
misperrors['error'] = "Impossible to read XML data"
return misperrors
aml_parser.parse_xml()
r = {'results': [obj.to_json() for obj in aml_parser.misp_event.objects]}
return r
def introspection():
return mispattributes
def version():
moduleinfo['config'] = moduleconfig
return moduleinfo
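
A quick local check of this importer, as a sketch only (the import path is assumed from the repository layout, and the sample report is the tests/goamlexport.xml file added in this change set):

import base64, json
from misp_modules.modules.import_mod import goamlimport  # import path assumed from the repo layout

# Feed the handler the same base64-encoded payload the MISP server would pass in 'data'.
with open('tests/goamlexport.xml', 'rb') as f:
    query = json.dumps({'data': base64.b64encode(f.read()).decode()})
reply = goamlimport.handler(q=query)
print(len(reply['results']))  # one MISP object per parsed transaction, account, person or address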


@@ -15,7 +15,7 @@ misperrors = {'error': 'Error'}
userConfig = {}
inputSource = ['file']
moduleinfo = {'version': '0.6', 'author': 'Christophe Vandeplas',
moduleinfo = {'version': '0.7', 'author': 'Christophe Vandeplas',
'description': 'Import for ThreatAnalyzer archive.zip/analysis.json files',
'module-type': ['import']}
@@ -62,12 +62,12 @@ def handler(q=False):
if re.match(r"Analysis/proc_\d+/modified_files/.+\.", zip_file_name) and "mapping.log" not in zip_file_name:
sample_md5 = zip_file_name.split('/')[-1].split('.')[0]
if sample_md5 in modified_files_mapping:
sample_filename = modified_files_mapping[sample_md5]
# print("{} maps to {}".format(sample_md5, sample_filename))
current_sample_filename = modified_files_mapping[sample_md5]
# print("{} maps to {}".format(sample_md5, current_sample_filename))
with zf.open(zip_file_name, mode='r', pwd=None) as fp:
file_data = fp.read()
results.append({
'values': sample_filename,
'values': current_sample_filename,
'data': base64.b64encode(file_data).decode(),
'type': 'malware-sample', 'categories': ['Artifacts dropped', 'Payload delivery'], 'to_ids': True, 'comment': ''})
@@ -76,8 +76,18 @@ def handler(q=False):
file_data = fp.read()
analysis_json = json.loads(file_data.decode('utf-8'))
results += process_analysis_json(analysis_json)
# if 'sample' in zip_file_name:
# sample['data'] = base64.b64encode(file_data).decode()
try:
sample_filename = analysis_json.get('analysis').get('@filename')
if sample_filename:
with zf.open('sample', mode='r', pwd=None) as fp:
file_data = fp.read()
results.append({
'values': sample_filename,
'data': base64.b64encode(file_data).decode(),
'type': 'malware-sample', 'categories': ['Artifacts dropped', 'Payload delivery'], 'to_ids': True, 'comment': ''})
except Exception as e:
# no 'sample' in archive, might be an url analysis, just ignore
pass
else:
try:
@@ -411,20 +421,22 @@ def cleanup_url(item):
def cleanup_filepath(item):
noise_substrings = {
'C:\\Windows\\Prefetch\\',
'\\AppData\\Roaming\\Microsoft\\Windows\\Recent\\',
'\\AppData\\Roaming\\Microsoft\\Office\\Recent\\',
'C:\\ProgramData\\Microsoft\\OfficeSoftwareProtectionPlatform\\Cache\\cache.dat',
'\\AppData\\Local\\Microsoft\\Windows\\Temporary Internet Files\\Content.',
'\\AppData\\Local\\Microsoft\\Internet Explorer\\Recovery\\High\\',
'\\AppData\\Local\\GDIPFONTCACHEV1.DAT',
'\\AppData\\Local\\Microsoft\\Internet Explorer\\DOMStore\\',
'\\AppData\\LocalLow\\Microsoft\\Internet Explorer\\Services\\search_',
'\\AppData\\Local\\Microsoft\\Windows\\History\\History.',
'\\AppData\\Roaming\\Microsoft\\Windows\\Cookies\\',
'\\AppData\\LocalLow\\Microsoft\\CryptnetUrlCache\\',
'\\AppData\\Local\\Microsoft\\Internet Explorer\\Recovery\\High\\',
'\\AppData\\Local\\Microsoft\\Windows\\Caches\\',
'\\AppData\\Local\\Microsoft\\Windows\WebCache\\',
'\\AppData\\Local\\Microsoft\\Windows\\Explorer\\thumbcache',
'\\AppData\\Local\\Microsoft\\Windows\\History\\History.',
'\\AppData\\Local\\Microsoft\\Windows\\Temporary Internet Files\\Content.',
'\\AppData\\Local\\Microsoft\\Windows\\WebCache\\',
'\\AppData\\Local\\Temp\\.*tmp$',
'\\AppData\\LocalLow\\Microsoft\\CryptnetUrlCache\\',
'\\AppData\\LocalLow\\Microsoft\\Internet Explorer\\Services\\search_',
'\\AppData\\Roaming\\Microsoft\\Office\\Recent\\',
'\\AppData\\Roaming\\Microsoft\\Windows\\Cookies\\',
'\\AppData\\Roaming\\Microsoft\\Windows\\Recent\\',
'C:\\ProgramData\\Microsoft\\OfficeSoftwareProtectionPlatform\\Cache\\cache.dat',
'C:\\Windows\\Prefetch\\',
'\\AppData\\Roaming\\Adobe\\Acrobat\\9.0\\SharedDataEvents-journal',
'\\AppData\\Roaming\\Adobe\\Acrobat\\9.0\\UserCache.bin',
@@ -441,24 +453,27 @@ def cleanup_regkey(item):
def cleanup_regkey(item):
noise_substrings = {
r'\\Software\\Microsoft\\Windows\\CurrentVersion\\Installer\\UserData\\',
r'\\Software\\Microsoft\\Windows\\CurrentVersion\\Internet Settings\\',
r'\\CurrentVersion\\Explorer\\FileExts\\[a-z\.]+\\OpenWith',
r'\\CurrentVersion\\Explorer\\RecentDocs\\',
r'\\CurrentVersion\\Explorer\\UserAssist\\',
r'\\CurrentVersion\\Explorer\\FileExts\\[a-z\.]+\\OpenWith',
r'\\Software\\Microsoft\\Internet Explorer\\Main\\WindowsSearch',
r'\\Software\\Microsoft\\Office\\[0-9\.]+\\',
r'\\SOFTWARE\\Microsoft\\OfficeSoftwareProtectionPlatform\\',
r'\\Software\\Microsoft\\Office\\Common\\Smart Tag\\',
r'\\Usage\\SpellingAndGrammarFiles',
r'^HKLM\\Software\\Microsoft\\Tracing\\',
r'\\Local Settings\\Software\\Microsoft\\Windows\\Shell\\Bag',
r'\\Software\\Classes\\CLSID\\',
r'\\Software\\Classes\\Local Settings\\MuiCache\\',
r'\\Local Settings\\Software\\Microsoft\\Windows\\Shell\\Bag',
r'\\Software\\Microsoft\\Windows\\CurrentVersion\\Explorer\\RunMRU\\'
r'\\Software\\Microsoft\\Internet Explorer\\Main\\WindowsSearch',
r'\\Software\\Microsoft\\Office\\[0-9\.]+\\',
r'\\Software\\Microsoft\\Office\\Common\\Smart Tag\\',
r'\\Software\\Microsoft\\OfficeSoftwareProtectionPlatform\\',
r'\\Software\\Microsoft\\Shared Tools\\Panose\\',
r'\\Software\\Microsoft\\Tracing\\',
r'\\Software\\Microsoft\\Tracing\\powershell_RASAPI32\\',
r'\\Software\\Microsoft\\Tracing\\powershell_RASMANCS\\',
r'\\Software\\Microsoft\\Windows\\CurrentVersion\\Action Center\\',
r'\\Software\\Microsoft\\Windows\\CurrentVersion\\Explorer\\RunMRU\\',
r'\\Software\\Microsoft\\Windows\\CurrentVersion\\Installer\\UserData\\',
r'\\Software\\Microsoft\\Windows\\CurrentVersion\\Internet Settings\\',
r'\\System\\CurrentControlSet\\Services\\RdyBoost\\',
r'\\Usage\\SpellingAndGrammarFiles'
}
item = item.replace('\\REGISTRY\\MACHINE\\', 'HKLM\\')
item = item.replace('\\REGISTRY\\USER\\', 'HKCU\\')
if list_in_string(noise_substrings, item, regex=True):
return None
return item


@@ -93,7 +93,10 @@ def handler(q=False):
analysis_data = vmrayDownloadAnalysis(api, analysis_id)
if analysis_data:
p = vmrayVtiPatterns(analysis_data["vti_patterns"])
if "analysis_vti_patterns" in analysis_data:
p = vmrayVtiPatterns(analysis_data["analysis_vti_patterns"])
else:
p = vmrayVtiPatterns(analysis_data["vti_patterns"])
if p and len(p["results"]) > 0:
vti_patterns_found = True
vmray_results = {'results': vmray_results["results"] + p["results"]}

tests/goamlexport.xml Normal file

@@ -0,0 +1 @@
<report><rentity_id>2510</rentity_id><submission_code>E</submission_code><report_code>STR</report_code><submission_date>2018-02-22T08:34:16+00:00</submission_date><currency_code_local>EUR</currency_code_local><transaction><transactionnumber>TW00000901</transactionnumber><transaction_location>1 Manners Street Wellington</transaction_location><transmode_code>BG</transmode_code><date_transaction>2015-12-01T10:03:00</date_transaction><amount_local>12345</amount_local><transaction_description>when it transacts</transaction_description><t_from><from_funds_code>E</from_funds_code><from_account><status_code>A</status_code><personal_account_type>A</personal_account_type><currency_code>EUR</currency_code><account>31032027088</account><swift>ATTBVI</swift><institution_name>The bank</institution_name><signatory><t_person><last_name>Nick</last_name><first_name>Pitt</first_name><title>Sir</title><birthdate>1993-09-25</birthdate><birth_place>Mulhouse, France</birth_place><gender>Male</gender><addresses><address><city>Paris</city><country_code>France</country_code></address></addresses></t_person></signatory></from_account><from_country>FRA</from_country></t_from><t_to_my_client><to_funds_code>K</to_funds_code><to_person><last_name>Michel</last_name><first_name>Jean</first_name><title>Himself</title><gender>Prefer not to say</gender><addresses><address><city>Luxembourg</city><country_code>Luxembourg</country_code></address></addresses></to_person><to_country>LUX</to_country></t_to_my_client></transaction></report>