mirror of https://github.com/MISP/PyMISP

Use MISPAbstract as a master class everywhere.
This is probably breaking everything.... ¯\_(ツ)_/¯

branch: pull/111/head
parent: 2a5f3346e4
commit: da2b28a927
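In short: MISPAttribute, MISPEvent, MISPObject, MISPObjectAttribute and MISPObjectReference now all derive from AbstractMISP, which provides dict-style access plus generic from_dict()/from_json() and to_dict()/to_json(), and the old EncodeUpdate/EncodeFull JSON encoders are replaced by the single MISPEncode. A minimal sketch of the resulting usage (illustrative only; it assumes MISPEncode dispatches to the objects' to_dict(), which is what the API code below relies on):

    import json
    from pymisp import MISPAttribute, MISPEncode

    attribute = MISPAttribute()
    attribute.from_dict(type='ip-dst', value='198.51.100.7')

    print(attribute.to_json())                    # serialized via AbstractMISP.to_dict()
    print(json.dumps(attribute, cls=MISPEncode))  # same result, as the API code now does for events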
pymisp/__init__.py (path inferred):

@@ -4,7 +4,8 @@ try:
     from .exceptions import PyMISPError, NewEventError, NewAttributeError, MissingDependency, NoURL, NoKey, InvalidMISPObject, UnknownMISPObjectTemplate  # noqa
     from .api import PyMISP  # noqa
     from .abstract import AbstractMISP, MISPEncode  # noqa
-    from .mispevent import MISPEvent, MISPAttribute, EncodeUpdate, EncodeFull, MISPObjectReference, MISPObjectAttribute, MISPObject, AbstractMISPObjectGenerator  # noqa
+    from .mispevent import MISPEvent, MISPAttribute, MISPObjectReference, MISPObjectAttribute, MISPObject  # noqa
+    from .tools import AbstractMISPObjectGenerator  # noqa
     from .tools import Neo4j  # noqa
     from .tools import stix  # noqa
 except ImportError:
pymisp/abstract.py (path inferred):

@@ -23,29 +23,28 @@ class MISPEncode(JSONEncoder):
 @six.add_metaclass(abc.ABCMeta)  # Remove that line when discarding python2 support.
 class AbstractMISP(collections.MutableMapping):
 
-    attributes = None
+    __not_jsonable = []
 
-    def __init__(self):
-        """Initialize the list of class-level attributes to set in the JSON dump"""
-        # The attribute names will be set automatically by the schemas when we will have them.
-        if self.attributes is None:
-            raise NotImplementedError('{} must define attributes'.format(type(self).__name__))
-        self.attributes = sorted(self.attributes)
-
-    def __check_dict_key(self, key):
-        if key not in self.attributes:
-            raise Exception('{} not a valid key in {}. Alowed keys: {}'.format(
-                key, type(self).__name__, ', '.join(self.attributes)))
-        return True
+    @property
+    def __properties(self):
+        to_return = []
+        for prop, value in vars(self).items():
+            if prop.startswith('_') or prop in self.__not_jsonable:
+                continue
+            to_return.append(prop)
+        return to_return
 
     def from_dict(self, **kwargs):
-        for attribute in self.attributes:
-            val = kwargs.pop(attribute, None)
-            if val is None:
+        for prop, value in kwargs.items():
+            if value is None:
                 continue
-            setattr(self, attribute, val)
-        if kwargs:
-            raise Exception('Unused parameter(s): {}'.format(', '.join(kwargs.keys())))
+            setattr(self, prop, value)
+
+    def update_not_jsonable(self, *args):
+        self.__not_jsonable += args
 
+    def set_not_jsonable(self, *args):
+        self.__not_jsonable = args
 
     def from_json(self, json_string):
         """Load a JSON string"""

@@ -53,7 +52,7 @@ class AbstractMISP(collections.MutableMapping):
 
     def to_dict(self):
         to_return = {}
-        for attribute in self.attributes:
+        for attribute in self.__properties:
             val = getattr(self, attribute, None)
             if val is None:
                 continue

@@ -67,16 +66,13 @@ class AbstractMISP(collections.MutableMapping):
         return json.dumps(self.to_dict(), cls=MISPEncode)
 
     def __getitem__(self, key):
-        if self.__check_dict_key(key):
-            return getattr(self, key)
+        return getattr(self, key)
 
     def __setitem__(self, key, value):
-        if self.__check_dict_key(key):
-            setattr(self, key, value)
+        setattr(self, key, value)
 
     def __delitem__(self, key):
-        if self.__check_dict_key(key):
-            delattr(self, key)
+        delattr(self, key)
 
     def __iter__(self):
         return iter(self.to_dict())
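The new base class builds the JSON payload from whatever instance attributes exist instead of a hardcoded `attributes` list: the `__properties` property walks vars(self) and skips anything starting with '_' (which, thanks to name mangling, covers all the double-underscore internals) as well as anything registered through update_not_jsonable()/set_not_jsonable(). A minimal sketch of that behaviour (the Demo class is made up for illustration):

    from pymisp import AbstractMISP

    class Demo(AbstractMISP):
        pass

    d = Demo()
    d.foo = 'bar'
    d._hidden = 'skipped: starts with an underscore'
    d.update_not_jsonable('internal')
    d.internal = 'skipped: registered as not JSON-able'
    print(d.to_json())   # {"foo": "bar"}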
pymisp/api.py (path inferred):

@@ -37,7 +37,8 @@ except ImportError:
 
 from . import __version__
 from .exceptions import PyMISPError, SearchError, MissingDependency, NoURL, NoKey
-from .mispevent import MISPEvent, MISPAttribute, EncodeUpdate
+from .mispevent import MISPEvent, MISPAttribute
+from .abstract import MISPEncode
 
 logger = logging.getLogger(__name__)
 

@@ -318,7 +319,7 @@ class PyMISP(object):
         session = self.__prepare_session()
         url = urljoin(self.root_url, 'events')
         if isinstance(event, MISPEvent):
-            event = json.dumps(event, cls=EncodeUpdate)
+            event = json.dumps(event, cls=MISPEncode)
         if isinstance(event, basestring):
             response = session.post(url, data=event)
         else:

@@ -334,7 +335,7 @@ class PyMISP(object):
         session = self.__prepare_session()
         url = urljoin(self.root_url, 'events/{}'.format(event_id))
         if isinstance(event, MISPEvent):
-            event = json.dumps(event, cls=EncodeUpdate)
+            event = json.dumps(event, cls=MISPEncode)
         if isinstance(event, basestring):
             response = session.post(url, data=event)
         else:

@@ -440,7 +441,7 @@ class PyMISP(object):
         else:
             session = self.__prepare_session()
             url = urljoin(self.root_url, 'attributes/add/{}'.format(eventID_to_update))
-            response = self._check_response(session.post(url, data=json.dumps(a, cls=EncodeUpdate)))
+            response = self._check_response(session.post(url, data=json.dumps(a, cls=MISPEncode)))
         return response
 
     def add_named_attribute(self, event, type_value, value, category=None, to_ids=False, comment=None, distribution=None, proposal=False, **kwargs):

@@ -737,7 +738,7 @@ class PyMISP(object):
         url = urljoin(self.root_url, 'shadow_attributes/{}/{}'.format(path, id))
         if path in ['add', 'edit']:
             query = {'request': {'ShadowAttribute': attribute}}
-            response = session.post(url, data=json.dumps(query, cls=EncodeUpdate))
+            response = session.post(url, data=json.dumps(query, cls=MISPEncode))
         elif path == 'view':
             response = session.get(url)
         else:  # accept or discard
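On the API side the only change is the encoder: everywhere an event, attribute or proposal is POSTed, json.dumps(..., cls=EncodeUpdate) becomes json.dumps(..., cls=MISPEncode). From the caller's point of view nothing changes; roughly (the URL and key below are placeholders):

    from pymisp import PyMISP, MISPEvent

    misp = PyMISP('https://misp.example.com', 'YOUR_API_KEY')
    event = MISPEvent()
    event.from_dict(info='Example event')
    event.add_attribute('ip-dst', '198.51.100.7')
    misp.add_event(event)   # the MISPEvent is serialized with MISPEncode before being POSTed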
pymisp/data/misp-objects submodule (path inferred):

@@ -1 +1 @@
-Subproject commit 50fe0c2993304e72d82c6cbdadd1bca4013a030e
+Subproject commit 96db4ae070e0a83340a17607e9f7ebbbb20e747b
pymisp/mispevent.py (path inferred):

@@ -4,7 +4,6 @@
 import datetime
 import time
 import json
-from json import JSONEncoder
 import os
 import base64
 from io import BytesIO
@@ -57,19 +56,26 @@ except NameError:
     unicode = str
 
 
-class MISPAttribute(object):
+def _int_to_str(d):
+    # transform all integer back to string
+    for k, v in d.items():
+        if isinstance(v, (int, float)) and not isinstance(v, bool):
+            d[k] = str(v)
+    return d
+
+
+class MISPAttribute(AbstractMISP):
 
     def __init__(self, describe_types=None):
         if not describe_types:
-            self.ressources_path = os.path.join(os.path.abspath(os.path.dirname(__file__)), 'data')
-            with open(os.path.join(self.ressources_path, 'describeTypes.json'), 'r') as f:
+            ressources_path = os.path.join(os.path.abspath(os.path.dirname(__file__)), 'data')
+            with open(os.path.join(ressources_path, 'describeTypes.json'), 'r') as f:
                 t = json.load(f)
                 describe_types = t['result']
-        self.describe_types = describe_types
-        self.categories = describe_types['categories']
-        self.types = describe_types['types']
-        self.category_type_mapping = describe_types['category_type_mappings']
-        self.sane_default = describe_types['sane_defaults']
+        self.__categories = describe_types['categories']
+        self.__types = describe_types['types']
+        self.__category_type_mapping = describe_types['category_type_mappings']
+        self.__sane_default = describe_types['sane_defaults']
         self._reinitialize_attribute()
 
     def _reinitialize_attribute(self):
@@ -137,72 +143,57 @@ class MISPAttribute(object):
 
     def from_dict(self, **kwargs):
         if kwargs.get('type') and kwargs.get('category'):
-            if kwargs['type'] not in self.category_type_mapping[kwargs['category']]:
-                raise NewAttributeError('{} and {} is an invalid combination, type for this category has to be in {}'.format(kwargs.get('type'), kwargs.get('category'), (', '.join(self.category_type_mapping[kwargs['category']]))))
+            if kwargs['type'] not in self.__category_type_mapping[kwargs['category']]:
+                raise NewAttributeError('{} and {} is an invalid combination, type for this category has to be in {}'.format(
+                    kwargs.get('type'), kwargs.get('category'), (', '.join(self.__category_type_mapping[kwargs['category']]))))
         # Required
-        if kwargs.get('type'):
-            self.type = kwargs['type']
-            if self.type not in self.types:
-                raise NewAttributeError('{} is invalid, type has to be in {}'.format(self.type, (', '.join(self.types))))
-        elif not self.type:
+        self.type = kwargs.pop('type', None)
+        if self.type is None:
             raise NewAttributeError('The type of the attribute is required.')
+        if self.type not in self.__types:
+            raise NewAttributeError('{} is invalid, type has to be in {}'.format(self.type, (', '.join(self.__types))))
 
-        type_defaults = self.sane_default[self.type]
+        type_defaults = self.__sane_default[self.type]
 
-        self.value = kwargs.get('value')
+        self.value = kwargs.pop('value', None)
         if self.value is None:
             raise NewAttributeError('The value of the attribute is required.')
 
         # Default values
-        if kwargs.get('category'):
-            self.category = kwargs['category']
-            if self.category not in self.categories:
-                raise NewAttributeError('{} is invalid, category has to be in {}'.format(self.category, (', '.join(self.categories))))
-        else:
-            self.category = type_defaults['default_category']
+        self.category = kwargs.pop('category', type_defaults['default_category'])
+        if self.category not in self.__categories:
+            raise NewAttributeError('{} is invalid, category has to be in {}'.format(self.category, (', '.join(self.__categories))))
 
-        self.to_ids = kwargs.get('to_ids')
-        if self.to_ids is None:
-            self.to_ids = bool(int(type_defaults['to_ids']))
+        self.to_ids = kwargs.pop('to_ids', bool(int(type_defaults['to_ids'])))
         if not isinstance(self.to_ids, bool):
             raise NewAttributeError('{} is invalid, to_ids has to be True or False'.format(self.to_ids))
 
-        if kwargs.get('comment'):
-            self.comment = kwargs['comment']
         if kwargs.get('distribution') is not None:
-            self.distribution = int(kwargs['distribution'])
+            self.distribution = int(kwargs.pop('distribution'))
             if self.distribution not in [0, 1, 2, 3, 4, 5]:
                 raise NewAttributeError('{} is invalid, the distribution has to be in 0, 1, 2, 3, 4, 5'.format(self.distribution))
 
         # other possible values
         if kwargs.get('data'):
-            self.data = kwargs['data']
+            self.data = kwargs.pop('data')
             self._load_data()
         if kwargs.get('id'):
-            self.id = int(kwargs['id'])
-        if kwargs.get('uuid'):
-            self.uuid = kwargs['uuid']
+            self.id = int(kwargs.pop('id'))
         if kwargs.get('timestamp'):
-            self.timestamp = datetime.datetime(1970, 1, 1) + datetime.timedelta(seconds=int(kwargs['timestamp']))
+            self.timestamp = datetime.datetime(1970, 1, 1) + datetime.timedelta(seconds=int(kwargs.pop('timestamp')))
         if kwargs.get('sharing_group_id'):
-            self.sharing_group_id = int(kwargs['sharing_group_id'])
-        if kwargs.get('deleted'):
-            self.deleted = kwargs['deleted']
-        if kwargs.get('SharingGroup'):
-            self.SharingGroup = kwargs['SharingGroup']
-        if kwargs.get('ShadowAttribute'):
-            self.ShadowAttribute = kwargs['ShadowAttribute']
-        if kwargs.get('sig'):
-            self.sig = kwargs['sig']
+            self.sharing_group_id = int(kwargs.pop('sharing_group_id'))
         if kwargs.get('Tag'):
-            self.Tag = [t for t in kwargs['Tag'] if t]
+            self.Tag = [t for t in kwargs.pop('Tag', []) if t]
 
         # If the user wants to disable correlation, let them. Defaults to False.
-        self.disable_correlation = kwargs.get("disable_correlation", False)
+        self.disable_correlation = kwargs.pop("disable_correlation", False)
         if self.disable_correlation is None:
             self.disable_correlation = False
 
+        for k, v in kwargs.items():
+            setattr(self, k, v)
+
     def _prepare_new_malware_sample(self):
         if '|' in self.value:
             # Get the filename, ignore the md5, because humans.
@@ -237,87 +228,32 @@ class MISPAttribute(object):
         # DEPRECATED
         return self.to_dict()
 
-    def to_dict(self):
-        to_return = {'type': self.type, 'category': self.category, 'to_ids': self.to_ids,
-                     'distribution': self.distribution, 'value': self.value,
-                     'comment': self.comment, 'disable_correlation': self.disable_correlation}
-        if self.uuid:
-            to_return['uuid'] = self.uuid
-        if self.sig:
-            to_return['sig'] = self.sig
-        if self.sharing_group_id:
-            to_return['sharing_group_id'] = self.sharing_group_id
-        if self.Tag:
-            to_return['Tag'] = self.Tag
-        if self.data:
+    def to_dict(self, with_timestamp=False):
+        to_return = super(MISPAttribute, self).to_dict()
+        if to_return.get('data'):
             to_return['data'] = base64.b64encode(self.data.getvalue()).decode()
-        if self.encrypt:
-            to_return['encrypt'] = self.encrypt
-        to_return = _int_to_str(to_return)
-        return to_return
-
-    def _json_full(self):
-        to_return = self._json()
-        if self.id:
-            to_return['id'] = self.id
-        if self.timestamp:
-            # Should never be set on an update, MISP will automatically set it to now
+        if with_timestamp and to_return.get('timestamp'):
             to_return['timestamp'] = int(time.mktime(self.timestamp.timetuple()))
-        if self.deleted is not None:
-            to_return['deleted'] = self.deleted
-        if self.ShadowAttribute:
-            to_return['ShadowAttribute'] = self.ShadowAttribute
-        if self.SharingGroup:
-            to_return['SharingGroup'] = self.SharingGroup
+        else:
+            to_return.pop('timestamp', None)
         to_return = _int_to_str(to_return)
         return to_return
 
 
-class EncodeUpdate(JSONEncoder):
-    def default(self, obj):
-        try:
-            return obj._json()
-        except AttributeError:
-            return JSONEncoder.default(self, obj)
-
-
-class EncodeFull(JSONEncoder):
-    def default(self, obj):
-        try:
-            return obj._json_full()
-        except AttributeError:
-            return JSONEncoder.default(self, obj)
-
-
-def _int_to_str(d):
-    # transform all integer back to string
-    for k, v in d.items():
-        if isinstance(v, (int, float)) and not isinstance(v, bool):
-            d[k] = str(v)
-    return d
-
-
-class MISPEvent(object):
+class MISPEvent(AbstractMISP):
 
     def __init__(self, describe_types=None, strict_validation=False):
-        self.ressources_path = os.path.join(os.path.abspath(os.path.dirname(__file__)), 'data')
+        ressources_path = os.path.join(os.path.abspath(os.path.dirname(__file__)), 'data')
         if strict_validation:
-            with open(os.path.join(self.ressources_path, 'schema.json'), 'r') as f:
-                self.json_schema = json.load(f)
+            with open(os.path.join(ressources_path, 'schema.json'), 'r') as f:
+                self.__json_schema = json.load(f)
         else:
-            with open(os.path.join(self.ressources_path, 'schema-lax.json'), 'r') as f:
-                self.json_schema = json.load(f)
+            with open(os.path.join(ressources_path, 'schema-lax.json'), 'r') as f:
+                self.__json_schema = json.load(f)
         if not describe_types:
-            with open(os.path.join(self.ressources_path, 'describeTypes.json'), 'r') as f:
+            with open(os.path.join(ressources_path, 'describeTypes.json'), 'r') as f:
                 t = json.load(f)
                 describe_types = t['result']
-        self.describe_types = describe_types
-        self.categories = describe_types['categories']
-        self.types = describe_types['types']
-        self.category_type_mapping = describe_types['category_type_mappings']
-        self.sane_default = describe_types['sane_defaults']
-        self.new = True
-        self.dump_full = False
-
         self._reinitialize_event()
 
@@ -416,8 +352,6 @@ class MISPEvent(object):
             self.load(f)
 
     def load(self, json_event):
-        self.new = False
-        self.dump_full = True
         if hasattr(json_event, 'read'):
             # python2 and python3 compatible to find if we have a file
             json_event = json_event.read()

@@ -432,7 +366,7 @@ class MISPEvent(object):
         # Invalid event created by MISP up to 2.4.52 (attribute_count is none instead of '0')
         if event.get('Event') and event.get('Event').get('attribute_count') is None:
             event['Event']['attribute_count'] = '0'
-        jsonschema.validate(event, self.json_schema)
+        jsonschema.validate(event, self.__json_schema)
         e = event.get('Event')
         self._reinitialize_event()
         self.set_all_values(**e)
@@ -451,150 +385,100 @@ class MISPEvent(object):
             raise NewEventError('Invalid format for the date: {} - {}'.format(date, type(date)))
 
     def set_all_values(self, **kwargs):
+        # to be deprecated
+        self.from_dict(**kwargs)
+
+    def from_dict(self, **kwargs):
         # Required value
-        if kwargs.get('info'):
-            self.info = kwargs['info']
-        elif not self.info:
+        self.info = kwargs.pop('info', None)
+        if not self.info:
             raise NewAttributeError('The info field of the new event is required.')
 
         # Default values for a valid event to send to a MISP instance
         if kwargs.get('distribution') is not None:
-            self.distribution = int(kwargs['distribution'])
+            self.distribution = int(kwargs.pop('distribution'))
             if self.distribution not in [0, 1, 2, 3, 4]:
-                raise NewEventError('{} is invalid, the distribution has to be in 0, 1, 2, 3, 4'.format(self.distribution))
+                raise NewAttributeError('{} is invalid, the distribution has to be in 0, 1, 2, 3, 4'.format(self.distribution))
 
         if kwargs.get('threat_level_id') is not None:
-            self.threat_level_id = int(kwargs['threat_level_id'])
+            self.threat_level_id = int(kwargs.pop('threat_level_id'))
             if self.threat_level_id not in [1, 2, 3, 4]:
                 raise NewEventError('{} is invalid, the threat_level has to be in 1, 2, 3, 4'.format(self.threat_level_id))
 
         if kwargs.get('analysis') is not None:
-            self.analysis = int(kwargs['analysis'])
+            self.analysis = int(kwargs.pop('analysis'))
             if self.analysis not in [0, 1, 2]:
                 raise NewEventError('{} is invalid, the analysis has to be in 0, 1, 2'.format(self.analysis))
-        if kwargs.get('published') is not None:
-            self.unpublish()
-        if kwargs.get("published") is True:
+        self.published = kwargs.pop('published', None)
+        if self.published is True:
             self.publish()
+        else:
+            self.unpublish()
 
         if kwargs.get('date'):
-            self.set_date(kwargs['date'])
+            self.set_date(kwargs.pop('date'))
         if kwargs.get('Attribute'):
-            for a in kwargs['Attribute']:
-                attribute = MISPAttribute(self.describe_types)
+            for a in kwargs.pop('Attribute'):
+                attribute = MISPAttribute()
                 attribute.set_all_values(**a)
                 self.attributes.append(attribute)
 
         # All other keys
         if kwargs.get('id'):
-            self.id = int(kwargs['id'])
+            self.id = int(kwargs.pop('id'))
         if kwargs.get('orgc_id'):
-            self.orgc_id = int(kwargs['orgc_id'])
+            self.orgc_id = int(kwargs.pop('orgc_id'))
         if kwargs.get('org_id'):
-            self.org_id = int(kwargs['org_id'])
-        if kwargs.get('uuid'):
-            self.uuid = kwargs['uuid']
-        if kwargs.get('attribute_count'):
-            self.attribute_count = int(kwargs['attribute_count'])
+            self.org_id = int(kwargs.pop('org_id'))
         if kwargs.get('timestamp'):
-            self.timestamp = datetime.datetime(1970, 1, 1) + datetime.timedelta(seconds=int(kwargs['timestamp']))
-        if kwargs.get('proposal_email_lock'):
-            self.proposal_email_lock = kwargs['proposal_email_lock']
-        if kwargs.get('locked'):
-            self.locked = kwargs['locked']
+            self.timestamp = datetime.datetime(1970, 1, 1) + datetime.timedelta(seconds=int(kwargs.pop('timestamp')))
         if kwargs.get('publish_timestamp'):
-            self.publish_timestamp = datetime.datetime(1970, 1, 1) + datetime.timedelta(seconds=int(kwargs['publish_timestamp']))
+            self.publish_timestamp = datetime.datetime(1970, 1, 1) + datetime.timedelta(seconds=int(kwargs.pop('publish_timestamp')))
         if kwargs.get('sharing_group_id'):
-            self.sharing_group_id = int(kwargs['sharing_group_id'])
-        if kwargs.get('Org'):
-            self.Org = kwargs['Org']
-        if kwargs.get('Orgc'):
-            self.Orgc = kwargs['Orgc']
-        if kwargs.get('ShadowAttribute'):
-            self.ShadowAttribute = kwargs['ShadowAttribute']
+            self.sharing_group_id = int(kwargs.pop('sharing_group_id'))
         if kwargs.get('RelatedEvent'):
             self.RelatedEvent = []
-            for rel_event in kwargs['RelatedEvent']:
+            for rel_event in kwargs.pop('RelatedEvent'):
                 sub_event = MISPEvent()
                 sub_event.load(rel_event)
                 self.RelatedEvent.append(sub_event)
-        if kwargs.get('Galaxy'):
-            self.Galaxy = kwargs['Galaxy']
         if kwargs.get('Tag'):
-            self.Tag = [t for t in kwargs['Tag'] if t]
-        if kwargs.get('sig'):
-            self.sig = kwargs['sig']
-        if kwargs.get('global_sig'):
-            self.global_sig = kwargs['global_sig']
+            self.Tag = [t for t in kwargs.pop('Tag', []) if t]
         if kwargs.get('Object'):
             self.Object = []
-            for obj in kwargs['Object']:
+            for obj in kwargs.pop('Object'):
                 tmp_object = MISPObject(obj['name'])
                 tmp_object.from_dict(**obj)
                 self.Object.append(tmp_object)
 
+        for k, v in kwargs.items():
+            setattr(self, k, v)
+
     def _json(self):
         # DEPTECATED
         return self.to_dict()
 
-    def to_dict(self):
-        to_return = {'Event': {}}
-        to_return['Event'] = {'distribution': self.distribution, 'info': self.info,
-                              'date': self.date.isoformat(), 'published': self.published,
-                              'threat_level_id': self.threat_level_id,
-                              'analysis': self.analysis, 'Attribute': []}
-        if self.sig:
-            to_return['Event']['sig'] = self.sig
-        if self.global_sig:
-            to_return['Event']['global_sig'] = self.global_sig
-        if self.uuid:
-            to_return['Event']['uuid'] = self.uuid
-        if self.Tag:
-            to_return['Event']['Tag'] = self.Tag
-        if self.Orgc:
-            to_return['Event']['Orgc'] = self.Orgc
-        if self.Galaxy:
-            to_return['Event']['Galaxy'] = self.Galaxy
-        if self.sharing_group_id:
-            to_return['Event']['sharing_group_id'] = self.sharing_group_id
-        to_return['Event'] = _int_to_str(to_return['Event'])
-        if self.attributes:
-            to_return['Event']['Attribute'] = [a._json() for a in self.attributes]
-        jsonschema.validate(to_return, self.json_schema)
-        return to_return
-
-    def _json_full(self):
-        to_return = self._json()
-        if self.id:
-            to_return['Event']['id'] = self.id
-        if self.orgc_id:
-            to_return['Event']['orgc_id'] = self.orgc_id
-        if self.org_id:
-            to_return['Event']['org_id'] = self.org_id
-        if self.locked is not None:
-            to_return['Event']['locked'] = self.locked
-        if self.attribute_count is not None:
-            to_return['Event']['attribute_count'] = self.attribute_count
-        if self.RelatedEvent:
-            to_return['Event']['RelatedEvent'] = []
-            for rel_event in self.RelatedEvent:
-                to_return['Event']['RelatedEvent'].append(rel_event._json_full())
-        if self.Org:
-            to_return['Event']['Org'] = self.Org
-        if self.sharing_group_id:
-            to_return['Event']['sharing_group_id'] = self.sharing_group_id
-        if self.ShadowAttribute:
-            to_return['Event']['ShadowAttribute'] = self.ShadowAttribute
-        if self.proposal_email_lock is not None:
-            to_return['Event']['proposal_email_lock'] = self.proposal_email_lock
-        if self.locked is not None:
-            to_return['Event']['locked'] = self.locked
-        if self.publish_timestamp:
-            to_return['Event']['publish_timestamp'] = int(time.mktime(self.publish_timestamp.timetuple()))
-        if self.timestamp:
-            # Should never be set on an update, MISP will automatically set it to now
-            to_return['Event']['timestamp'] = int(time.mktime(self.timestamp.timetuple()))
-        to_return['Event'] = _int_to_str(to_return['Event'])
-        if self.attributes:
-            to_return['Event']['Attribute'] = [a._json_full() for a in self.attributes]
-        jsonschema.validate(to_return, self.json_schema)
+    def to_dict(self, with_timestamp=False):
+        to_return = super(MISPEvent, self).to_dict()
+        if to_return.get('date'):
+            to_return['date'] = self.date.isoformat()
+        if to_return.get('attributes'):
+            attributes = to_return.pop('attributes')
+            to_return['Attribute'] = [attribute.to_dict(with_timestamp) for attribute in attributes]
+        if to_return.get('RelatedEvent'):
+            to_return['RelatedEvent'] = [rel_event.to_dict() for rel_event in self.RelatedEvent]
+        if with_timestamp and to_return.get('timestamp'):
+            to_return['timestamp'] = int(time.mktime(self.timestamp.timetuple()))
+        else:
+            to_return.pop('timestamp', None)
+        if with_timestamp and to_return.get('publish_timestamp'):
+            to_return['publish_timestamp'] = int(time.mktime(self.publish_timestamp.timetuple()))
+        else:
+            to_return.pop('publish_timestamp', None)
+        to_return = _int_to_str(to_return)
+        to_return = {'Event': to_return}
+        jsonschema.validate(to_return, self.__json_schema)
         return to_return
 
     def add_tag(self, tag):
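MISPEvent.to_dict() now delegates to AbstractMISP and wraps the result in the usual {'Event': {...}} envelope; timestamps are only emitted when explicitly asked for, since MISP sets them itself on update. A rough sketch (event.json is a placeholder for any exported MISP event):

    event = MISPEvent()
    with open('event.json', 'r') as f:
        event.load(f)
    update_payload = event.to_dict()                    # no timestamp/publish_timestamp
    full_payload = event.to_dict(with_timestamp=True)   # epoch timestamps kept
    full_payload['Event'].get('Attribute')              # attributes re-serialized via MISPAttribute.to_dict()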
@@ -627,7 +511,7 @@ class MISPEvent(object):
             raise Exception('No attribute with UUID/ID {} found.'.format(attribute_id))
 
     def add_attribute(self, type, value, **kwargs):
-        attribute = MISPAttribute(self.describe_types)
+        attribute = MISPAttribute()
         if isinstance(value, list):
             for a in value:
                 self.add_attribute(type, a, **kwargs)

@@ -638,8 +522,6 @@ class MISPEvent(object):
 
 class MISPObjectReference(AbstractMISP):
 
-    attributes = ['source_uuid', 'referenced_uuid', 'relationship_type', 'comment', 'uuid', 'deleted']
-
     def __init__(self):
         super(MISPObjectReference, self).__init__()
 
@@ -652,17 +534,11 @@ class MISPObjectReference(AbstractMISP):
             setattr(self, k, v)
 
 
-class MISPObjectAttribute(MISPAttribute, AbstractMISP):
+class MISPObjectAttribute(MISPAttribute):
 
-    # This list is very limited and hardcoded to fit the current needs (file/pe/pesection creation): MISPAttriute will follow the
-    # same spec and just add one attribute: object_relation
-    attributes = ['object_relation', 'value', 'type', 'category', 'disable_correlation', 'to_ids',
-                  'data', 'encrypt', 'distribution', 'comment', 'uuid', 'event_id']
-
     def __init__(self, definition):
-        MISPAttribute.__init__(self)
-        AbstractMISP.__init__(self)
-        self.definition = definition
+        super(MISPAttribute, self).__init__()
+        self.__definition = definition
 
     def from_dict(self, object_relation, value, **kwargs):
         self.object_relation = object_relation

@@ -671,46 +547,43 @@ class MISPObjectAttribute(MISPAttribute, AbstractMISP):
         # Get the misp attribute type from the definition
         self.type = kwargs.pop('type', None)
         if self.type is None:
-            self.type = self.definition.get('misp-attribute')
+            self.type = self.__definition.get('misp-attribute')
         self.disable_correlation = kwargs.pop('disable_correlation', None)
         if self.disable_correlation is None:
             # The correlation can be disabled by default in the object definition.
             # Use this value if it isn't overloaded by the object
-            self.disable_correlation = self.definition.get('disable_correlation')
+            self.disable_correlation = self.__definition.get('disable_correlation')
         self.to_ids = kwargs.pop('to_ids', None)
         if self.to_ids is None:
             # Same for the to_ids flag
-            self.to_ids = self.definition.get('to_ids')
-        # FIXME: dirty hack until all the classes are ported to the new format but we get the default values
+            self.to_ids = self.__definition.get('to_ids')
         kwargs.update(**self)
-        MISPAttribute.from_dict(self, **kwargs)
+        super(MISPAttribute, self).from_dict(**kwargs)
 
 
 class MISPObject(AbstractMISP):
 
-    attributes = ['name', 'meta-category', 'uuid', 'description', 'template_version', 'template_uuid', 'Attribute']
-
     def __init__(self, name, strict=True):
         super(MISPObject, self).__init__()
-        self.strict = strict
+        self.__strict = strict
         self.name = name
-        self.misp_objects_path = os.path.join(
+        self.__misp_objects_path = os.path.join(
             os.path.abspath(os.path.dirname(sys.modules['pymisp'].__file__)),
             'data', 'misp-objects', 'objects')
-        if os.path.exists(os.path.join(self.misp_objects_path, self.name, 'definition.json')):
-            self.known_template = True
+        if os.path.exists(os.path.join(self.__misp_objects_path, self.name, 'definition.json')):
+            self.__known_template = True
         else:
-            if self.strict:
+            if self.__strict:
                 raise UnknownMISPObjectTemplate('{} is unknown in the MISP object directory.')
             else:
-                self.known_template = False
-        if self.known_template:
-            with open(os.path.join(self.misp_objects_path, self.name, 'definition.json'), 'r') as f:
-                self.definition = json.load(f)
-            setattr(self, 'meta-category', self.definition['meta-category'])
-            self.template_uuid = self.definition['uuid']
-            self.description = self.definition['description']
-            self.template_version = self.definition['version']
+                self.__known_template = False
+        if self.__known_template:
+            with open(os.path.join(self.__misp_objects_path, self.name, 'definition.json'), 'r') as f:
+                self.__definition = json.load(f)
+            setattr(self, 'meta-category', self.__definition['meta-category'])
+            self.template_uuid = self.__definition['uuid']
+            self.description = self.__definition['description']
+            self.template_version = self.__definition['version']
         else:
             # FIXME We need to set something for meta-category, template_uuid, description and template_version
             pass
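Why the wholesale rename to double-underscore attributes: Python mangles them to _MISPObject__definition and friends, so the leading-underscore filter in AbstractMISP.__properties keeps this bookkeeping out of the serialized object. For instance (assuming the misp-objects submodule data is present on disk):

    obj = MISPObject('file')
    '_MISPObject__definition' in vars(obj)   # True - mangled, therefore never exported by to_dict()
    obj.name                                 # 'file' - plain attributes are what ends up in the JSON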
@@ -719,17 +592,17 @@ class MISPObject(AbstractMISP):
         self.ObjectReference = []
 
     def from_dict(self, **kwargs):
-        if self.known_template:
+        if self.__known_template:
             if kwargs.get('template_uuid') and kwargs['template_uuid'] != self.template_uuid:
-                if self.strict:
+                if self.__strict:
                     raise UnknownMISPObjectTemplate('UUID of the object is different from the one of the template.')
                 else:
-                    self.known_template = False
+                    self.__known_template = False
             if kwargs.get('template_version') and int(kwargs['template_version']) != self.template_version:
                 if self.strict:
                     raise UnknownMISPObjectTemplate('Version of the object ({}) is different from the one of the template ({}).'.format(kwargs['template_version'], self.template_version))
                 else:
-                    self.known_template = False
+                    self.__known_template = False
 
         for key, value in kwargs.items():
             if key == 'Attribute':

@@ -742,12 +615,12 @@ class MISPObject(AbstractMISP):
                 setattr(self, key, value)
 
     def to_dict(self, strict=True):
-        if strict or self.strict and self.known_template:
+        if strict or self.__strict and self.__known_template:
             self._validate()
         return super(MISPObject, self).to_dict()
 
     def to_json(self, strict=True):
-        if strict or self.strict and self.known_template:
+        if strict or self.__strict and self.__known_template:
             self._validate()
         return super(MISPObject, self).to_json()
 

@@ -760,14 +633,14 @@ class MISPObject(AbstractMISP):
         for key, counter in count_relations.items():
             if counter == 1:
                 continue
-            if not self.definition['attributes'][key].get('multiple'):
+            if not self.__definition['attributes'][key].get('multiple'):
                 raise InvalidMISPObject('Multiple occurrences of {} is not allowed'.format(key))
         all_attribute_names = set(count_relations.keys())
-        if self.definition.get('requiredOneOf'):
-            if not set(self.definition['requiredOneOf']) & all_attribute_names:
-                raise InvalidMISPObject('At least one of the following attributes is required: {}'.format(', '.join(self.definition['requiredOneOf'])))
-        if self.definition.get('required'):
-            for r in self.definition.get('required'):
+        if self.__definition.get('requiredOneOf'):
+            if not set(self.__definition['requiredOneOf']) & all_attribute_names:
+                raise InvalidMISPObject('At least one of the following attributes is required: {}'.format(', '.join(self.__definition['requiredOneOf'])))
+        if self.__definition.get('required'):
+            for r in self.__definition.get('required'):
                 if r not in all_attribute_names:
                     raise InvalidMISPObject('{} is required'.format(r))
         return True

@@ -788,8 +661,8 @@ class MISPObject(AbstractMISP):
     def add_attribute(self, object_relation, **value):
         if value.get('value') is None:
             return None
-        if self.known_template:
-            attribute = MISPObjectAttribute(self.definition['attributes'][object_relation])
+        if self.__known_template:
+            attribute = MISPObjectAttribute(self.__definition['attributes'][object_relation])
         else:
             attribute = MISPObjectAttribute({})
         attribute.from_dict(object_relation, **value)
pymisp/tools/__init__.py (path inferred):

@@ -4,3 +4,4 @@ from .peobject import PEObject, PESectionObject  # noqa
 from .elfobject import ELFObject, ELFSectionObject  # noqa
 from .machoobject import MachOObject, MachOSectionObject  # noqa
 from .create_misp_object import make_binary_objects  # noqa
+from .abstractgenerator import AbstractMISPObjectGenerator  # noqa
pymisp/tools/elfobject.py (path inferred):

@@ -29,35 +29,35 @@ class ELFObject(AbstractMISPObjectGenerator):
             raise ImportError('Please install lief, documentation here: https://github.com/lief-project/LIEF')
         if pseudofile:
             if isinstance(pseudofile, BytesIO):
-                self.elf = lief.ELF.parse(raw=pseudofile.getvalue())
+                self.__elf = lief.ELF.parse(raw=pseudofile.getvalue())
             elif isinstance(pseudofile, bytes):
-                self.elf = lief.ELF.parse(raw=pseudofile)
+                self.__elf = lief.ELF.parse(raw=pseudofile)
             else:
                 raise Exception('Pseudo file can be BytesIO or bytes got {}'.format(type(pseudofile)))
         elif filepath:
-            self.elf = lief.ELF.parse(filepath)
+            self.__elf = lief.ELF.parse(filepath)
         elif parsed:
             # Got an already parsed blob
             if isinstance(parsed, lief.ELF.Binary):
-                self.elf = parsed
+                self.__elf = parsed
             else:
                 raise Exception('Not a lief.ELF.Binary: {}'.format(type(parsed)))
-        # Python3 way
-        # super().__init__('elf')
         super(ELFObject, self).__init__('elf')
         self.generate_attributes()
+        # Mark as non_jsonable because we need to add them manually
+        self.update_not_jsonable('ObjectReference')
 
     def generate_attributes(self):
         # General information
-        self.add_attribute('type', value=str(self.elf.header.file_type).split('.')[1])
-        self.add_attribute('entrypoint-address', value=self.elf.entrypoint)
-        self.add_attribute('arch', value=str(self.elf.header.machine_type).split('.')[1])
-        self.add_attribute('os_abi', value=str(self.elf.header.identity_os_abi).split('.')[1])
+        self.add_attribute('type', value=str(self.__elf.header.file_type).split('.')[1])
+        self.add_attribute('entrypoint-address', value=self.__elf.entrypoint)
+        self.add_attribute('arch', value=str(self.__elf.header.machine_type).split('.')[1])
+        self.add_attribute('os_abi', value=str(self.__elf.header.identity_os_abi).split('.')[1])
         # Sections
         self.sections = []
-        if self.elf.sections:
+        if self.__elf.sections:
             pos = 0
-            for section in self.elf.sections:
+            for section in self.__elf.sections:
                 s = ELFSectionObject(section)
                 self.add_reference(s.uuid, 'included-in', 'Section {} of ELF'.format(pos))
                 pos += 1

@@ -71,21 +71,23 @@ class ELFSectionObject(AbstractMISPObjectGenerator):
         # Python3 way
         # super().__init__('pe-section')
         super(ELFSectionObject, self).__init__('elf-section')
-        self.section = section
-        self.data = bytes(self.section.content)
+        self.__section = section
+        self.__data = bytes(self.__section.content)
         self.generate_attributes()
+        # Mark as non_jsonable because we need to add them manually
+        self.update_not_jsonable('ObjectReference')
 
     def generate_attributes(self):
-        self.add_attribute('name', value=self.section.name)
-        self.add_attribute('type', value=str(self.section.type).split('.')[1])
-        for flag in self.section.flags_list:
+        self.add_attribute('name', value=self.__section.name)
+        self.add_attribute('type', value=str(self.__section.type).split('.')[1])
+        for flag in self.__section.flags_list:
             self.add_attribute('flag', value=str(flag).split('.')[1])
-        size = self.add_attribute('size-in-bytes', value=self.section.size)
+        size = self.add_attribute('size-in-bytes', value=self.__section.size)
         if int(size.value) > 0:
-            self.add_attribute('entropy', value=self.section.entropy)
-            self.add_attribute('md5', value=md5(self.data).hexdigest())
-            self.add_attribute('sha1', value=sha1(self.data).hexdigest())
-            self.add_attribute('sha256', value=sha256(self.data).hexdigest())
-            self.add_attribute('sha512', value=sha512(self.data).hexdigest())
+            self.add_attribute('entropy', value=self.__section.entropy)
+            self.add_attribute('md5', value=md5(self.__data).hexdigest())
+            self.add_attribute('sha1', value=sha1(self.__data).hexdigest())
+            self.add_attribute('sha256', value=sha256(self.__data).hexdigest())
+            self.add_attribute('sha512', value=sha512(self.__data).hexdigest())
             if HAS_PYDEEP:
-                self.add_attribute('ssdeep', value=pydeep.hash_buf(self.data).decode())
+                self.add_attribute('ssdeep', value=pydeep.hash_buf(self.__data).decode())
pymisp/tools/fileobject.py (path inferred):

@@ -33,34 +33,36 @@ class FileObject(AbstractMISPObjectGenerator):
             self.filepath = filepath
             self.filename = os.path.basename(self.filepath)
             with open(filepath, 'rb') as f:
-                self.pseudofile = BytesIO(f.read())
+                self.__pseudofile = BytesIO(f.read())
         elif pseudofile and isinstance(pseudofile, BytesIO):
             # WARNING: lief.parse requires a path
             self.filepath = None
-            self.pseudofile = pseudofile
+            self.__pseudofile = pseudofile
             self.filename = filename
         else:
             raise Exception('File buffer (BytesIO) or a path is required.')
         # PY3 way:
         # super().__init__('file')
         super(FileObject, self).__init__('file')
-        self.data = self.pseudofile.getvalue()
+        self.__data = self.__pseudofile.getvalue()
         self.generate_attributes()
+        # Mark as non_jsonable because we need to add them manually
+        self.update_not_jsonable('ObjectReference')
 
     def generate_attributes(self):
         self.add_attribute('filename', value=self.filename)
-        size = self.add_attribute('size-in-bytes', value=len(self.data))
+        size = self.add_attribute('size-in-bytes', value=len(self.__data))
         if int(size.value) > 0:
-            self.add_attribute('entropy', value=self.__entropy_H(self.data))
-            self.add_attribute('md5', value=md5(self.data).hexdigest())
-            self.add_attribute('sha1', value=sha1(self.data).hexdigest())
-            self.add_attribute('sha256', value=sha256(self.data).hexdigest())
-            self.add_attribute('sha512', value=sha512(self.data).hexdigest())
-            self.add_attribute('malware-sample', value=self.filename, data=self.pseudofile)
+            self.add_attribute('entropy', value=self.__entropy_H(self.__data))
+            self.add_attribute('md5', value=md5(self.__data).hexdigest())
+            self.add_attribute('sha1', value=sha1(self.__data).hexdigest())
+            self.add_attribute('sha256', value=sha256(self.__data).hexdigest())
+            self.add_attribute('sha512', value=sha512(self.__data).hexdigest())
+            self.add_attribute('malware-sample', value=self.filename, data=self.__pseudofile)
             if HAS_MAGIC:
-                self.add_attribute('mimetype', value=magic.from_buffer(self.data))
+                self.add_attribute('mimetype', value=magic.from_buffer(self.__data))
             if HAS_PYDEEP:
-                self.add_attribute('ssdeep', value=pydeep.hash_buf(self.data).decode())
+                self.add_attribute('ssdeep', value=pydeep.hash_buf(self.__data).decode())
 
     def __entropy_H(self, data):
         """Calculate the entropy of a chunk of data."""
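Typical use of the generators is unchanged; only their internal state moved to mangled attributes so it no longer leaks into the object's JSON. A minimal sketch (sample.exe is a placeholder path; python-magic and pydeep are optional extras):

    from pymisp.tools import FileObject

    f_obj = FileObject(filepath='sample.exe')
    f_obj.to_dict()   # includes the generated Attribute list, but not ObjectReference
                      # (registered as non-JSONable and pushed separately)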
@ -29,35 +29,37 @@ class MachOObject(AbstractMISPObjectGenerator):
|
||||||
raise ImportError('Please install lief, documentation here: https://github.com/lief-project/LIEF')
|
raise ImportError('Please install lief, documentation here: https://github.com/lief-project/LIEF')
|
||||||
if pseudofile:
|
if pseudofile:
|
||||||
if isinstance(pseudofile, BytesIO):
|
if isinstance(pseudofile, BytesIO):
|
||||||
self.macho = lief.MachO.parse(raw=pseudofile.getvalue())
|
self.__macho = lief.MachO.parse(raw=pseudofile.getvalue())
|
||||||
elif isinstance(pseudofile, bytes):
|
elif isinstance(pseudofile, bytes):
|
||||||
self.macho = lief.MachO.parse(raw=pseudofile)
|
self.__macho = lief.MachO.parse(raw=pseudofile)
|
||||||
else:
|
else:
|
||||||
raise Exception('Pseudo file can be BytesIO or bytes got {}'.format(type(pseudofile)))
|
raise Exception('Pseudo file can be BytesIO or bytes got {}'.format(type(pseudofile)))
|
||||||
elif filepath:
|
elif filepath:
|
||||||
self.macho = lief.MachO.parse(filepath)
|
self.__macho = lief.MachO.parse(filepath)
|
||||||
elif parsed:
|
elif parsed:
|
||||||
# Got an already parsed blob
|
# Got an already parsed blob
|
||||||
if isinstance(parsed, lief.MachO.Binary):
|
if isinstance(parsed, lief.MachO.Binary):
|
||||||
self.macho = parsed
|
self.__macho = parsed
|
||||||
else:
|
else:
|
||||||
raise Exception('Not a lief.MachO.Binary: {}'.format(type(parsed)))
|
raise Exception('Not a lief.MachO.Binary: {}'.format(type(parsed)))
|
||||||
# Python3 way
|
# Python3 way
|
||||||
# super().__init__('elf')
|
# super().__init__('elf')
|
||||||
super(MachOObject, self).__init__('macho')
|
super(MachOObject, self).__init__('macho')
|
||||||
self.generate_attributes()
|
self.generate_attributes()
|
||||||
|
# Mark as non_jsonable because we need to add them manually
|
||||||
|
self.update_not_jsonable(['ObjectReference'])
|
||||||
|
|
||||||
def generate_attributes(self):
|
def generate_attributes(self):
|
||||||
self.add_attribute('type', value=str(self.macho.header.file_type).split('.')[1])
|
self.add_attribute('type', value=str(self.__macho.header.file_type).split('.')[1])
|
||||||
self.add_attribute('name', value=self.macho.name)
|
self.add_attribute('name', value=self.__macho.name)
|
||||||
# General information
|
# General information
|
||||||
if self.macho.has_entrypoint:
|
if self.__macho.has_entrypoint:
|
||||||
self.add_attribute('entrypoint-address', value=self.macho.entrypoint)
|
self.add_attribute('entrypoint-address', value=self.__macho.entrypoint)
|
||||||
# Sections
|
# Sections
|
||||||
self.sections = []
|
self.sections = []
|
||||||
if self.macho.sections:
|
if self.__macho.sections:
|
||||||
pos = 0
|
pos = 0
|
||||||
for section in self.macho.sections:
|
for section in self.__macho.sections:
|
||||||
s = MachOSectionObject(section)
|
s = MachOSectionObject(section)
|
||||||
self.add_reference(s.uuid, 'included-in', 'Section {} of MachO'.format(pos))
|
self.add_reference(s.uuid, 'included-in', 'Section {} of MachO'.format(pos))
|
||||||
pos += 1
|
pos += 1
|
||||||
|
@@ -71,18 +73,20 @@ class MachOSectionObject(AbstractMISPObjectGenerator):
         # Python3 way
         # super().__init__('pe-section')
         super(MachOSectionObject, self).__init__('macho-section')
-        self.section = section
+        self.__section = section
-        self.data = bytes(self.section.content)
+        self.__data = bytes(self.__section.content)
         self.generate_attributes()
+        # Mark as non_jsonable because we need to add them manually
+        self.update_not_jsonable(['ObjectReference'])
 
     def generate_attributes(self):
-        self.add_attribute('name', value=self.section.name)
+        self.add_attribute('name', value=self.__section.name)
-        size = self.add_attribute('size-in-bytes', value=self.section.size)
+        size = self.add_attribute('size-in-bytes', value=self.__section.size)
         if int(size.value) > 0:
-            self.add_attribute('entropy', value=self.section.entropy)
+            self.add_attribute('entropy', value=self.__section.entropy)
-            self.add_attribute('md5', value=md5(self.data).hexdigest())
+            self.add_attribute('md5', value=md5(self.__data).hexdigest())
-            self.add_attribute('sha1', value=sha1(self.data).hexdigest())
+            self.add_attribute('sha1', value=sha1(self.__data).hexdigest())
-            self.add_attribute('sha256', value=sha256(self.data).hexdigest())
+            self.add_attribute('sha256', value=sha256(self.__data).hexdigest())
-            self.add_attribute('sha512', value=sha512(self.data).hexdigest())
+            self.add_attribute('sha512', value=sha512(self.__data).hexdigest())
             if HAS_PYDEEP:
-                self.add_attribute('ssdeep', value=pydeep.hash_buf(self.data).decode())
+                self.add_attribute('ssdeep', value=pydeep.hash_buf(self.__data).decode())
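The per-section digests above follow one repeated pattern; a standalone sketch of the same idea (the optional pydeep guard mirrors the HAS_PYDEEP flag used in the module):

    from hashlib import md5, sha1, sha256, sha512

    try:
        import pydeep
        HAS_PYDEEP = True
    except ImportError:
        HAS_PYDEEP = False

    def hash_section(content):
        # Compute the digests recorded for a section's raw bytes.
        data = bytes(content)
        digests = {'md5': md5(data).hexdigest(),
                   'sha1': sha1(data).hexdigest(),
                   'sha256': sha256(data).hexdigest(),
                   'sha512': sha512(data).hexdigest()}
        if HAS_PYDEEP:
            digests['ssdeep'] = pydeep.hash_buf(data).decode()
        return digests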
@@ -30,36 +30,38 @@ class PEObject(AbstractMISPObjectGenerator):
             raise ImportError('Please install lief, documentation here: https://github.com/lief-project/LIEF')
         if pseudofile:
             if isinstance(pseudofile, BytesIO):
-                self.pe = lief.PE.parse(raw=pseudofile.getvalue())
+                self.__pe = lief.PE.parse(raw=pseudofile.getvalue())
             elif isinstance(pseudofile, bytes):
-                self.pe = lief.PE.parse(raw=pseudofile)
+                self.__pe = lief.PE.parse(raw=pseudofile)
             else:
                 raise Exception('Pseudo file can be BytesIO or bytes got {}'.format(type(pseudofile)))
         elif filepath:
-            self.pe = lief.PE.parse(filepath)
+            self.__pe = lief.PE.parse(filepath)
         elif parsed:
             # Got an already parsed blob
             if isinstance(parsed, lief.PE.Binary):
-                self.pe = parsed
+                self.__pe = parsed
             else:
                 raise Exception('Not a lief.PE.Binary: {}'.format(type(parsed)))
         # Python3 way
         # super().__init__('pe')
         super(PEObject, self).__init__('pe')
         self.generate_attributes()
+        # Mark as non_jsonable because we need to add them manually
+        self.update_not_jsonable('ObjectReference')
 
     def _is_exe(self):
         if not self._is_dll() and not self._is_driver():
-            return self.pe.header.has_characteristic(lief.PE.HEADER_CHARACTERISTICS.EXECUTABLE_IMAGE)
+            return self.__pe.header.has_characteristic(lief.PE.HEADER_CHARACTERISTICS.EXECUTABLE_IMAGE)
         return False
 
     def _is_dll(self):
-        return self.pe.header.has_characteristic(lief.PE.HEADER_CHARACTERISTICS.DLL)
+        return self.__pe.header.has_characteristic(lief.PE.HEADER_CHARACTERISTICS.DLL)
 
     def _is_driver(self):
         # List from pefile
         system_DLLs = set(('ntoskrnl.exe', 'hal.dll', 'ndis.sys', 'bootvid.dll', 'kdcom.dll'))
-        if system_DLLs.intersection([imp.lower() for imp in self.pe.libraries]):
+        if system_DLLs.intersection([imp.lower() for imp in self.__pe.libraries]):
             return True
         return False
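For reference, the exe/dll/driver classification used by PEObject boils down to a few checks over a parsed binary; a restated sketch, where pe is assumed to be the result of lief.PE.parse(...):

    import lief

    def is_dll(pe):
        # DLL characteristic flag in the PE header
        return pe.header.has_characteristic(lief.PE.HEADER_CHARACTERISTICS.DLL)

    def is_driver(pe):
        # Heuristic borrowed from pefile: importing a core kernel DLL marks a driver
        system_dlls = {'ntoskrnl.exe', 'hal.dll', 'ndis.sys', 'bootvid.dll', 'kdcom.dll'}
        return bool(system_dlls.intersection(lib.lower() for lib in pe.libraries))

    def is_exe(pe):
        # Neither DLL nor driver, but flagged as an executable image
        if not is_dll(pe) and not is_driver(pe):
            return pe.header.has_characteristic(lief.PE.HEADER_CHARACTERISTICS.EXECUTABLE_IMAGE)
        return False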
@@ -76,20 +78,20 @@ class PEObject(AbstractMISPObjectGenerator):
     def generate_attributes(self):
         self.add_attribute('type', value=self._get_pe_type())
         # General information
-        self.add_attribute('entrypoint-address', value=self.pe.entrypoint)
+        self.add_attribute('entrypoint-address', value=self.__pe.entrypoint)
-        self.add_attribute('compilation-timestamp', value=datetime.utcfromtimestamp(self.pe.header.time_date_stamps).isoformat())
+        self.add_attribute('compilation-timestamp', value=datetime.utcfromtimestamp(self.__pe.header.time_date_stamps).isoformat())
-        # self.imphash = self.pe.get_imphash()
+        # self.imphash = self.__pe.get_imphash()
         try:
-            if (self.pe.has_resources and
+            if (self.__pe.has_resources and
-                    self.pe.resources_manager.has_version and
+                    self.__pe.resources_manager.has_version and
-                    self.pe.resources_manager.version.has_string_file_info and
+                    self.__pe.resources_manager.version.has_string_file_info and
-                    self.pe.resources_manager.version.string_file_info.langcode_items):
+                    self.__pe.resources_manager.version.string_file_info.langcode_items):
-                fileinfo = dict(self.pe.resources_manager.version.string_file_info.langcode_items[0].items.items())
+                fileinfo = dict(self.__pe.resources_manager.version.string_file_info.langcode_items[0].items.items())
                 self.add_attribute('original-filename', value=fileinfo.get('OriginalFilename'))
                 self.add_attribute('internal-filename', value=fileinfo.get('InternalName'))
                 self.add_attribute('file-description', value=fileinfo.get('FileDescription'))
                 self.add_attribute('file-version', value=fileinfo.get('FileVersion'))
-                self.add_attribute('lang-id', value=self.pe.resources_manager.version.string_file_info.langcode_items[0].key)
+                self.add_attribute('lang-id', value=self.__pe.resources_manager.version.string_file_info.langcode_items[0].key)
                 self.add_attribute('product-name', value=fileinfo.get('ProductName'))
                 self.add_attribute('product-version', value=fileinfo.get('ProductVersion'))
                 self.add_attribute('company-name', value=fileinfo.get('CompanyName'))
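The compilation-timestamp attribute is the PE header's time_date_stamps epoch value rendered as an ISO 8601 string; a throwaway illustration with a made-up value:

    from datetime import datetime

    # 1506816000 is a hypothetical time_date_stamps value
    print(datetime.utcfromtimestamp(1506816000).isoformat())  # -> '2017-10-01T00:00:00'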
@@ -99,13 +101,13 @@ class PEObject(AbstractMISPObjectGenerator):
             pass
         # Sections
         self.sections = []
-        if self.pe.sections:
+        if self.__pe.sections:
             pos = 0
-            for section in self.pe.sections:
+            for section in self.__pe.sections:
                 s = PESectionObject(section)
                 self.add_reference(s.uuid, 'included-in', 'Section {} of PE'.format(pos))
-                if ((self.pe.entrypoint >= section.virtual_address) and
+                if ((self.__pe.entrypoint >= section.virtual_address) and
-                        (self.pe.entrypoint < (section.virtual_address + section.virtual_size))):
+                        (self.__pe.entrypoint < (section.virtual_address + section.virtual_size))):
                     self.add_attribute('entrypoint-section-at-position', value='{}|{}'.format(section.name, pos))
                 pos += 1
                 self.sections.append(s)
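The entrypoint-section check above is a containment test on the section's virtual address range; restated as a small helper (pe and section are assumed to come from lief, as in the class):

    def section_contains_entrypoint(pe, section):
        # True when the entrypoint falls inside [virtual_address, virtual_address + virtual_size)
        start = section.virtual_address
        end = section.virtual_address + section.virtual_size
        return start <= pe.entrypoint < end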
@@ -119,18 +121,20 @@ class PESectionObject(AbstractMISPObjectGenerator):
         # Python3 way
         # super().__init__('pe-section')
         super(PESectionObject, self).__init__('pe-section')
-        self.section = section
+        self.__section = section
-        self.data = bytes(self.section.content)
+        self.__data = bytes(self.__section.content)
         self.generate_attributes()
+        # Mark as non_jsonable because we need to add them manually
+        self.update_not_jsonable('ObjectReference')
 
     def generate_attributes(self):
-        self.add_attribute('name', value=self.section.name)
+        self.add_attribute('name', value=self.__section.name)
-        size = self.add_attribute('size-in-bytes', value=self.section.size)
+        size = self.add_attribute('size-in-bytes', value=self.__section.size)
         if int(size.value) > 0:
-            self.add_attribute('entropy', value=self.section.entropy)
+            self.add_attribute('entropy', value=self.__section.entropy)
-            self.add_attribute('md5', value=md5(self.data).hexdigest())
+            self.add_attribute('md5', value=md5(self.__data).hexdigest())
-            self.add_attribute('sha1', value=sha1(self.data).hexdigest())
+            self.add_attribute('sha1', value=sha1(self.__data).hexdigest())
-            self.add_attribute('sha256', value=sha256(self.data).hexdigest())
+            self.add_attribute('sha256', value=sha256(self.__data).hexdigest())
-            self.add_attribute('sha512', value=sha512(self.data).hexdigest())
+            self.add_attribute('sha512', value=sha512(self.__data).hexdigest())
             if HAS_PYDEEP:
-                self.add_attribute('ssdeep', value=pydeep.hash_buf(self.data).decode())
+                self.add_attribute('ssdeep', value=pydeep.hash_buf(self.__data).decode())
@@ -10,10 +10,8 @@ import pymisp as pm
 from pymisp import PyMISP
 # from pymisp import NewEventError
 from pymisp import MISPEvent
-from pymisp import EncodeUpdate
-from pymisp import EncodeFull
 
 from pymisp import MISPEncode
 
 from pymisp.tools import make_binary_objects
 
@@ -135,8 +133,7 @@ class TestOffline(unittest.TestCase):
         misp_event = MISPEvent(pymisp.describe_types)
         with open('tests/57c4445b-c548-4654-af0b-4be3950d210f.json', 'r') as f:
             misp_event.load(f.read())
-        json.dumps(misp_event, cls=EncodeUpdate)
+        json.dumps(misp_event, cls=MISPEncode)
-        json.dumps(misp_event, cls=EncodeFull)
 
     def test_searchIndexByTagId(self, m):
         self.initURI(m)
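As the test change shows, callers that serialized events with the removed EncodeUpdate/EncodeFull encoders can pass MISPEncode instead; a minimal sketch mirroring the updated test:

    import json
    import pymisp
    from pymisp import MISPEvent, MISPEncode

    misp_event = MISPEvent(pymisp.describe_types)
    with open('tests/57c4445b-c548-4654-af0b-4be3950d210f.json', 'r') as f:
        misp_event.load(f.read())

    # previously: json.dumps(misp_event, cls=EncodeUpdate) / cls=EncodeFull
    dumped = json.dumps(misp_event, cls=MISPEncode)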