#!/usr/bin/env python
# -*- coding: utf-8 -*-
import datetime
import time
import json
from json import JSONEncoder
import os
import warnings
import base64
from io import BytesIO
from zipfile import ZipFile
import hashlib

try:
    from dateutil.parser import parse
except ImportError:
    pass

try:
    import jsonschema
except ImportError:
    pass

try:
    # pyme was renamed to gpg on 2016-10-28
    import gpg
    from gpg.constants.sig import mode
    has_pyme = True
except ImportError:
    try:
        # pyme was renamed to gpg on 2016-10-28
        import pyme as gpg
        from pyme.constants.sig import mode
        has_pyme = True
    except ImportError:
        has_pyme = False

from .exceptions import PyMISPError, NewEventError, NewAttributeError

# Least dirty way to support python 2 and 3
try:
    basestring
    unicode
    warnings.warn("You're using python 2, it is strongly recommended to use python >=3.4")
except NameError:
    basestring = str
    unicode = str


class MISPAttribute(object):
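    """A single MISP attribute, validated against the type/category definitions from describe_types."""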

    def __init__(self, describe_types):
        self.categories = describe_types['categories']
        self.types = describe_types['types']
        self.category_type_mapping = describe_types['category_type_mappings']
        self.sane_default = describe_types['sane_defaults']
        self._reinitialize_attribute()

    def _reinitialize_attribute(self):
        # Default values
        self.category = None
        self.type = None
        self.value = None
        self.to_ids = False
        self.comment = ''
        self.distribution = 5

        # other possible values
        self.data = None
        self.encrypt = False
        self.id = None
        self.uuid = None
        self.timestamp = None
        self.sharing_group_id = None
        self.deleted = None
        self.sig = None
        self.SharingGroup = []
        self.ShadowAttribute = []
        self.disable_correlation = False
        self.RelatedAttribute = []
        self.Tag = []

    def _serialize(self):
        return '{type}{category}{to_ids}{uuid}{timestamp}{comment}{deleted}{value}'.format(
            type=self.type, category=self.category, to_ids=self.to_ids, uuid=self.uuid, timestamp=self.timestamp,
            comment=self.comment, deleted=self.deleted, value=self.value).encode()

    def sign(self, gpg_uid, passphrase=None):
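        """Sign the serialized attribute with the GnuPG key matching gpg_uid; the detached signature is stored base64-encoded in self.sig."""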
        if not has_pyme:
            raise PyMISPError('pyme is required, please install: pip install --pre pyme3. You will also need libgpg-error-dev and libgpgme11-dev.')
        to_sign = self._serialize()
        with gpg.Context() as c:
            keys = list(c.keylist(gpg_uid))
            c.signers = keys[:1]
            if passphrase:
                c.set_passphrase_cb(lambda *args: passphrase)
            signed, _ = c.sign(to_sign, mode=mode.DETACH)
            self.sig = base64.b64encode(signed).decode()

    def delete(self):
        self.deleted = True

    def add_tag(self, tag):
        self.Tag.append({'name': tag})

    def verify(self, gpg_uid):
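        """Verify self.sig against the serialized attribute; returns {self.uuid: True} on success, {self.uuid: False} otherwise."""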
        if not has_pyme:
            raise PyMISPError('pyme is required, please install: pip install --pre pyme3. You will also need libgpg-error-dev and libgpgme11-dev.')
        signed_data = self._serialize()
        with gpg.Context() as c:
            keys = list(c.keylist(gpg_uid))
            try:
                c.verify(signed_data, signature=base64.b64decode(self.sig), verify=keys[:1])
                return {self.uuid: True}
            except Exception:
                return {self.uuid: False}

    def set_all_values(self, **kwargs):
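        """Initialize or update the attribute from keyword arguments, validating type, category, to_ids and distribution against describe_types."""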
        if kwargs.get('type') and kwargs.get('category'):
            if kwargs['type'] not in self.category_type_mapping[kwargs['category']]:
                raise NewAttributeError('{} and {} is an invalid combination, type for this category has to be in {}'.format(
                    kwargs.get('type'), kwargs.get('category'), (', '.join(self.category_type_mapping[kwargs['category']]))))
        # Required
        if kwargs.get('type'):
            self.type = kwargs['type']
            if self.type not in self.types:
                raise NewAttributeError('{} is invalid, type has to be in {}'.format(self.type, (', '.join(self.types))))
        elif not self.type:
            raise NewAttributeError('The type of the attribute is required.')

        type_defaults = self.sane_default[self.type]

        self.value = kwargs.get('value')
        if self.value is None:
            raise NewAttributeError('The value of the attribute is required.')

        # Default values
        if kwargs.get('category'):
            self.category = kwargs['category']
            if self.category not in self.categories:
                raise NewAttributeError('{} is invalid, category has to be in {}'.format(self.category, (', '.join(self.categories))))
        else:
            self.category = type_defaults['default_category']

        self.to_ids = kwargs.get('to_ids')
        if self.to_ids is None:
            self.to_ids = bool(int(type_defaults['to_ids']))
        if not isinstance(self.to_ids, bool):
            raise NewAttributeError('{} is invalid, to_ids has to be True or False'.format(self.to_ids))

        if kwargs.get('comment'):
            self.comment = kwargs['comment']
        if kwargs.get('distribution') is not None:
            self.distribution = int(kwargs['distribution'])
            if self.distribution not in [0, 1, 2, 3, 4, 5]:
                raise NewAttributeError('{} is invalid, the distribution has to be in 0, 1, 2, 3, 4, 5'.format(self.distribution))

        # other possible values
        if kwargs.get('data'):
            self.data = kwargs['data']
            self._load_data()
        if kwargs.get('id'):
            self.id = int(kwargs['id'])
        if kwargs.get('uuid'):
            self.uuid = kwargs['uuid']
        if kwargs.get('timestamp'):
            self.timestamp = datetime.datetime(1970, 1, 1) + datetime.timedelta(seconds=int(kwargs['timestamp']))
        if kwargs.get('sharing_group_id'):
            self.sharing_group_id = int(kwargs['sharing_group_id'])
        if kwargs.get('deleted'):
            self.deleted = kwargs['deleted']
        if kwargs.get('SharingGroup'):
            self.SharingGroup = kwargs['SharingGroup']
        if kwargs.get('ShadowAttribute'):
            self.ShadowAttribute = kwargs['ShadowAttribute']
        if kwargs.get('sig'):
            self.sig = kwargs['sig']
        if kwargs.get('Tag'):
            self.Tag = [t for t in kwargs['Tag'] if t]

        # If the user wants to disable correlation, let them. Defaults to False.
        self.disable_correlation = kwargs.get("disable_correlation", False)
        if self.disable_correlation is None:
            self.disable_correlation = False

    def _prepare_new_malware_sample(self):
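        """Rebuild the malware-sample value as 'filename|md5' (md5 computed from self.data), keep the raw sample in self.malware_binary and flag it for encryption."""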
        if '|' in self.value:
            # Get the filename, ignore the md5, because humans.
            self.malware_filename, md5 = self.value.split('|')
        else:
            # Assuming the user only passed the filename
            self.malware_filename = self.value
        m = hashlib.md5()
        m.update(self.data.getvalue())
        md5 = m.hexdigest()
        self.value = '{}|{}'.format(self.malware_filename, md5)
        self.malware_binary = self.data
        self.encrypt = True

    def _load_data(self):
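        """Decode self.data from base64 into a BytesIO; for malware-sample attributes, try to unpack the password-protected zip (password 'infected') to recover the original filename and binary, otherwise treat it as a new sample."""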
        if not isinstance(self.data, BytesIO):
            self.data = BytesIO(base64.b64decode(self.data))
        if self.type == 'malware-sample':
            try:
                with ZipFile(self.data) as f:
                    for name in f.namelist():
                        if name.endswith('.txt'):
                            with f.open(name, pwd=b'infected') as unpacked:
                                self.malware_filename = unpacked.read().decode()
                        else:
                            with f.open(name, pwd=b'infected') as unpacked:
                                self.malware_binary = BytesIO(unpacked.read())
            except Exception:
                # Not an encrypted zip file, assuming it is a new malware sample
                self._prepare_new_malware_sample()

    def _json(self):
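        """Dump the attribute as a dict suitable for creating or updating it on a MISP instance (no id or timestamp)."""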
        to_return = {'type': self.type, 'category': self.category, 'to_ids': self.to_ids,
                     'distribution': self.distribution, 'value': self.value,
                     'comment': self.comment, 'disable_correlation': self.disable_correlation}
        if self.uuid:
            to_return['uuid'] = self.uuid
        if self.sig:
            to_return['sig'] = self.sig
        if self.sharing_group_id:
            to_return['sharing_group_id'] = self.sharing_group_id
        if self.Tag:
            to_return['Tag'] = self.Tag
        if self.data:
            to_return['data'] = base64.b64encode(self.data.getvalue()).decode()
            if self.encrypt:
                to_return['encrypt'] = self.encrypt
        to_return = _int_to_str(to_return)
        return to_return

    def _json_full(self):
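        """Dump the attribute as a dict including the server-managed fields (id, timestamp, deleted, ShadowAttribute, SharingGroup)."""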
        to_return = self._json()
        if self.id:
            to_return['id'] = self.id
        if self.timestamp:
            # Should never be set on an update, MISP will automatically set it to now
            to_return['timestamp'] = int(time.mktime(self.timestamp.timetuple()))
        if self.deleted is not None:
            to_return['deleted'] = self.deleted
        if self.ShadowAttribute:
            to_return['ShadowAttribute'] = self.ShadowAttribute
        if self.SharingGroup:
            to_return['SharingGroup'] = self.SharingGroup
        to_return = _int_to_str(to_return)
        return to_return


class EncodeUpdate(JSONEncoder):
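    """JSON encoder serializing MISPEvent/MISPAttribute objects through their _json() method (creation/update payloads)."""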

    def default(self, obj):
        try:
            return obj._json()
        except AttributeError:
            return JSONEncoder.default(self, obj)


class EncodeFull(JSONEncoder):
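    """JSON encoder serializing MISPEvent/MISPAttribute objects through their _json_full() method (complete dumps)."""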

    def default(self, obj):
        try:
            return obj._json_full()
        except AttributeError:
            return JSONEncoder.default(self, obj)


def _int_to_str(d):
    # Transform all integers back to strings
    for k, v in d.items():
        if isinstance(v, (int, float)) and not isinstance(v, bool):
            d[k] = str(v)
    return d


class MISPEvent(object):
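    """A MISP event: metadata (info, date, threat level, analysis, distribution), its attributes, and signature handling, validated against the bundled JSON schemas."""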

    def __init__(self, describe_types=None):
        self.ressources_path = os.path.join(os.path.abspath(os.path.dirname(__file__)), 'data')
        with open(os.path.join(self.ressources_path, 'schema.json'), 'r') as f:
            self.json_schema = json.load(f)
        with open(os.path.join(self.ressources_path, 'schema-lax.json'), 'r') as f:
            self.json_schema_lax = json.load(f)
        if not describe_types:
            with open(os.path.join(self.ressources_path, 'describeTypes.json'), 'r') as f:
                t = json.load(f)
            describe_types = t['result']
        self.describe_types = describe_types
        self.categories = describe_types['categories']
        self.types = describe_types['types']
        self.category_type_mapping = describe_types['category_type_mappings']
        self.sane_default = describe_types['sane_defaults']
        self.new = True
        self.dump_full = False
        self._reinitialize_event()

    def _reinitialize_event(self):
        # Default values for a valid event to send to a MISP instance
        self.distribution = 3
        self.threat_level_id = 2
        self.analysis = 0
        self.info = None
        self.published = False
        self.date = datetime.date.today()
        self.attributes = []

        # All other keys
        self.sig = None
        self.global_sig = None
        self.id = None
        self.orgc_id = None
        self.org_id = None
        self.uuid = None
        self.attribute_count = None
        self.timestamp = None
        self.proposal_email_lock = None
        self.locked = None
        self.publish_timestamp = None
        self.sharing_group_id = None
        self.Org = None
        self.Orgc = None
        self.ShadowAttribute = []
        self.RelatedEvent = []
        self.Tag = []
        self.Galaxy = None

    def _serialize(self):
        return '{date}{threat_level_id}{info}{uuid}{analysis}{timestamp}'.format(
            date=self.date, threat_level_id=self.threat_level_id, info=self.info,
            uuid=self.uuid, analysis=self.analysis, timestamp=self.timestamp).encode()

    def _serialize_sigs(self):
        all_sigs = self.sig
        for a in self.attributes:
            all_sigs += a.sig
        return all_sigs.encode()

    def sign(self, gpg_uid, passphrase=None):
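        """Sign the serialized event, sign every attribute, then sign the concatenation of all signatures; results are stored in self.sig, each attribute's sig, and self.global_sig."""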
        if not has_pyme:
            raise PyMISPError('pyme is required, please install: pip install --pre pyme3. You will also need libgpg-error-dev and libgpgme11-dev.')
        to_sign = self._serialize()
        with gpg.Context() as c:
            keys = list(c.keylist(gpg_uid))
            c.signers = keys[:1]
            if passphrase:
                c.set_passphrase_cb(lambda *args: passphrase)
            signed, _ = c.sign(to_sign, mode=mode.DETACH)
            self.sig = base64.b64encode(signed).decode()
        for a in self.attributes:
            a.sign(gpg_uid, passphrase)
        to_sign_global = self._serialize_sigs()
        with gpg.Context() as c:
            keys = list(c.keylist(gpg_uid))
            c.signers = keys[:1]
            if passphrase:
                c.set_passphrase_cb(lambda *args: passphrase)
            signed, _ = c.sign(to_sign_global, mode=mode.DETACH)
            self.global_sig = base64.b64encode(signed).decode()

    def verify(self, gpg_uid):
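        """Verify the event signature, every attribute signature and the global signature; returns a dict mapping each uuid (and 'global') to a boolean."""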
        if not has_pyme:
            raise PyMISPError('pyme is required, please install: pip install --pre pyme3. You will also need libgpg-error-dev and libgpgme11-dev.')
        to_return = {}
        signed_data = self._serialize()
        with gpg.Context() as c:
            keys = list(c.keylist(gpg_uid))
            try:
                c.verify(signed_data, signature=base64.b64decode(self.sig), verify=keys[:1])
                to_return[self.uuid] = True
            except Exception:
                to_return[self.uuid] = False
        for a in self.attributes:
            to_return.update(a.verify(gpg_uid))
        to_verify_global = self._serialize_sigs()
        with gpg.Context() as c:
            keys = list(c.keylist(gpg_uid))
            try:
                c.verify(to_verify_global, signature=base64.b64decode(self.global_sig), verify=keys[:1])
                to_return['global'] = True
            except Exception:
                to_return['global'] = False
        return to_return

    def load_file(self, event_path):
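        """Load an event from a JSON file on disk."""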
        if not os.path.exists(event_path):
            raise PyMISPError('Invalid path, unable to load the event.')
        with open(event_path, 'r') as f:
            self.load(f)

    def load(self, json_event):
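        """Load an event from a file-like object, a JSON string, or an already-parsed dict; handles the {'response': [...]} wrapper returned by MISP."""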
        self.new = False
        self.dump_full = True
        if hasattr(json_event, 'read'):
            # python2 and python3 compatible way to check if we have a file-like object
            json_event = json_event.read()
        if isinstance(json_event, basestring):
            json_event = json.loads(json_event)
        if json_event.get('response'):
            event = json_event.get('response')[0]
        else:
            event = json_event
        if not event:
            raise PyMISPError('Invalid event')
        # Invalid event created by MISP up to 2.4.52 (attribute_count is none instead of '0')
        if event.get('Event') and event.get('Event').get('attribute_count') is None:
            event['Event']['attribute_count'] = '0'
        jsonschema.validate(event, self.json_schema_lax)
        e = event.get('Event')
        self._reinitialize_event()
        self.set_all_values(**e)

    def set_date(self, date, ignore_invalid=False):
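        """Set the event date from a string (parsed with dateutil), a datetime.datetime or a datetime.date; with ignore_invalid, fall back to today instead of raising."""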
        if isinstance(date, basestring) or isinstance(date, unicode):
            self.date = parse(date).date()
        elif isinstance(date, datetime.datetime):
            self.date = date.date()
        elif isinstance(date, datetime.date):
            self.date = date
        else:
            if ignore_invalid:
                self.date = datetime.date.today()
            else:
                raise NewEventError('Invalid format for the date: {} - {}'.format(date, type(date)))

    def set_all_values(self, **kwargs):
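        """Initialize or update the event from keyword arguments: validates the required info field, distribution, threat_level_id and analysis, and builds MISPAttribute objects from the 'Attribute' entries."""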
        # Required value
        if kwargs.get('info'):
            self.info = kwargs['info']
        elif not self.info:
            raise NewAttributeError('The info field of the new event is required.')

        # Default values for a valid event to send to a MISP instance
        if kwargs.get('distribution') is not None:
            self.distribution = int(kwargs['distribution'])
            if self.distribution not in [0, 1, 2, 3, 4]:
                raise NewEventError('{} is invalid, the distribution has to be in 0, 1, 2, 3, 4'.format(self.distribution))
        if kwargs.get('threat_level_id') is not None:
            self.threat_level_id = int(kwargs['threat_level_id'])
            if self.threat_level_id not in [1, 2, 3, 4]:
                raise NewEventError('{} is invalid, the threat_level has to be in 1, 2, 3, 4'.format(self.threat_level_id))
        if kwargs.get('analysis') is not None:
            self.analysis = int(kwargs['analysis'])
            if self.analysis not in [0, 1, 2]:
                raise NewEventError('{} is invalid, the analysis has to be in 0, 1, 2'.format(self.analysis))
        if kwargs.get('published') is not None:
            self.unpublish()
            if kwargs.get("published") is True:
                self.publish()
        if kwargs.get('date'):
            self.set_date(kwargs['date'])
        if kwargs.get('Attribute'):
            for a in kwargs['Attribute']:
                attribute = MISPAttribute(self.describe_types)
                attribute.set_all_values(**a)
                self.attributes.append(attribute)

        # All other keys
        if kwargs.get('id'):
            self.id = int(kwargs['id'])
        if kwargs.get('orgc_id'):
            self.orgc_id = int(kwargs['orgc_id'])
        if kwargs.get('org_id'):
            self.org_id = int(kwargs['org_id'])
        if kwargs.get('uuid'):
            self.uuid = kwargs['uuid']
        if kwargs.get('attribute_count'):
            self.attribute_count = int(kwargs['attribute_count'])
        if kwargs.get('timestamp'):
            self.timestamp = datetime.datetime(1970, 1, 1) + datetime.timedelta(seconds=int(kwargs['timestamp']))
        if kwargs.get('proposal_email_lock'):
            self.proposal_email_lock = kwargs['proposal_email_lock']
        if kwargs.get('locked'):
            self.locked = kwargs['locked']
        if kwargs.get('publish_timestamp'):
            self.publish_timestamp = datetime.datetime(1970, 1, 1) + datetime.timedelta(seconds=int(kwargs['publish_timestamp']))
        if kwargs.get('sharing_group_id'):
            self.sharing_group_id = int(kwargs['sharing_group_id'])
        if kwargs.get('Org'):
            self.Org = kwargs['Org']
        if kwargs.get('Orgc'):
            self.Orgc = kwargs['Orgc']
        if kwargs.get('ShadowAttribute'):
            self.ShadowAttribute = kwargs['ShadowAttribute']
        if kwargs.get('RelatedEvent'):
            self.RelatedEvent = []
            for rel_event in kwargs['RelatedEvent']:
                sub_event = MISPEvent()
                sub_event.load(rel_event)
                self.RelatedEvent.append(sub_event)
        if kwargs.get('Galaxy'):
            self.Galaxy = kwargs['Galaxy']
        if kwargs.get('Tag'):
            self.Tag = [t for t in kwargs['Tag'] if t]
        if kwargs.get('sig'):
            self.sig = kwargs['sig']
        if kwargs.get('global_sig'):
            self.global_sig = kwargs['global_sig']

    def _json(self):
        to_return = {'Event': {}}
        to_return['Event'] = {'distribution': self.distribution, 'info': self.info,
                              'date': self.date.isoformat(), 'published': self.published,
                              'threat_level_id': self.threat_level_id,
                              'analysis': self.analysis, 'Attribute': []}
        if self.sig:
            to_return['Event']['sig'] = self.sig
        if self.global_sig:
            to_return['Event']['global_sig'] = self.global_sig
        if self.uuid:
            to_return['Event']['uuid'] = self.uuid
        if self.Tag:
            to_return['Event']['Tag'] = self.Tag
        if self.Orgc:
            to_return['Event']['Orgc'] = self.Orgc
        if self.Galaxy:
            to_return['Event']['Galaxy'] = self.Galaxy
        if self.sharing_group_id:
            to_return['Event']['sharing_group_id'] = self.sharing_group_id
        to_return['Event'] = _int_to_str(to_return['Event'])
        if self.attributes:
            to_return['Event']['Attribute'] = [a._json() for a in self.attributes]
        jsonschema.validate(to_return, self.json_schema)
        return to_return

    def _json_full(self):
        to_return = self._json()
        if self.id:
            to_return['Event']['id'] = self.id
        if self.orgc_id:
            to_return['Event']['orgc_id'] = self.orgc_id
        if self.org_id:
            to_return['Event']['org_id'] = self.org_id
        if self.locked is not None:
            to_return['Event']['locked'] = self.locked
        if self.attribute_count is not None:
            to_return['Event']['attribute_count'] = self.attribute_count
        if self.RelatedEvent:
            to_return['Event']['RelatedEvent'] = []
            for rel_event in self.RelatedEvent:
                to_return['Event']['RelatedEvent'].append(rel_event._json_full())
        if self.Org:
            to_return['Event']['Org'] = self.Org
        if self.sharing_group_id:
            to_return['Event']['sharing_group_id'] = self.sharing_group_id
        if self.ShadowAttribute:
            to_return['Event']['ShadowAttribute'] = self.ShadowAttribute
        if self.proposal_email_lock is not None:
            to_return['Event']['proposal_email_lock'] = self.proposal_email_lock
        if self.locked is not None:
            to_return['Event']['locked'] = self.locked
        if self.publish_timestamp:
            to_return['Event']['publish_timestamp'] = int(time.mktime(self.publish_timestamp.timetuple()))
        if self.timestamp:
            # Should never be set on an update, MISP will automatically set it to now
            to_return['Event']['timestamp'] = int(time.mktime(self.timestamp.timetuple()))
        to_return['Event'] = _int_to_str(to_return['Event'])
        if self.attributes:
            to_return['Event']['Attribute'] = [a._json_full() for a in self.attributes]
        jsonschema.validate(to_return, self.json_schema)
        return to_return

    def add_tag(self, tag):
        self.Tag.append({'name': tag})

    def add_attribute_tag(self, tag, attribute_identifier):
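        """Tag every attribute whose id, uuid or value matches attribute_identifier; returns the last matching attribute, or raises if none matched."""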
        attribute = None
        for a in self.attributes:
            if a.id == attribute_identifier or a.uuid == attribute_identifier or attribute_identifier in a.value:
                a.add_tag(tag)
                attribute = a
        if not attribute:
            raise Exception('No attribute with identifier {} found.'.format(attribute_identifier))
        return attribute

    def publish(self):
        self.published = True

    def unpublish(self):
        self.published = False

    def delete_attribute(self, attribute_id):
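        """Soft-delete the attribute matching attribute_id (id or uuid) by flagging it as deleted."""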
        found = False
        for a in self.attributes:
            if a.id == attribute_id or a.uuid == attribute_id:
                a.delete()
                found = True
                break
        if not found:
            raise Exception('No attribute with UUID/ID {} found.'.format(attribute_id))

    def add_attribute(self, type, value, **kwargs):
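        """Create a MISPAttribute from type, value and any other attribute fields passed as keyword arguments, and append it to the event."""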
        attribute = MISPAttribute(self.describe_types)
        attribute.set_all_values(type=type, value=value, **kwargs)
        self.attributes.append(attribute)
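
# Minimal usage sketch (illustration only, not part of the library): build an event
# in memory and dump it with the encoders defined above. The attribute type and the
# values are arbitrary examples.
#
#   event = MISPEvent()
#   event.set_all_values(info='Example event', distribution=0)
#   event.add_attribute('ip-dst', '198.51.100.1', comment='example indicator')
#   print(json.dumps(event, cls=EncodeUpdate))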