# -*- coding: utf-8 -*-

"""Python API using the REST interface of MISP"""

import sys
import json
import datetime
import os
import base64
import re
import warnings
import functools
import logging

try:
    from urllib.parse import urljoin
except ImportError:
    from urlparse import urljoin
    warnings.warn("You're using python 2, it is strongly recommended to use python >=3.5")

from io import BytesIO, open
import zipfile

try:
    import requests
    HAVE_REQUESTS = True
except ImportError:
    HAVE_REQUESTS = False

try:
    from requests_futures.sessions import FuturesSession
    ASYNC_OK = True
except ImportError:
    ASYNC_OK = False

from . import __version__
from .exceptions import PyMISPError, SearchError, MissingDependency, NoURL, NoKey
from .mispevent import MISPEvent, MISPAttribute
from .abstract import MISPEncode

logger = logging.getLogger(__name__)


# Least dirty way to support python 2 and 3
try:
    basestring
    unicode
    warnings.warn("You're using python 2, it is strongly recommended to use python >=3.4")
except NameError:
    basestring = str
    unicode = str


def deprecated(func):
    '''This is a decorator which can be used to mark functions
    as deprecated. It will result in a warning being emitted
    when the function is used.'''

    @functools.wraps(func)
    def new_func(*args, **kwargs):
        warnings.showwarning(
            "Call to deprecated function {}.".format(func.__name__),
            category=DeprecationWarning,
            filename=func.__code__.co_filename,
            lineno=func.__code__.co_firstlineno + 1
        )
        return func(*args, **kwargs)
    return new_func
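
# Illustrative use of the decorator above; `old_helper` is a placeholder name, not part of PyMISP:
#
#     @deprecated
#     def old_helper():
#         pass
#
# Calling old_helper() then emits a DeprecationWarning pointing at its definition.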


class PyMISP(object):
    """Python API for MISP

    :param url: URL of the MISP instance you want to connect to
    :param key: API key of the user you want to use
    :param ssl: can be True or False (to check or not the validity
                of the certificate. Or a CA_BUNDLE in case of self-signed
                certificate (the concatenation of all the *.crt of the chain)
    :param out_type: Type of object (json) NOTE: XML output isn't supported anymore, keeping the flag for compatibility reasons.
    :param debug: deprecated, configure logging in api client instead
    :param proxies: Proxy dict, as described here: http://docs.python-requests.org/en/master/user/advanced/#proxies
    :param cert: Client certificate, as described there: http://docs.python-requests.org/en/master/user/advanced/#ssl-cert-verification
    :param asynch: Use asynchronous processing where possible
    """

    def __init__(self, url, key, ssl=True, out_type='json', debug=None, proxies=None, cert=None, asynch=False):
        if not url:
            raise NoURL('Please provide the URL of your MISP instance.')
        if not key:
            raise NoKey('Please provide your authorization key.')

        self.root_url = url
        self.key = key
        self.ssl = ssl
        self.proxies = proxies
        self.cert = cert
        self.asynch = asynch
        if asynch and not ASYNC_OK:
            warnings.warn("You turned on Async, but don't have requests_futures installed")
            self.asynch = False

        self.resources_path = os.path.join(os.path.abspath(os.path.dirname(__file__)), 'data')
        if out_type != 'json':
            raise PyMISPError('The only output type supported by PyMISP is JSON. If you still rely on XML, use PyMISP v2.4.49')
        if debug is not None:
            warnings.warn('debug is deprecated, configure logging in api client')

        try:
            # Make sure the MISP instance is working and the URL is valid
            response = self.get_recommended_api_version()
            if response.get('errors'):
                logger.warning(response.get('errors')[0])
            elif not response.get('version'):
                logger.warning("Unable to check the recommended PyMISP version (MISP <2.4.60), please upgrade.")
            else:
                pymisp_version_tup = tuple(int(x) for x in __version__.split('.'))
                recommended_version_tup = tuple(int(x) for x in response['version'].split('.'))
                if recommended_version_tup < pymisp_version_tup:
                    logger.warning("The version of PyMISP recommended by the MISP instance ({}) is older than the one you're using now ({}). Please upgrade the MISP instance or use an older PyMISP version.".format(response['version'], __version__))
                elif pymisp_version_tup < recommended_version_tup:
                    logger.warning("The version of PyMISP recommended by the MISP instance ({}) is newer than the one you're using now ({}). Please upgrade PyMISP.".format(response['version'], __version__))

        except Exception as e:
            raise PyMISPError('Unable to connect to MISP ({}). Please make sure the API key and the URL are correct (http/https is required): {}'.format(self.root_url, e))

        try:
            session = self.__prepare_session()
            response = session.get(urljoin(self.root_url, 'attributes/describeTypes.json'))
            describe_types = self._check_response(response)
            if describe_types.get('error'):
                for e in describe_types.get('error'):
                    raise PyMISPError('Failed: {}'.format(e))
            self.describe_types = describe_types['result']
            if not self.describe_types.get('sane_defaults'):
                raise PyMISPError('The MISP server you are trying to reach is outdated (<2.4.52). Please use PyMISP v2.4.51.1 (pip install -I PyMISP==v2.4.51.1) and/or contact your administrator.')
        except Exception:
            with open(os.path.join(self.resources_path, 'describeTypes.json'), 'r') as f:
                describe_types = json.load(f)

        self.describe_types = describe_types['result']
        self.categories = self.describe_types['categories']
        self.types = self.describe_types['types']
        self.category_type_mapping = self.describe_types['category_type_mappings']
        self.sane_default = self.describe_types['sane_defaults']

    def __prepare_session(self, output='json', async_implemented=False):
        """Prepare the session headers"""

        if not HAVE_REQUESTS:
            raise MissingDependency('Missing dependency, install requests (`pip install requests`)')
        if self.asynch and async_implemented:
            session = FuturesSession()
        else:
            session = requests.Session()
        session.verify = self.ssl
        session.proxies = self.proxies
        session.cert = self.cert
        session.headers.update(
            {'Authorization': self.key,
             'Accept': 'application/{}'.format(output),
             'content-type': 'application/{}'.format(output),
             'User-Agent': 'PyMISP {} - Python {}.{}.{}'.format(__version__, *sys.version_info)})
        return session

    # #####################
    # ### Core helpers ####
    # #####################

    def flatten_error_messages(self, response):
        """Dirty dirty method to normalize the error messages between the API calls.
        Any response containing a key 'error' or 'errors' failed at some point,
        we make one single list out of it.
        """
        messages = []
        if response.get('error'):
            if isinstance(response['error'], list):
                for e in response['errors']:
                    messages.append(e['error']['value'][0])
            else:
                messages.append(response['error'])
        elif response.get('errors'):
            if isinstance(response['errors'], dict):
                for where, errors in response['errors'].items():
                    if isinstance(errors, dict):
                        for where, msg in errors.items():
                            if isinstance(msg, list):
                                for m in msg:
                                    messages.append('Error in {}: {}'.format(where, m))
                            else:
                                messages.append('Error in {}: {}'.format(where, msg))
                    else:
                        if isinstance(errors, list):
                            for e in errors:
                                if not e:
                                    continue
                                if isinstance(e, basestring):
                                    messages.append(e)
                                    continue
                                for type_e, msgs in e.items():
                                    for m in msgs:
                                        messages.append('Error in {}: {}'.format(where, m))
                        else:
                            messages.append('{} ({})'.format(errors, where))
        return messages

    def _check_response(self, response):
        """Check if the response from the server is not an unexpected error"""
        if response.status_code >= 500:
            response.raise_for_status()
        try:
            to_return = response.json()
        except ValueError:
            logger.debug(response.text)
            raise PyMISPError('Unknown error: {}'.format(response.text))

        errors = []
        if isinstance(to_return, (list, str)):
            to_return = {'response': to_return}
        if to_return.get('error'):
            if not isinstance(to_return['error'], list):
                errors.append(to_return['error'])
            else:
                errors += to_return['error']
        if to_return.get('errors'):
            if not isinstance(to_return['errors'], list):
                errors.append(to_return['errors'])
            else:
                errors += to_return['errors']

        if 400 <= response.status_code < 500:
            if not errors and to_return.get('message'):
                errors.append(to_return['message'])
            else:
                errors.append(str(response.status_code))
        errors += self.flatten_error_messages(to_return)
        if errors:
            to_return['errors'] = errors
        if logger.isEnabledFor(logging.DEBUG):
            logger.debug(json.dumps(to_return, indent=4))
        return to_return

    def _one_or_more(self, value):
        """Returns a list/tuple of one or more items, regardless of input."""
        return value if isinstance(value, (tuple, list)) else (value,)

    def _make_mispevent(self, event):
        """Transform a JSON MISP event into a MISPEvent"""
        if not isinstance(event, MISPEvent):
            e = MISPEvent(self.describe_types)
            e.load(event)
        else:
            e = event
        return e

    def _prepare_full_event(self, distribution, threat_level_id, analysis, info, date=None, published=False, orgc_id=None, org_id=None, sharing_group_id=None):
        """Initialize a new MISPEvent from scratch"""
        misp_event = MISPEvent(self.describe_types)
        misp_event.set_all_values(info=info, distribution=distribution, threat_level_id=threat_level_id,
                                  analysis=analysis, date=date, orgc_id=orgc_id, org_id=org_id, sharing_group_id=sharing_group_id)
        if published:
            misp_event.publish()
        return misp_event

    def _prepare_full_attribute(self, category, type_value, value, to_ids, comment=None, distribution=5, **kwargs):
        """Initialize a new MISPAttribute from scratch"""
        misp_attribute = MISPAttribute(self.describe_types)
        misp_attribute.set_all_values(type=type_value, value=value, category=category,
                                      to_ids=to_ids, comment=comment, distribution=distribution, **kwargs)
        return misp_attribute

    def _valid_uuid(self, uuid):
        """Test if uuid is valid
        Will test against CakeText's RFC 4122, i.e.
        "the third group must start with a 4,
        and the fourth group must start with 8, 9, a or b."

        :param uuid: a uuid
        """
        regex = re.compile('^[a-f0-9]{8}-?[a-f0-9]{4}-?4[a-f0-9]{3}-?[89ab][a-f0-9]{3}-?[a-f0-9]{12}\Z', re.I)
        match = regex.match(uuid)
        return bool(match)

    # ################################################
    # ############### Simple REST API ################
    # ################################################

    def test_connection(self):
        """Test the auth key"""
        response = self.get_version()
        if response.get('errors'):
            raise PyMISPError(response.get('errors')[0])
        return True

    def get_index(self, filters=None):
        """Return the index.

        Warning, there's a limit on the number of results
        """
        session = self.__prepare_session()
        url = urljoin(self.root_url, 'events/index')
        if filters is not None:
            filters = json.dumps(filters)
            response = session.post(url, data=filters)
        else:
            response = session.get(url)
        return self._check_response(response)

    def get_event(self, event_id):
        """Get an event

        :param event_id: Event id to get
        """
        session = self.__prepare_session()
        url = urljoin(self.root_url, 'events/{}'.format(event_id))
        response = session.get(url)
        return self._check_response(response)

    def add_event(self, event):
        """Add a new event

        :param event: Event as JSON object / string to add
        """
        session = self.__prepare_session()
        url = urljoin(self.root_url, 'events')
        if isinstance(event, MISPEvent):
            event = json.dumps(event, cls=MISPEncode)
        if isinstance(event, basestring):
            response = session.post(url, data=event)
        else:
            response = session.post(url, data=json.dumps(event))
        return self._check_response(response)
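
    # A short sketch of building an event with the MISPEvent helper and pushing it;
    # the info text and distribution value are placeholders:
    #
    #     from pymisp import MISPEvent
    #     me = MISPEvent()
    #     me.info = 'Example event created from PyMISP'
    #     me.distribution = 0  # your organisation only
    #     result = misp.add_event(me)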

    def update_event(self, event_id, event):
        """Update an event

        :param event_id: Event id to update
        :param event: Event as JSON object / string to update
        """
        session = self.__prepare_session()
        url = urljoin(self.root_url, 'events/{}'.format(event_id))
        if isinstance(event, MISPEvent):
            event = json.dumps(event, cls=MISPEncode)
        if isinstance(event, basestring):
            response = session.post(url, data=event)
        else:
            response = session.post(url, data=json.dumps(event))
        return self._check_response(response)

    def delete_event(self, event_id):
        """Delete an event

        :param event_id: Event id to delete
        """
        session = self.__prepare_session()
        url = urljoin(self.root_url, 'events/{}'.format(event_id))
        response = session.delete(url)
        return self._check_response(response)

    def delete_attribute(self, attribute_id, hard_delete=False):
        """Delete an attribute by ID"""
        session = self.__prepare_session()
        if hard_delete:
            url = urljoin(self.root_url, 'attributes/delete/{}/1'.format(attribute_id))
        else:
            url = urljoin(self.root_url, 'attributes/delete/{}'.format(attribute_id))
        response = session.get(url)
        return self._check_response(response)

    def pushEventToZMQ(self, event_id):
        """Force push an event on ZMQ"""
        session = self.__prepare_session()
        url = urljoin(self.root_url, 'events/pushEventToZMQ/{}.json'.format(event_id))
        response = session.post(url)
        return self._check_response(response)

    # ##############################################
    # ############### Event handling ###############
    # ##############################################

    def get(self, eid):
        """Get an event by event ID"""
        return self.get_event(eid)

    def update(self, event):
        """Update an event by ID"""
        e = self._make_mispevent(event)
        if e.uuid:
            eid = e.uuid
        else:
            eid = e.id
        return self.update_event(eid, e)

    def publish(self, event, alert=True):
        """Publish event (with or without alert email)
        :param event: pass event or event id (as string or int) to publish
        :param alert: set to True by default (send alerting email) if False will not send alert
        :return publish status
        """
        if isinstance(event, int) or (isinstance(event, basestring) and event.isdigit()):
            full_event = self._make_mispevent(self.get_event(event))
        else:
            full_event = self._make_mispevent(event)
        event_id = full_event.id
        if full_event.published:
            return {'error': 'Already published'}
        session = self.__prepare_session()
        if not alert:
            url = urljoin(self.root_url, 'events/publish/{}'.format(event_id))
        else:
            url = urljoin(self.root_url, 'events/alert/{}'.format(event_id))
        response = session.post(url)
        return self._check_response(response)
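
    # For example, to publish event 1234 without sending the alert email:
    #     misp.publish(1234, alert=False)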

    def change_threat_level(self, event, threat_level_id):
        """Change the threat level of an event"""
        e = self._make_mispevent(event)
        e.threat_level_id = threat_level_id
        return self.update(e)

    def change_sharing_group(self, event, sharing_group_id):
        """Change the sharing group of an event"""
        e = self._make_mispevent(event)
        e.distribution = 4  # Needs to be 'Sharing group'
        e.sharing_group_id = sharing_group_id
        return self.update(e)

    def new_event(self, distribution=None, threat_level_id=None, analysis=None, info=None, date=None, published=False, orgc_id=None, org_id=None, sharing_group_id=None):
        """Create and add a new event"""
        misp_event = self._prepare_full_event(distribution, threat_level_id, analysis, info, date, published, orgc_id, org_id, sharing_group_id)
        return self.add_event(misp_event)
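
    # An illustrative call; the values are placeholders (distribution 0 = your organisation
    # only, threat_level_id 4 = undefined, analysis 0 = initial):
    #     misp.new_event(distribution=0, threat_level_id=4, analysis=0, info='Example event')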

    def tag(self, uuid, tag):
        """Tag an event or an attribute"""
        if not self._valid_uuid(uuid):
            raise PyMISPError('Invalid UUID')
        session = self.__prepare_session()
        to_post = {'uuid': uuid, 'tag': tag}
        path = 'tags/attachTagToObject'
        response = session.post(urljoin(self.root_url, path), data=json.dumps(to_post))
        return self._check_response(response)

    def untag(self, uuid, tag):
        """Untag an event or an attribute"""
        if not self._valid_uuid(uuid):
            raise PyMISPError('Invalid UUID')
        session = self.__prepare_session()
        to_post = {'uuid': uuid, 'tag': tag}
        path = 'tags/removeTagFromObject'
        response = session.post(urljoin(self.root_url, path), data=json.dumps(to_post))
        return self._check_response(response)
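
    # For instance, attaching a TLP tag to an event or attribute by UUID
    # (the UUID below is a placeholder):
    #     misp.tag('deadbeef-1234-4f00-8900-cafecafecafe', 'tlp:green')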

    # ##### File attributes #####

    def _send_attributes(self, event, attributes, proposal=False):
        """Helper to add new attributes to an existing event"""
        eventID_to_update = None
        if isinstance(event, MISPEvent):
            if hasattr(event, 'id'):
                eventID_to_update = event.id
            elif hasattr(event, 'uuid'):
                eventID_to_update = event.uuid
        elif isinstance(event, int) or (isinstance(event, str) and (event.isdigit() or self._valid_uuid(event))):
            eventID_to_update = event
        else:
            e = MISPEvent(self.describe_types)
            e.load(event)
            if hasattr(e, 'id'):
                eventID_to_update = e.id
            elif hasattr(e, 'uuid'):
                eventID_to_update = e.uuid
        if eventID_to_update is None:
            raise PyMISPError("Unable to find the ID of the event to update")
        if not attributes:
            return {'error': 'No attributes.'}
        for a in attributes:
            if proposal:
                response = self.proposal_add(eventID_to_update, a)
            else:
                session = self.__prepare_session()
                url = urljoin(self.root_url, 'attributes/add/{}'.format(eventID_to_update))
                response = self._check_response(session.post(url, data=json.dumps(a, cls=MISPEncode)))
        return response

    def add_named_attribute(self, event, type_value, value, category=None, to_ids=False, comment=None, distribution=None, proposal=False, **kwargs):
        """Add one or more attributes to an existing event"""
        attributes = []
        for value in self._one_or_more(value):
            attributes.append(self._prepare_full_attribute(category, type_value, value, to_ids, comment, distribution, **kwargs))
        return self._send_attributes(event, attributes, proposal)
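
    # For example, attaching two indicator values to event 1234 in one call; type and
    # category names follow the MISP describeTypes vocabulary:
    #     misp.add_named_attribute(1234, 'ip-dst', ['198.51.100.1', '198.51.100.2'],
    #                              category='Network activity', to_ids=True)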

    def add_hashes(self, event, category='Artifacts dropped', filename=None, md5=None, sha1=None, sha256=None, ssdeep=None, comment=None, to_ids=True, distribution=None, proposal=False, **kwargs):
        """Add hash(es) to an existing event"""
        attributes = []
        type_value = '{}'
        value = ''
        if filename:
            type_value = 'filename|{}'
            value = filename + '|'
        if md5:
            attributes.append(self._prepare_full_attribute(category, type_value.format('md5'), value + md5, to_ids, comment, distribution))
        if sha1:
            attributes.append(self._prepare_full_attribute(category, type_value.format('sha1'), value + sha1, to_ids, comment, distribution))
        if sha256:
            attributes.append(self._prepare_full_attribute(category, type_value.format('sha256'), value + sha256, to_ids, comment, distribution))
        if ssdeep:
            attributes.append(self._prepare_full_attribute(category, type_value.format('ssdeep'), value + ssdeep, to_ids, comment, distribution))

        return self._send_attributes(event, attributes, proposal)

    def av_detection_link(self, event, link, category='Antivirus detection', to_ids=False, comment=None, distribution=None, proposal=False, **kwargs):
        """Add AV detection link(s)"""
        return self.add_named_attribute(event, 'link', link, category, to_ids, comment, distribution, proposal, **kwargs)

    def add_detection_name(self, event, name, category='Antivirus detection', to_ids=False, comment=None, distribution=None, proposal=False, **kwargs):
        """Add AV detection name(s)"""
        return self.add_named_attribute(event, 'text', name, category, to_ids, comment, distribution, proposal, **kwargs)

    def add_filename(self, event, filename, category='Artifacts dropped', to_ids=False, comment=None, distribution=None, proposal=False, **kwargs):
        """Add filename(s)"""
        return self.add_named_attribute(event, 'filename', filename, category, to_ids, comment, distribution, proposal, **kwargs)

    def add_attachment(self, event, attachment, category='Artifacts dropped', to_ids=False, comment=None, distribution=None, proposal=False, **kwargs):
        """Add an attachment to the MISP event

        :param event: The event to add an attachment to
        :param attachment: Either a file handle or a path to a file - will be uploaded
        """
        if isinstance(attachment, basestring) and os.path.isfile(attachment):
            # We have a file to open
            filename = os.path.basename(attachment)
            with open(attachment, "rb") as f:
                fileData = f.read()
        elif hasattr(attachment, "read"):
            # It's a file handle - we can read it but it has no filename
            fileData = attachment.read()
            filename = 'attachment'
        elif isinstance(attachment, (tuple, list)):
            # tuple/list (filename, pseudofile)
            filename = attachment[0]
            if hasattr(attachment[1], "read"):
                # Pseudo file
                fileData = attachment[1].read()
            else:
                fileData = attachment[1]
        else:
            # Plain file content, no filename
            filename = 'attachment'
            fileData = attachment
        if not isinstance(fileData, bytes):
            fileData = fileData.encode()

        # by now we have a string for the file
        # we just need to b64 encode it and send it on its way
        # also, just decode it to utf-8 to avoid the b'string' format
        encodedData = base64.b64encode(fileData).decode("utf-8")

        # Send it on its way
        return self.add_named_attribute(event, 'attachment', filename, category, to_ids, comment, distribution, proposal, data=encodedData, **kwargs)
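
    # Two illustrative ways to call it; paths and filenames are placeholders:
    #     misp.add_attachment(1234, '/tmp/report.pdf')
    #     misp.add_attachment(1234, ('report.pdf', open('/tmp/report.pdf', 'rb')))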

    def add_regkey(self, event, regkey, rvalue=None, category='Artifacts dropped', to_ids=True, comment=None, distribution=None, proposal=False, **kwargs):
        """Add a registry key"""
        if rvalue:
            type_value = 'regkey|value'
            value = '{}|{}'.format(regkey, rvalue)
        else:
            type_value = 'regkey'
            value = regkey
        attributes = []
        attributes.append(self._prepare_full_attribute(category, type_value, value, to_ids, comment, distribution))
        return self._send_attributes(event, attributes, proposal)

    def add_regkeys(self, event, regkeys_values, category='Artifacts dropped', to_ids=True, comment=None, distribution=None, proposal=False, **kwargs):
        """Add registry keys"""
        attributes = []
        for regkey, rvalue in regkeys_values.items():
            if rvalue is not None:
                type_value = 'regkey|value'
                value = '{}|{}'.format(regkey, rvalue)
            else:
                type_value = 'regkey'
                value = regkey
            attributes.append(self._prepare_full_attribute(category, type_value, value, to_ids, comment, distribution))
        return self._send_attributes(event, attributes, proposal)

    def add_pattern(self, event, pattern, in_file=True, in_memory=False, category='Artifacts dropped', to_ids=True, comment=None, distribution=None, proposal=False, **kwargs):
        """Add pattern(s) in file or in memory"""
        if not (in_file or in_memory):
            raise PyMISPError('Invalid pattern type: please use in_memory=True or in_file=True')
        itemtype = 'pattern-in-file' if in_file else 'pattern-in-memory'
        return self.add_named_attribute(event, itemtype, pattern, category, to_ids, comment, distribution, proposal, **kwargs)

    def add_pipe(self, event, named_pipe, category='Artifacts dropped', to_ids=True, comment=None, distribution=None, proposal=False, **kwargs):
        """Add pipe(s)"""
        def scrub(s):
            if not s.startswith('\\.\\pipe\\'):
                s = '\\.\\pipe\\{}'.format(s)
            return s
        attributes = list(map(scrub, self._one_or_more(named_pipe)))
        return self.add_named_attribute(event, 'named pipe', attributes, category, to_ids, comment, distribution, proposal, **kwargs)

    def add_mutex(self, event, mutex, category='Artifacts dropped', to_ids=True, comment=None, distribution=None, proposal=False, **kwargs):
        """Add mutex(es)"""
        def scrub(s):
            if not s.startswith('\\BaseNamedObjects\\'):
                s = '\\BaseNamedObjects\\{}'.format(s)
            return s
        attributes = list(map(scrub, self._one_or_more(mutex)))
        return self.add_named_attribute(event, 'mutex', attributes, category, to_ids, comment, distribution, proposal, **kwargs)

    def add_yara(self, event, yara, category='Payload delivery', to_ids=False, comment=None, distribution=None, proposal=False, **kwargs):
        """Add yara rule(s)"""
        return self.add_named_attribute(event, 'yara', yara, category, to_ids, comment, distribution, proposal, **kwargs)

    # ##### Network attributes #####

    def add_ipdst(self, event, ipdst, category='Network activity', to_ids=True, comment=None, distribution=None, proposal=False, **kwargs):
        """Add destination IP(s)"""
        return self.add_named_attribute(event, 'ip-dst', ipdst, category, to_ids, comment, distribution, proposal, **kwargs)

    def add_ipsrc(self, event, ipsrc, category='Network activity', to_ids=True, comment=None, distribution=None, proposal=False, **kwargs):
        """Add source IP(s)"""
        return self.add_named_attribute(event, 'ip-src', ipsrc, category, to_ids, comment, distribution, proposal, **kwargs)

    def add_hostname(self, event, hostname, category='Network activity', to_ids=True, comment=None, distribution=None, proposal=False, **kwargs):
        """Add hostname(s)"""
        return self.add_named_attribute(event, 'hostname', hostname, category, to_ids, comment, distribution, proposal, **kwargs)

    def add_domain(self, event, domain, category='Network activity', to_ids=True, comment=None, distribution=None, proposal=False, **kwargs):
        """Add domain(s)"""
        return self.add_named_attribute(event, 'domain', domain, category, to_ids, comment, distribution, proposal, **kwargs)

    def add_domain_ip(self, event, domain, ip, category='Network activity', to_ids=True, comment=None, distribution=None, proposal=False, **kwargs):
        """Add domain|ip"""
        if isinstance(ip, str):
            ip = [ip]
        composed = list(map(lambda x: '%s|%s' % (domain, x), ip))
        return self.add_named_attribute(event, 'domain|ip', composed, category, to_ids, comment, distribution, proposal, **kwargs)

    def add_domains_ips(self, event, domain_ips, category='Network activity', to_ids=True, comment=None, distribution=None, proposal=False, **kwargs):
        """Add multiple domain|ip"""
        composed = list(map(lambda x: '%s|%s' % (x[0], x[1]), domain_ips.items()))
        return self.add_named_attribute(event, 'domain|ip', composed, category, to_ids, comment, distribution, proposal, **kwargs)

    def add_url(self, event, url, category='Network activity', to_ids=True, comment=None, distribution=None, proposal=False, **kwargs):
        """Add url(s)"""
        return self.add_named_attribute(event, 'url', url, category, to_ids, comment, distribution, proposal, **kwargs)

    def add_useragent(self, event, useragent, category='Network activity', to_ids=True, comment=None, distribution=None, proposal=False, **kwargs):
        """Add user agent(s)"""
        return self.add_named_attribute(event, 'user-agent', useragent, category, to_ids, comment, distribution, proposal, **kwargs)

    def add_traffic_pattern(self, event, pattern, category='Network activity', to_ids=True, comment=None, distribution=None, proposal=False, **kwargs):
        """Add pattern(s) in traffic"""
        return self.add_named_attribute(event, 'pattern-in-traffic', pattern, category, to_ids, comment, distribution, proposal, **kwargs)

    def add_snort(self, event, snort, category='Network activity', to_ids=True, comment=None, distribution=None, proposal=False, **kwargs):
        """Add SNORT rule(s)"""
        return self.add_named_attribute(event, 'snort', snort, category, to_ids, comment, distribution, proposal, **kwargs)

    def add_net_other(self, event, netother, category='Network activity', to_ids=True, comment=None, distribution=None, proposal=False, **kwargs):
        """Add a free text entry"""
        return self.add_named_attribute(event, 'other', netother, category, to_ids, comment, distribution, proposal, **kwargs)

    # ##### Email attributes #####

    def add_email_src(self, event, email, category='Payload delivery', to_ids=True, comment=None, distribution=None, proposal=False, **kwargs):
        """Add a source email"""
        return self.add_named_attribute(event, 'email-src', email, category, to_ids, comment, distribution, proposal, **kwargs)

    def add_email_dst(self, event, email, category='Payload delivery', to_ids=True, comment=None, distribution=None, proposal=False, **kwargs):
        """Add a destination email"""
        return self.add_named_attribute(event, 'email-dst', email, category, to_ids, comment, distribution, proposal, **kwargs)

    def add_email_subject(self, event, email, category='Payload delivery', to_ids=True, comment=None, distribution=None, proposal=False, **kwargs):
        """Add an email subject"""
        return self.add_named_attribute(event, 'email-subject', email, category, to_ids, comment, distribution, proposal, **kwargs)

    def add_email_attachment(self, event, email, category='Payload delivery', to_ids=True, comment=None, distribution=None, proposal=False, **kwargs):
        """Add an email attachment"""
        return self.add_named_attribute(event, 'email-attachment', email, category, to_ids, comment, distribution, proposal, **kwargs)

    # ##### Target attributes #####

    def add_target_email(self, event, target, category='Targeting data', to_ids=True, comment=None, distribution=None, proposal=False, **kwargs):
        """Add a target email"""
        return self.add_named_attribute(event, 'target-email', target, category, to_ids, comment, distribution, proposal, **kwargs)

    def add_target_user(self, event, target, category='Targeting data', to_ids=True, comment=None, distribution=None, proposal=False, **kwargs):
        """Add a target user"""
        return self.add_named_attribute(event, 'target-user', target, category, to_ids, comment, distribution, proposal, **kwargs)

    def add_target_machine(self, event, target, category='Targeting data', to_ids=True, comment=None, distribution=None, proposal=False, **kwargs):
        """Add a target machine"""
        return self.add_named_attribute(event, 'target-machine', target, category, to_ids, comment, distribution, proposal, **kwargs)

    def add_target_org(self, event, target, category='Targeting data', to_ids=True, comment=None, distribution=None, proposal=False, **kwargs):
        """Add a target organisation"""
        return self.add_named_attribute(event, 'target-org', target, category, to_ids, comment, distribution, proposal, **kwargs)

    def add_target_location(self, event, target, category='Targeting data', to_ids=True, comment=None, distribution=None, proposal=False, **kwargs):
        """Add a target location"""
        return self.add_named_attribute(event, 'target-location', target, category, to_ids, comment, distribution, proposal, **kwargs)

    def add_target_external(self, event, target, category='Targeting data', to_ids=True, comment=None, distribution=None, proposal=False, **kwargs):
        """Add an external target"""
        return self.add_named_attribute(event, 'target-external', target, category, to_ids, comment, distribution, proposal, **kwargs)

    # ##### Attribution attributes #####

    def add_threat_actor(self, event, target, category='Attribution', to_ids=True, comment=None, distribution=None, proposal=False, **kwargs):
        """Add a threat actor"""
        return self.add_named_attribute(event, 'threat-actor', target, category, to_ids, comment, distribution, proposal, **kwargs)

    # ##### Internal reference attributes #####

    def add_internal_link(self, event, reference, category='Internal reference', to_ids=False, comment=None, distribution=None, proposal=False, **kwargs):
        """Add an internal link"""
        return self.add_named_attribute(event, 'link', reference, category, to_ids, comment, distribution, proposal, **kwargs)

    def add_internal_comment(self, event, reference, category='Internal reference', to_ids=False, comment=None, distribution=None, proposal=False, **kwargs):
        """Add an internal comment"""
        return self.add_named_attribute(event, 'comment', reference, category, to_ids, comment, distribution, proposal, **kwargs)

    def add_internal_text(self, event, reference, category='Internal reference', to_ids=False, comment=None, distribution=None, proposal=False, **kwargs):
        """Add an internal text"""
        return self.add_named_attribute(event, 'text', reference, category, to_ids, comment, distribution, proposal, **kwargs)

    def add_internal_other(self, event, reference, category='Internal reference', to_ids=False, comment=None, distribution=None, proposal=False, **kwargs):
        """Add an internal reference (type other)"""
        return self.add_named_attribute(event, 'other', reference, category, to_ids, comment, distribution, proposal, **kwargs)

    # ##################################################
    # ######### Upload samples through the API #########
    # ##################################################

    def _prepare_upload(self, event_id, distribution, to_ids, category, comment, info,
                        analysis, threat_level_id):
        """Helper to prepare a sample to upload"""
        to_post = {'request': {}}

        if event_id is not None:
            try:
                event_id = int(event_id)
            except ValueError:
                pass
        if not isinstance(event_id, int):
            # New event
            misp_event = self._prepare_full_event(distribution, threat_level_id, analysis, info)
            to_post['request']['distribution'] = misp_event.distribution
            to_post['request']['info'] = misp_event.info
            to_post['request']['analysis'] = misp_event.analysis
            to_post['request']['threat_level_id'] = misp_event.threat_level_id
        else:
            to_post['request']['event_id'] = int(event_id)

        default_values = self.sane_default['malware-sample']
        if to_ids is None or not isinstance(to_ids, bool):
            to_ids = bool(int(default_values['to_ids']))
        to_post['request']['to_ids'] = to_ids

        if category is None or category not in self.categories:
            category = default_values['default_category']
        to_post['request']['category'] = category

        to_post['request']['comment'] = comment
        return to_post

    def _encode_file_to_upload(self, filepath_or_bytes):
        """Helper to encode a file to upload"""
        if isinstance(filepath_or_bytes, basestring) and os.path.isfile(filepath_or_bytes):
            with open(filepath_or_bytes, 'rb') as f:
                binblob = f.read()
        else:
            binblob = filepath_or_bytes
        return base64.b64encode(binblob).decode()

    def upload_sample(self, filename, filepath_or_bytes, event_id, distribution=None,
                      to_ids=True, category=None, comment=None, info=None,
                      analysis=None, threat_level_id=None):
        """Upload a sample"""
        to_post = self._prepare_upload(event_id, distribution, to_ids, category,
                                       comment, info, analysis, threat_level_id)
        to_post['request']['files'] = [{'filename': filename, 'data': self._encode_file_to_upload(filepath_or_bytes)}]
        return self._upload_sample(to_post)

    def upload_samplelist(self, filepaths, event_id, distribution=None,
                          to_ids=True, category=None, comment=None, info=None,
                          analysis=None, threat_level_id=None):
        """Upload a list of samples"""
        to_post = self._prepare_upload(event_id, distribution, to_ids, category,
                                       comment, info, analysis, threat_level_id)
        files = []
        for path in filepaths:
            if not os.path.isfile(path):
                continue
            files.append({'filename': os.path.basename(path), 'data': self._encode_file_to_upload(path)})
        to_post['request']['files'] = files
        return self._upload_sample(to_post)

    def _upload_sample(self, to_post):
        """Helper to upload a sample"""
        session = self.__prepare_session()
        url = urljoin(self.root_url, 'events/upload_sample')
        response = session.post(url, data=json.dumps(to_post))
        return self._check_response(response)
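
    # An illustrative upload into an existing event; the filename, path and id are placeholders:
    #     misp.upload_sample('dropper.exe', '/tmp/dropper.exe', event_id=1234,
    #                        category='Payload delivery', comment='collected sample')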

    # ############################
    # ######## Proposals #########
    # ############################

    def __query_proposal(self, session, path, id, attribute=None):
        """Helper to prepare a query to handle proposals"""
        url = urljoin(self.root_url, 'shadow_attributes/{}/{}'.format(path, id))
        if path in ['add', 'edit']:
            query = {'request': {'ShadowAttribute': attribute}}
            response = session.post(url, data=json.dumps(query, cls=MISPEncode))
        elif path == 'view':
            response = session.get(url)
        else:  # accept or discard
            response = session.post(url)
        return self._check_response(response)

    def proposal_view(self, event_id=None, proposal_id=None):
        """View a proposal"""
        session = self.__prepare_session()
        if proposal_id is not None and event_id is not None:
            return {'error': 'You can only view an event ID or a proposal ID'}
        if event_id is not None:
            id = event_id
        else:
            id = proposal_id
        return self.__query_proposal(session, 'view', id)

    def proposal_add(self, event_id, attribute):
        """Add a proposal"""
        session = self.__prepare_session()
        return self.__query_proposal(session, 'add', event_id, attribute)

    def proposal_edit(self, attribute_id, attribute):
        """Edit a proposal"""
        session = self.__prepare_session()
        return self.__query_proposal(session, 'edit', attribute_id, attribute)

    def proposal_accept(self, proposal_id):
        """Accept a proposal"""
        session = self.__prepare_session()
        return self.__query_proposal(session, 'accept', proposal_id)

    def proposal_discard(self, proposal_id):
        """Discard a proposal"""
        session = self.__prepare_session()
        return self.__query_proposal(session, 'discard', proposal_id)

    # ##############################
    # ###### Attribute update ######
    # ##############################

    def change_toids(self, attribute_uuid, to_ids):
        """Change the to_ids flag of an attribute"""
        if to_ids not in [0, 1]:
            raise Exception('to_ids can only be 0 or 1')
        query = {"to_ids": to_ids}
        session = self.__prepare_session()
        return self.__query(session, 'edit/{}'.format(attribute_uuid), query, controller='attributes')

    # ##############################
    # ###### Freetext import #######
    # ##############################

    def freetext(self, event_id, string, adhereToWarninglists=False, distribution=None):
        """Pass a text to the freetext importer"""
        query = {"value": string}
        wl_params = [False, True, 'soft']
        if adhereToWarninglists not in wl_params:
            raise Exception('Invalid parameter, adhereToWarninglists can only be {}'.format(', '.join(str(x) for x in wl_params)))
        if adhereToWarninglists:
            query['adhereToWarninglists'] = adhereToWarninglists
        if distribution is not None:
            query['distribution'] = distribution
        session = self.__prepare_session()
        return self.__query(session, 'freeTextImport/{}'.format(event_id), query, controller='events')
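
    # For instance, pushing a blob of indicators through the freetext importer while
    # honouring the warninglists; the event id and values are placeholders:
    #     misp.freetext(1234, 'evil.example.com 198.51.100.7', adhereToWarninglists=True)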

    # ##############################
    # ######## REST Search #########
    # ##############################

    def __query(self, session, path, query, controller='events', async_callback=None):
        """Helper to prepare a search query"""
        if query.get('error') is not None:
            return query
        if controller not in ['events', 'attributes']:
            raise Exception('Invalid controller. Can only be {}'.format(', '.join(['events', 'attributes'])))
        url = urljoin(self.root_url, '{}/{}'.format(controller, path.lstrip('/')))

        logger.debug('URL: %s', url)
        logger.debug('Query: %s', query)

        if ASYNC_OK and isinstance(session, FuturesSession) and async_callback:
            response = session.post(url, data=json.dumps(query), background_callback=async_callback)
        else:
            response = session.post(url, data=json.dumps(query))
        return self._check_response(response)

    def search_index(self, published=None, eventid=None, tag=None, datefrom=None,
                     dateuntil=None, eventinfo=None, threatlevel=None, distribution=None,
                     analysis=None, attribute=None, org=None, async_callback=None, normalize=False):
        """Search only at the index level. Use ! in front of a value as NOT, default OR

        If using async, give a callback that takes 2 args, session and response:
        basic usage is
        pymisp.search_index(..., async_callback=lambda ses, resp: print(resp.json()))

        :param published: Published (0,1)
        :param eventid: Event ID(s) | str or list
        :param tag: Tag(s) | str or list
        :param datefrom: First date, in format YYYY-MM-DD
        :param dateuntil: Last date, in format YYYY-MM-DD
        :param eventinfo: Event info(s) to match | str or list
        :param threatlevel: Threat level(s) (1,2,3,4) | str or list
        :param distribution: Distribution level(s) (0,1,2,3) | str or list
        :param analysis: Analysis level(s) (0,1,2) | str or list
        :param org: Organisation(s) | str or list
        :param async_callback: Function to call when the request returns (if running async)
        :param normalize: Normalize output | True or False
        """
        allowed = {'published': published, 'eventid': eventid, 'tag': tag, 'Dateuntil': dateuntil,
                   'Datefrom': datefrom, 'eventinfo': eventinfo, 'threatlevel': threatlevel,
                   'distribution': distribution, 'analysis': analysis, 'attribute': attribute,
                   'org': org}
        rule_levels = {'distribution': ["0", "1", "2", "3", "!0", "!1", "!2", "!3"],
                       'threatlevel': ["1", "2", "3", "4", "!1", "!2", "!3", "!4"],
                       'analysis': ["0", "1", "2", "!0", "!1", "!2"]}
        buildup_url = "events/index"
        to_post = {}
        for rule in allowed.keys():
            if allowed.get(rule) is None:
                continue
            param = allowed[rule]
            if not isinstance(param, list):
                param = [param]
            param = [x for x in map(str, param)]
            if rule in rule_levels:
                if not set(param).issubset(rule_levels[rule]):
                    raise SearchError('Values in your {} are invalid, have to be in {}'.format(rule, ', '.join(str(x) for x in rule_levels[rule])))
            to_post[rule] = '|'.join(str(x) for x in param)
        session = self.__prepare_session(async_implemented=(async_callback is not None))
        url = urljoin(self.root_url, buildup_url)

        if self.asynch and async_callback:
            response = session.post(url, data=json.dumps(to_post), background_callback=async_callback)
        else:
            response = session.post(url, data=json.dumps(to_post))
        res = self._check_response(response)
        if normalize:
            to_return = {'response': []}
            for elem in res['response']:
                tmp = {'Event': elem}
                to_return['response'].append(tmp)
            res = to_return
        return res
2015-08-06 17:42:41 +02:00
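    # Illustrative usage sketch (tag name and callback are placeholders): list only
    # published events tagged tlp:white at the index level, or run the same query
    # asynchronously (assuming the client was created with async support and
    # requests-futures is installed):
    #
    #   events = misp.search_index(published=1, tag='tlp:white', normalize=True)
    #   misp.search_index(published=1, tag='tlp:white',
    #                     async_callback=lambda ses, resp: print(resp.json()))
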
    def search_all(self, value):
        """Search a value in the whole database"""
        query = {'value': value, 'searchall': 1}
        session = self.__prepare_session()
        return self.__query(session, 'restSearch/download', query)

    def __prepare_rest_search(self, values, not_values):
        """Prepare a search, generate the chain processed by the server

        :param values: Values to search
        :param not_values: Values that should not be in the response
        """
        to_return = ''
        if values is not None:
            if not isinstance(values, list):
                to_return += values
            else:
                to_return += '&&'.join(values)
        if not_values is not None:
            if len(to_return) > 0:
                to_return += '&&!'
            else:
                to_return += '!'
            if not isinstance(not_values, list):
                to_return += not_values
            else:
                to_return += '&&!'.join(not_values)
        return to_return

    def search(self, controller='events', async_callback=None, **kwargs):
        """Search via the Rest API

        :param values: values to search for
        :param not_values: values *not* to search for
        :param type_attribute: Type of attribute
        :param category: Category to search
        :param org: Org reporting the event
        :param tags: Tags to search for
        :param not_tags: Tags *not* to search for
        :param date_from: First date
        :param date_to: Last date
        :param last: Last updated events (for example 5d or 12h or 30m)
        :param eventid: Event ID(s) to search for
        :param withAttachments: return events with or without the attachments
        :param uuid: search by uuid
        :param publish_timestamp: the publish timestamp
        :param timestamp: the creation timestamp
        :param enforceWarninglist: Enforce the warning lists
        :param searchall: full text search on the database
        :param metadata: return only metadata if True
        :param published: return only published events
        :param to_ids: return only the attributes with the to_ids flag set
        :param deleted: also return the deleted attributes
        :param async_callback: The function to run when results are returned
        """
        query = {}
        # Event: array('value', 'type', 'category', 'org', 'tags', 'from', 'to', 'last', 'eventid', 'withAttachments', 'uuid', 'publish_timestamp', 'timestamp', 'enforceWarninglist', 'searchall', 'metadata', 'published');
        # Attribute: array('value', 'type', 'category', 'org', 'tags', 'from', 'to', 'last', 'eventid', 'withAttachments', 'uuid', 'publish_timestamp', 'timestamp', 'enforceWarninglist', 'to_ids', 'deleted');
        val = self.__prepare_rest_search(kwargs.pop('values', None), kwargs.pop('not_values', None))
        if len(val) != 0:
            query['value'] = val

        query['type'] = kwargs.pop('type_attribute', None)
        query['category'] = kwargs.pop('category', None)
        query['org'] = kwargs.pop('org', None)

        tag = self.__prepare_rest_search(kwargs.pop('tags', None), kwargs.pop('not_tags', None))
        if len(tag) != 0:
            query['tags'] = tag

        date_from = kwargs.pop('date_from', None)
        if date_from:
            if isinstance(date_from, (datetime.date, datetime.datetime)):
                query['from'] = date_from.strftime('%Y-%m-%d')
            else:
                query['from'] = date_from

        date_to = kwargs.pop('date_to', None)
        if date_to:
            if isinstance(date_to, (datetime.date, datetime.datetime)):
                query['to'] = date_to.strftime('%Y-%m-%d')
            else:
                query['to'] = date_to

        query['last'] = kwargs.pop('last', None)
        query['eventid'] = kwargs.pop('eventid', None)
        query['withAttachments'] = kwargs.pop('withAttachments', None)

        uuid = kwargs.pop('uuid', None)
        if uuid:
            if self._valid_uuid(uuid):
                query['uuid'] = uuid
            else:
                return {'error': 'You must enter a valid uuid.'}

        query['publish_timestamp'] = kwargs.pop('publish_timestamp', None)
        query['timestamp'] = kwargs.pop('timestamp', None)
        query['enforceWarninglist'] = kwargs.pop('enforceWarninglist', None)
        query['to_ids'] = kwargs.pop('to_ids', None)
        query['deleted'] = kwargs.pop('deleted', None)

        if controller == 'events':
            # Event search only:
            query['searchall'] = kwargs.pop('searchall', None)
            query['metadata'] = kwargs.pop('metadata', None)
            query['published'] = kwargs.pop('published', None)

        # Cleanup
        query = {k: v for k, v in query.items() if v is not None}

        if kwargs:
            raise SearchError('Unused parameter: {}'.format(', '.join(kwargs.keys())))

        # Create a session, make it async if and only if we have a callback
        session = self.__prepare_session(async_implemented=(async_callback is not None))
        return self.__query(session, 'restSearch/download', query, controller, async_callback)

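    # Illustrative usage sketch (the domain names are placeholders): a REST search for
    # two domains reported over the last 30 days, restricted to network activity
    # attributes:
    #
    #   result = misp.search(controller='attributes',
    #                        values=['evil.example.com', 'bad.example.net'],
    #                        category='Network activity', last='30d')
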
    def get_attachment(self, attribute_id):
        """Get an attachment (not a malware sample) by attribute ID.
        Returns the attachment as a bytestream, or a dictionary containing the error message.

        :param attribute_id: Attribute ID to fetch
        """
        attach = urljoin(self.root_url, 'attributes/downloadAttachment/download/{}'.format(attribute_id))
        session = self.__prepare_session()
        response = session.get(attach)
        try:
            response.json()
            # The query failed: the response contains a JSON blob with the error
            return self._check_response(response)
        except ValueError:
            # content contains the attachment in binary
            return response.content

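    # Illustrative usage sketch (attribute ID and file name are placeholders): fetch an
    # attachment and write it to disk only when the call did not return an error dict:
    #
    #   data = misp.get_attachment(42)
    #   if isinstance(data, bytes):
    #       with open('attachment.bin', 'wb') as out:
    #           out.write(data)
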
    def get_yara(self, event_id):
        """Get the yara rules from an event"""
        to_post = {'request': {'eventid': event_id, 'type': 'yara'}}
        session = self.__prepare_session()
        response = session.post(urljoin(self.root_url, 'attributes/restSearch'), data=json.dumps(to_post))
        result = self._check_response(response)
        if result.get('error') is not None:
            return False, result.get('error')
        if not result.get('response'):
            return False, result.get('message')
        rules = '\n\n'.join([a['value'] for a in result['response']['Attribute']])
        return True, rules

    def download_samples(self, sample_hash=None, event_id=None, all_samples=False):
        """Download samples, by hash or event ID. If there are multiple samples in one event, use the all_samples switch"""
        to_post = {'request': {'hash': sample_hash, 'eventID': event_id, 'allSamples': all_samples}}
        session = self.__prepare_session()
        response = session.post(urljoin(self.root_url, 'attributes/downloadSample'), data=json.dumps(to_post))
        result = self._check_response(response)
        if result.get('error') is not None:
            return False, result.get('error')
        if not result.get('result'):
            return False, result.get('message')
        details = []
        for f in result['result']:
            decoded = base64.b64decode(f['base64'])
            zipped = BytesIO(decoded)
            try:
                archive = zipfile.ZipFile(zipped)
                if f.get('md5'):
                    # New format
                    unzipped = BytesIO(archive.open(f['md5'], pwd=b'infected').read())
                else:
                    # Old format
                    unzipped = BytesIO(archive.open(f['filename'], pwd=b'infected').read())
                details.append([f['event_id'], f['filename'], unzipped])
            except zipfile.BadZipfile:
                # In case the sample isn't zipped
                details.append([f['event_id'], f['filename'], zipped])
        return True, details

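    # Illustrative usage sketch (the event ID is a placeholder): fetch every sample
    # attached to one event and dump the unzipped payloads to disk:
    #
    #   ok, samples = misp.download_samples(event_id=1234, all_samples=True)
    #   if ok:
    #       for event_id, filename, payload in samples:
    #           with open(filename, 'wb') as out:
    #               out.write(payload.read())
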
    def download_last(self, last):
        """Download the last updated events.

        :param last: can be defined in days, hours, minutes (for example 5d or 12h or 30m)
        """
        return self.search(last=last)

    # ########## Tags ##########

    def get_all_tags(self, quiet=False):
        """Get all the tags used on the instance"""
        session = self.__prepare_session()
        url = urljoin(self.root_url, 'tags')
        response = session.get(url)
        r = self._check_response(response)
        if not quiet or r.get('errors'):
            return r
        else:
            to_return = []
            for tag in r['Tag']:
                to_return.append(tag['name'])
            return to_return

    def new_tag(self, name=None, colour="#00ace6", exportable=False):
        """Create a new tag"""
        to_post = {'Tag': {'name': name, 'colour': colour, 'exportable': exportable}}
        session = self.__prepare_session()
        url = urljoin(self.root_url, 'tags/add')
        response = session.post(url, data=json.dumps(to_post))
        return self._check_response(response)

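    # Illustrative usage sketch (the tag name is a placeholder): create an exportable
    # tag, then list the tag names currently known to the instance:
    #
    #   misp.new_tag('internal:reviewed', colour='#33cc99', exportable=True)
    #   print(misp.get_all_tags(quiet=True))
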
    # ########## Version ##########

    def get_api_version(self):
        """Returns the current version of PyMISP installed on the system"""
        return {'version': __version__}

    def get_api_version_master(self):
        """Get the most recent version of PyMISP from github"""
        r = requests.get('https://raw.githubusercontent.com/MISP/PyMISP/master/pymisp/__init__.py')
        if r.status_code == 200:
            version = re.findall("__version__ = '(.*)'", r.text)
            return {'version': version[0]}
        else:
            return {'error': 'Impossible to retrieve the version of the master branch.'}

    def get_recommended_api_version(self):
        """Returns the recommended API version from the server"""
        session = self.__prepare_session()
        url = urljoin(self.root_url, 'servers/getPyMISPVersion.json')
        response = session.get(url)
        return self._check_response(response)

    def get_version(self):
        """Returns the version of the instance."""
        session = self.__prepare_session()
        url = urljoin(self.root_url, 'servers/getVersion.json')
        response = session.get(url)
        return self._check_response(response)

    def get_version_master(self):
        """Get the most recent version from github"""
        r = requests.get('https://raw.githubusercontent.com/MISP/MISP/2.4/VERSION.json')
        if r.status_code == 200:
            master_version = json.loads(r.text)
            return {'version': '{}.{}.{}'.format(master_version['major'], master_version['minor'], master_version['hotfix'])}
        else:
            return {'error': 'Impossible to retrieve the version of the master branch.'}

    # ############## Statistics ##################

    def get_attributes_statistics(self, context='type', percentage=None):
        """Get attributes statistics from the MISP instance"""
        session = self.__prepare_session()
        if context != 'category':
            context = 'type'
        if percentage is not None:
            url = urljoin(self.root_url, 'attributes/attributeStatistics/{}/{}'.format(context, percentage))
        else:
            url = urljoin(self.root_url, 'attributes/attributeStatistics/{}'.format(context))
        response = session.get(url)
        return self._check_response(response)

    def get_tags_statistics(self, percentage=None, name_sort=None):
        """Get tags statistics from the MISP instance"""
        session = self.__prepare_session()
        if percentage is not None:
            percentage = 'true'
        else:
            percentage = 'false'
        if name_sort is not None:
            name_sort = 'true'
        else:
            name_sort = 'false'
        url = urljoin(self.root_url, 'tags/tagStatistics/{}/{}'.format(percentage, name_sort))
        response = session.get(url)
        return self._check_response(response)

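    # Illustrative usage sketch: fetch the attribute breakdown per category as
    # percentages, and the tag statistics sorted by name:
    #
    #   per_category = misp.get_attributes_statistics(context='category', percentage=1)
    #   tag_stats = misp.get_tags_statistics(percentage=True, name_sort=True)
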
    # ############## Sightings ##################

    def sighting_per_id(self, attribute_id):
        """Add a sighting to an attribute (by attribute ID)"""
        session = self.__prepare_session()
        url = urljoin(self.root_url, 'sightings/add/{}'.format(attribute_id))
        response = session.post(url)
        return self._check_response(response)

    def sighting_per_uuid(self, attribute_uuid):
        """Add a sighting to an attribute (by attribute UUID)"""
        session = self.__prepare_session()
        url = urljoin(self.root_url, 'sightings/add/{}'.format(attribute_uuid))
        response = session.post(url)
        return self._check_response(response)

    def set_sightings(self, sightings):
        """Push a sighting (python dictionary)"""
        if isinstance(sightings, dict):
            sightings = json.dumps(sightings)
        session = self.__prepare_session()
        url = urljoin(self.root_url, 'sightings/add/')
        response = session.post(url, data=sightings)
        return self._check_response(response)

    def sighting_per_json(self, json_file):
        """Push a sighting (JSON file)"""
        with open(json_file, 'r') as f:
            jdata = json.load(f)
        return self.set_sightings(jdata)

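    # Illustrative usage sketch (attribute ID, value and source are placeholders, and the
    # dictionary layout is an assumption about the MISP sightings API): report sightings
    # either by attribute ID/UUID or as a raw sighting dictionary:
    #
    #   misp.sighting_per_id(42)
    #   misp.set_sightings({'value': 'evil.example.com', 'source': 'honeypot-01'})
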
    # ############## Sharing Groups ##################

    def get_sharing_groups(self):
        """Get the existing sharing groups"""
        session = self.__prepare_session()
        url = urljoin(self.root_url, 'sharing_groups.json')
        response = session.get(url)
        return self._check_response(response)['response']

    # ############## Users ##################

    def _set_user_parameters(self, **kwargs):
        """Build the user dictionary expected by MISP from keyword arguments"""
        user = {}
        if kwargs.get('email'):
            user['email'] = kwargs.get('email')
        if kwargs.get('org_id'):
            user['org_id'] = kwargs.get('org_id')
        if kwargs.get('role_id'):
            user['role_id'] = kwargs.get('role_id')
        if kwargs.get('password'):
            user['password'] = kwargs.get('password')
        if kwargs.get('external_auth_required') is not None:
            user['external_auth_required'] = kwargs.get('external_auth_required')
        if kwargs.get('external_auth_key'):
            user['external_auth_key'] = kwargs.get('external_auth_key')
        if kwargs.get('enable_password') is not None:
            user['enable_password'] = kwargs.get('enable_password')
        if kwargs.get('nids_sid'):
            user['nids_sid'] = kwargs.get('nids_sid')
        if kwargs.get('server_id') is not None:
            user['server_id'] = kwargs.get('server_id')
        if kwargs.get('gpgkey'):
            user['gpgkey'] = kwargs.get('gpgkey')
        if kwargs.get('certif_public'):
            user['certif_public'] = kwargs.get('certif_public')
        if kwargs.get('autoalert') is not None:
            user['autoalert'] = kwargs.get('autoalert')
        if kwargs.get('contactalert') is not None:
            user['contactalert'] = kwargs.get('contactalert')
        if kwargs.get('disabled') is not None:
            user['disabled'] = kwargs.get('disabled')
        if kwargs.get('change_pw') is not None:
            user['change_pw'] = kwargs.get('change_pw')
        if kwargs.get('termsaccepted') is not None:
            user['termsaccepted'] = kwargs.get('termsaccepted')
        if kwargs.get('newsread') is not None:
            user['newsread'] = kwargs.get('newsread')
        if kwargs.get('authkey'):
            user['authkey'] = kwargs.get('authkey')
        return user

    def get_users_list(self):
        """Get the list of users on the instance"""
        session = self.__prepare_session()
        url = urljoin(self.root_url, 'admin/users')
        response = session.get(url)
        return self._check_response(response)['response']

    def get_user(self, user_id):
        """Get a user by ID"""
        session = self.__prepare_session()
        url = urljoin(self.root_url, 'admin/users/view/{}'.format(user_id))
        response = session.get(url)
        return self._check_response(response)

    def add_user(self, email, org_id, role_id, **kwargs):
        """Add a new user"""
        new_user = self._set_user_parameters(**dict(email=email, org_id=org_id, role_id=role_id, **kwargs))
        session = self.__prepare_session()
        url = urljoin(self.root_url, 'admin/users/add/')
        response = session.post(url, data=json.dumps(new_user))
        return self._check_response(response)

    def add_user_json(self, json_file):
        """Add a new user from a JSON file"""
        session = self.__prepare_session()
        with open(json_file, 'r') as f:
            jdata = json.load(f)
        url = urljoin(self.root_url, 'admin/users/add/')
        response = session.post(url, data=json.dumps(jdata))
        return self._check_response(response)

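    # Illustrative usage sketch (e-mail address, organisation and role IDs are
    # placeholders; role IDs depend on the instance): create a user in organisation 1
    # and disable the autoalert e-mails:
    #
    #   misp.add_user('analyst@example.com', org_id=1, role_id=3, autoalert=False)
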
    def get_user_fields_list(self):
        session = self.__prepare_session()
        url = urljoin(self.root_url, 'admin/users/add/')
        response = session.get(url)
        return self._check_response(response)

    def edit_user(self, user_id, **kwargs):
        """Edit a user"""
        edit_user = self._set_user_parameters(**kwargs)
        session = self.__prepare_session()
        url = urljoin(self.root_url, 'admin/users/edit/{}'.format(user_id))
        response = session.post(url, data=json.dumps(edit_user))
        return self._check_response(response)

    def edit_user_json(self, json_file, user_id):
        """Edit a user from a JSON file"""
        session = self.__prepare_session()
        with open(json_file, 'r') as f:
            jdata = json.load(f)
        url = urljoin(self.root_url, 'admin/users/edit/{}'.format(user_id))
        response = session.post(url, data=json.dumps(jdata))
        return self._check_response(response)

    def delete_user(self, user_id):
        """Delete a user by ID"""
        session = self.__prepare_session()
        url = urljoin(self.root_url, 'admin/users/delete/{}'.format(user_id))
        response = session.post(url)
        return self._check_response(response)

    # ############## Organisations ##################

    def _set_organisation_parameters(self, **kwargs):
        """Build the organisation dictionary expected by MISP from keyword arguments"""
        organisation = {}
        if kwargs.get('name'):
            organisation['name'] = kwargs.get('name')
        if kwargs.get('description'):
            organisation['description'] = kwargs.get('description')
        if kwargs.get('type'):
            organisation['type'] = kwargs.get('type')
        if kwargs.get('nationality'):
            organisation['nationality'] = kwargs.get('nationality')
        if kwargs.get('sector'):
            organisation['sector'] = kwargs.get('sector')
        if kwargs.get('uuid'):
            organisation['uuid'] = kwargs.get('uuid')
        if kwargs.get('contacts'):
            organisation['contacts'] = kwargs.get('contacts')
        if kwargs.get('local') is not None:
            organisation['local'] = kwargs.get('local')
        return organisation

    def get_organisations_list(self, scope="local"):
        """Get the list of organisations; scope can be 'local', 'external' or 'all'"""
        session = self.__prepare_session()
        scope = scope.lower()
        if scope not in ["local", "external", "all"]:
            raise ValueError("Authorized fields are 'local', 'external' or 'all'")
        url = urljoin(self.root_url, 'organisations/index/scope:{}'.format(scope))
        response = session.get(url)
        return self._check_response(response)['response']

    def get_organisation(self, organisation_id):
        """Get an organisation by ID"""
        session = self.__prepare_session()
        url = urljoin(self.root_url, 'organisations/view/{}'.format(organisation_id))
        response = session.get(url)
        return self._check_response(response)

    def add_organisation(self, name, **kwargs):
        """Add a new organisation"""
        new_org = self._set_organisation_parameters(**dict(name=name, **kwargs))
        session = self.__prepare_session()
        if 'local' in new_org:
            if new_org.get('local') is False:
                if 'uuid' not in new_org:
                    raise PyMISPError('A remote org MUST have a valid uuid')
        url = urljoin(self.root_url, 'admin/organisations/add/')
        response = session.post(url, data=json.dumps(new_org))
        return self._check_response(response)

    def add_organisation_json(self, json_file):
        """Add a new organisation from a JSON file"""
        session = self.__prepare_session()
        with open(json_file, 'r') as f:
            jdata = json.load(f)
        url = urljoin(self.root_url, 'admin/organisations/add/')
        response = session.post(url, data=json.dumps(jdata))
        return self._check_response(response)

    def get_organisation_fields_list(self):
        session = self.__prepare_session()
        url = urljoin(self.root_url, 'admin/organisations/add/')
        response = session.get(url)
        return self._check_response(response)

    def edit_organisation(self, org_id, **kwargs):
        """Edit an organisation"""
        edit_org = self._set_organisation_parameters(**kwargs)
        session = self.__prepare_session()
        url = urljoin(self.root_url, 'admin/organisations/edit/{}'.format(org_id))
        response = session.post(url, data=json.dumps(edit_org))
        return self._check_response(response)

    def edit_organisation_json(self, json_file, org_id):
        """Edit an organisation from a JSON file"""
        session = self.__prepare_session()
        with open(json_file, 'r') as f:
            jdata = json.load(f)
        url = urljoin(self.root_url, 'admin/organisations/edit/{}'.format(org_id))
        response = session.post(url, data=json.dumps(jdata))
        return self._check_response(response)

    def delete_organisation(self, org_id):
        """Delete an organisation by ID"""
        session = self.__prepare_session()
        url = urljoin(self.root_url, 'admin/organisations/delete/{}'.format(org_id))
        response = session.post(url)
        return self._check_response(response)

    # ############## Servers ##################

    def _set_server_organisation(self, server, organisation):
        if organisation is None:
            raise PyMISPError('Need a valid organisation as argument, create it before if needed')
        if 'Organisation' in organisation:
            organisation = organisation.get('Organisation')
        if 'local' not in organisation:
            raise PyMISPError('Need a valid organisation as argument. "local" value has not been set in this organisation')
        if 'id' not in organisation:
            raise PyMISPError('Need a valid organisation as argument. "id" value doesn\'t exist in provided organisation')
        if organisation.get('local'):
            # Local organisation is '0' and remote organisation is '1'. These values are extracted from the web interface of MISP
            organisation_type = 0
        else:
            organisation_type = 1
        server['organisation_type'] = organisation_type
        server['json'] = json.dumps({'id': organisation['id']})
        return server

    def _set_server_parameters(self, url, name, authkey, organisation, internal,
                               push, pull, self_signed, push_rules, pull_rules,
                               submitted_cert, submitted_client_cert, delete_cert,
                               delete_client_cert):
        server = {}
        self._set_server_organisation(server, organisation)
        if url is not None:
            server['url'] = url
        if name is not None:
            server['name'] = name
        if authkey is not None:
            server['authkey'] = authkey
        if internal is not None:
            server['internal'] = internal
        if push is not None:
            server['push'] = push
        if pull is not None:
            server['pull'] = pull
        if self_signed is not None:
            server['self_signed'] = self_signed
        if push_rules is not None:
            server['push_rules'] = push_rules
        if pull_rules is not None:
            server['pull_rules'] = pull_rules
        if submitted_cert is not None:
            server['submitted_cert'] = submitted_cert
        if submitted_client_cert is not None:
            server['submitted_client_cert'] = submitted_client_cert
        if delete_cert is not None:
            server['delete_cert'] = delete_cert
        if delete_client_cert is not None:
            server['delete_client_cert'] = delete_client_cert
        return server

    def add_server(self, url, name, authkey, organisation, internal=None, push=False,
                   pull=False, self_signed=False, push_rules="", pull_rules="",
                   submitted_cert=None, submitted_client_cert=None):
        new_server = self._set_server_parameters(url, name, authkey, organisation, internal,
                                                 push, pull, self_signed, push_rules, pull_rules, submitted_cert,
                                                 submitted_client_cert, None, None)
        session = self.__prepare_session()
        url = urljoin(self.root_url, 'servers/add')
        response = session.post(url, data=json.dumps(new_server))
        return self._check_response(response)

    def add_server_json(self, json_file):
        session = self.__prepare_session()
        with open(json_file, 'r') as f:
            jdata = json.load(f)
        url = urljoin(self.root_url, 'servers/add')
        response = session.post(url, data=json.dumps(jdata))
        return self._check_response(response)

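    # Illustrative usage sketch (URL, key and organisation ID are placeholders): register
    # a remote instance to pull from, passing an organisation previously fetched with
    # get_organisation() so it carries the 'local' and 'id' values this API expects:
    #
    #   remote_org = misp.get_organisation(2)
    #   misp.add_server('https://partner-misp.example.com', 'Partner MISP',
    #                   '<remote API key>', remote_org, pull=True)
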
    def edit_server(self, server_id, url=None, name=None, authkey=None, organisation=None, internal=None, push=False,
                    pull=False, self_signed=False, push_rules="", pull_rules="",
                    submitted_cert=None, submitted_client_cert=None, delete_cert=None, delete_client_cert=None):
        new_server = self._set_server_parameters(url, name, authkey, organisation, internal,
                                                 push, pull, self_signed, push_rules, pull_rules, submitted_cert,
                                                 submitted_client_cert, delete_cert, delete_client_cert)
        session = self.__prepare_session()
        url = urljoin(self.root_url, 'servers/edit/{}'.format(server_id))
        response = session.post(url, data=json.dumps(new_server))
        return self._check_response(response)

    def edit_server_json(self, json_file, server_id):
        session = self.__prepare_session()
        with open(json_file, 'r') as f:
            jdata = json.load(f)
        url = urljoin(self.root_url, 'servers/edit/{}'.format(server_id))
        response = session.post(url, data=json.dumps(jdata))
        return self._check_response(response)

    # ############## Roles ##################

    def get_roles_list(self):
        """Get the list of existing roles"""
        session = self.__prepare_session()
        url = urljoin(self.root_url, '/roles')
        response = session.get(url)
        return self._check_response(response)['response']

    # ############## Tags ##################

    def get_tags_list(self):
        """Get the list of existing tags"""
        session = self.__prepare_session()
        url = urljoin(self.root_url, '/tags')
        response = session.get(url)
        return self._check_response(response)['Tag']

    # ##############################################
    # ############### Non-JSON output ##############
    # ##############################################

    # ############## Suricata ##############

    def download_all_suricata(self):
        """Download the Suricata rules for all events."""
        suricata_rules = urljoin(self.root_url, 'events/nids/suricata/download')
        session = self.__prepare_session('rules')
        response = session.get(suricata_rules)
        return response

    def download_suricata_rule_event(self, event_id):
        """Download the Suricata rules for one event.

        :param event_id: ID of the event to download (same as get)
        """
        template = urljoin(self.root_url, 'events/nids/suricata/download/{}'.format(event_id))
        session = self.__prepare_session('rules')
        response = session.get(template)
        return response

    # ############## Text ###############

    def get_all_attributes_txt(self, type_attr, tags=False, eventId=False, allowNonIDS=False, date_from=False, date_to=False, last=False, enforceWarninglist=False, allowNotPublished=False):
        """Get all attributes of a specific type as plain text. Only published and IDS-flagged attributes are exported, unless stated otherwise."""
        session = self.__prepare_session('txt')
        url = urljoin(self.root_url, 'attributes/text/download/%s/%s/%s/%s/%s/%s/%s/%s/%s' % (type_attr, tags, eventId, allowNonIDS, date_from, date_to, last, enforceWarninglist, allowNotPublished))
        response = session.get(url)
        return response

    # ############## STIX ##############

    def get_stix_event(self, event_id=None, with_attachments=False, from_date=False, to_date=False, tags=False):
        """Get an event/events in STIX format"""
        if tags:
            if isinstance(tags, list):
                tags = "&&".join(tags)
        session = self.__prepare_session()
        url = urljoin(self.root_url, "/events/stix/download/{}/{}/{}/{}/{}".format(
            event_id, with_attachments, tags, from_date, to_date))
        logger.debug("Getting STIX event from %s", url)
        response = session.get(url)
        return self._check_response(response)

    def get_stix(self, **kwargs):
        return self.get_stix_event(**kwargs)

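    # Illustrative usage sketch (the event ID and tag are placeholders): export a single
    # event, or everything tagged tlp:green, as STIX:
    #
    #   stix_event = misp.get_stix_event(event_id=1234, with_attachments=True)
    #   stix_green = misp.get_stix(tags=['tlp:green'])
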
    # ###########################
    # ######## Feed #########
    # ###########################

    def fetch_feed(self, feed_id):
        """Fetch one single feed"""
        session = self.__prepare_session()
        url = urljoin(self.root_url, 'feeds/fetchFromFeed/{}'.format(feed_id))
        response = session.get(url)
        return self._check_response(response)

    def view_feeds(self):
        """Get the content of all the feeds"""
        session = self.__prepare_session()
        url = urljoin(self.root_url, 'feeds')
        response = session.get(url)
        return self._check_response(response)

    def view_feed(self, feed_ids):
        """Get the content of a single feed"""
        session = self.__prepare_session()
        url = urljoin(self.root_url, 'feeds/view/{}'.format(feed_ids))
        response = session.get(url)
        return self._check_response(response)

    def cache_feeds_all(self):
        """Cache all the feeds"""
        session = self.__prepare_session()
        url = urljoin(self.root_url, 'feeds/cacheFeeds/all')
        response = session.get(url)
        return self._check_response(response)

    def cache_feed(self, feed_id):
        """Cache a specific feed"""
        session = self.__prepare_session()
        url = urljoin(self.root_url, 'feeds/cacheFeeds/{}'.format(feed_id))
        response = session.get(url)
        return self._check_response(response)

    def cache_feeds_freetext(self):
        """Cache all the freetext feeds"""
        session = self.__prepare_session()
        url = urljoin(self.root_url, 'feeds/cacheFeeds/freetext')
        response = session.get(url)
        return self._check_response(response)

    def cache_feeds_misp(self):
        """Cache all the MISP feeds"""
        session = self.__prepare_session()
        url = urljoin(self.root_url, 'feeds/cacheFeeds/misp')
        response = session.get(url)
        return self._check_response(response)

    def compare_feeds(self):
        """Generate the comparison matrix for all the MISP feeds"""
        session = self.__prepare_session()
        url = urljoin(self.root_url, 'feeds/compareFeeds')
        response = session.get(url)
        return self._check_response(response)

    def cache_all_feeds(self):
        """Alias for cache_feeds_all"""
        # DEPRECATED
        return self.cache_feeds_all()

    # ###################
    # ### Objects ###
    # ###################

    def add_object(self, event_id, template_id, misp_object):
        """Add an object"""
        session = self.__prepare_session()
        url = urljoin(self.root_url, 'objects/add/{}/{}'.format(event_id, template_id))
        response = session.post(url, data=misp_object.to_json())
        return self._check_response(response)

    def add_object_reference(self, misp_object_reference):
        """Add a reference to an object"""
        session = self.__prepare_session()
        url = urljoin(self.root_url, 'object_references/add')
        response = session.post(url, data=misp_object_reference.to_json())
        return self._check_response(response)

    def get_object_templates_list(self):
        """Returns the list of Object templates available on the MISP instance"""
        session = self.__prepare_session()
        url = urljoin(self.root_url, 'objectTemplates')
        response = session.get(url)
        return self._check_response(response)['response']

    def get_object_template_id(self, object_uuid):
        """Gets the template ID corresponding to the UUID passed as parameter"""
        templates = self.get_object_templates_list()
        for t in templates:
            if t['ObjectTemplate']['uuid'] == object_uuid:
                return t['ObjectTemplate']['id']
        raise Exception('Unable to find template uuid {} on the MISP instance'.format(object_uuid))

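    # Illustrative usage sketch (the event ID and `misp_object` are placeholders): attach
    # a MISP object built elsewhere (for instance with the pymisp object helpers) to an
    # event, resolving the template ID from the object's template UUID first:
    #
    #   template_id = misp.get_object_template_id(misp_object.template_uuid)
    #   misp.add_object(1234, template_id, misp_object)
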
    # ###########################
    # ####### Deprecated ########
    # ###########################

    @deprecated
    def add_tag(self, event, tag, attribute=False):
        session = self.__prepare_session()
        if attribute:
            to_post = {'request': {'Attribute': {'id': event['id'], 'tag': tag}}}
            path = 'attributes/addTag'
        else:
            # Allow for backwards-compat with old style
            if "Event" in event:
                event = event["Event"]
            to_post = {'request': {'Event': {'id': event['id'], 'tag': tag}}}
            path = 'events/addTag'
        response = session.post(urljoin(self.root_url, path), data=json.dumps(to_post))
        return self._check_response(response)

    @deprecated
    def remove_tag(self, event, tag, attribute=False):
        session = self.__prepare_session()
        if attribute:
            to_post = {'request': {'Attribute': {'id': event['id'], 'tag': tag}}}
            path = 'attributes/removeTag'
        else:
            to_post = {'request': {'Event': {'id': event['Event']['id'], 'tag': tag}}}
            path = 'events/removeTag'
        response = session.post(urljoin(self.root_url, path), data=json.dumps(to_post))
        return self._check_response(response)