fix: Make pep8 happy

pipenv
Raphaël Vinot 2018-12-11 15:29:09 +01:00
parent d0aec62f1a
commit 8fc5b1fd1f
56 changed files with 695 additions and 638 deletions


@ -13,7 +13,7 @@ python:
- "3.7-dev" - "3.7-dev"
install: install:
- pip install -U nose codecov pytest - pip install -U nose codecov pytest flake8
- pip install -U -r REQUIREMENTS - pip install -U -r REQUIREMENTS
- pip install . - pip install .
@ -30,6 +30,7 @@ script:
- sleep 5 - sleep 5
- nosetests --with-coverage --cover-package=misp_modules - nosetests --with-coverage --cover-package=misp_modules
- kill -s INT $pid - kill -s INT $pid
- flake8 --ignore=E501,W503 misp_modules
after_success: after_success:
- coverage combine .coverage* - coverage combine .coverage*
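
The new CI step runs flake8 over misp_modules with E501 (line too long) and W503 (line break before binary operator) ignored, which is the style most of the hunks below are written to satisfy. For reference, a rough local equivalent using flake8's legacy Python API (option and attribute names per flake8's documentation; treat this as an illustrative sketch, not part of the commit):

# Local approximation of "flake8 --ignore=E501,W503 misp_modules"
from flake8.api import legacy as flake8

style_guide = flake8.get_style_guide(ignore=["E501", "W503"])
report = style_guide.check_files(["misp_modules"])
print(report.total_errors)  # 0 once the tree is clean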


@ -38,14 +38,14 @@ from tornado.concurrent import run_on_executor
from concurrent.futures import ThreadPoolExecutor from concurrent.futures import ThreadPoolExecutor
try: try:
from .modules import * from .modules import * # noqa
HAS_PACKAGE_MODULES = True HAS_PACKAGE_MODULES = True
except Exception as e: except Exception as e:
print(e) print(e)
HAS_PACKAGE_MODULES = False HAS_PACKAGE_MODULES = False
try: try:
from .helpers import * from .helpers import * # noqa
HAS_PACKAGE_HELPERS = True HAS_PACKAGE_HELPERS = True
except Exception as e: except Exception as e:
print(e) print(e)
@ -148,7 +148,7 @@ def load_package_modules():
mhandlers = {} mhandlers = {}
modules = [] modules = []
for path, module in sys.modules.items(): for path, module in sys.modules.items():
r = re.findall("misp_modules[.]modules[.](\w+)[.]([^_]\w+)", path) r = re.findall(r"misp_modules[.]modules[.](\w+)[.]([^_]\w+)", path)
if r and len(r[0]) == 2: if r and len(r[0]) == 2:
moduletype, modulename = r[0] moduletype, modulename = r[0]
mhandlers[modulename] = module mhandlers[modulename] = module
@ -159,6 +159,9 @@ def load_package_modules():
class ListModules(tornado.web.RequestHandler): class ListModules(tornado.web.RequestHandler):
global loaded_modules
global mhandlers
def get(self): def get(self):
ret = [] ret = []
for module in loaded_modules: for module in loaded_modules:
@ -238,7 +241,7 @@ def main():
for module in args.m: for module in args.m:
mispmod = importlib.import_module(module) mispmod = importlib.import_module(module)
mispmod.register(mhandlers, loaded_modules) mispmod.register(mhandlers, loaded_modules)
service = [(r'/modules', ListModules), (r'/query', QueryModule)] service = [(r'/modules', ListModules), (r'/query', QueryModule)]
application = tornado.web.Application(service) application = tornado.web.Application(service)
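
The `# noqa` markers keep flake8 from flagging the re-exporting star imports (F401/F403), and the findall pattern becomes a raw string to silence W605 (invalid escape sequence: "\w" means nothing as a string escape). A small illustration of the raw-string fix, using a made-up module path that mirrors the loop above:

# W605: in a plain string "\w" is an invalid escape sequence; a raw string keeps the regex intact.
import re

path = "misp_modules.modules.expansion.dns"  # illustrative path
print(re.findall(r"misp_modules[.]modules[.](\w+)[.]([^_]\w+)", path))
# -> [('expansion', 'dns')]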


@ -33,7 +33,7 @@ def selftest(enable=True):
r = redis.StrictRedis(host=hostname, port=port, db=db) r = redis.StrictRedis(host=hostname, port=port, db=db)
try: try:
r.ping() r.ping()
except: except Exception:
return 'Redis not running or not installed. Helper will be disabled.' return 'Redis not running or not installed. Helper will be disabled.'
@ -62,6 +62,7 @@ def flush():
returncode = r.flushdb() returncode = r.flushdb()
return returncode return returncode
if __name__ == "__main__": if __name__ == "__main__":
import sys import sys
if selftest() is not None: if selftest() is not None:
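
The bare `except:` becomes `except Exception:` (E722): a bare except would also swallow SystemExit and KeyboardInterrupt, while `Exception` still catches the connection errors this self-test cares about. A minimal sketch of the same pattern, assuming a local Redis and the redis package (connection settings are illustrative):

import redis

r = redis.StrictRedis(host='127.0.0.1', port=6379, db=0)  # illustrative settings
try:
    r.ping()
except Exception:  # E722 fix: never a bare "except:"
    print('Redis not running or not installed. Helper will be disabled.')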


@ -1,3 +1,3 @@
from .expansion import * from .expansion import * # noqa
from .import_mod import * from .import_mod import * # noqa
from .export_mod import * from .export_mod import * # noqa


@ -1,4 +1,4 @@
from . import _vmray from . import _vmray # noqa
__all__ = ['vmray_submit', 'asn_history', 'circl_passivedns', 'circl_passivessl', __all__ = ['vmray_submit', 'asn_history', 'circl_passivedns', 'circl_passivessl',
'countrycode', 'cve', 'dns', 'btc_steroids', 'domaintools', 'eupi', 'countrycode', 'cve', 'dns', 'btc_steroids', 'domaintools', 'eupi',


@ -47,9 +47,11 @@ options = None
locale.setlocale(locale.LC_ALL, '') locale.setlocale(locale.LC_ALL, '')
class QueryError(Exception): class QueryError(Exception):
pass pass
class DnsdbClient(object): class DnsdbClient(object):
def __init__(self, server, apikey, limit=None, http_proxy=None, https_proxy=None): def __init__(self, server, apikey, limit=None, http_proxy=None, https_proxy=None):
self.server = server self.server = server
@ -81,7 +83,6 @@ class DnsdbClient(object):
return self._query(path, before, after) return self._query(path, before, after)
def _query(self, path, before=None, after=None): def _query(self, path, before=None, after=None):
res = []
url = '%s/lookup/%s' % (self.server, path) url = '%s/lookup/%s' % (self.server, path)
params = {} params = {}
@ -120,12 +121,15 @@ class DnsdbClient(object):
except (HTTPError, URLError) as e: except (HTTPError, URLError) as e:
raise QueryError(str(e), sys.exc_traceback) raise QueryError(str(e), sys.exc_traceback)
def quote(path): def quote(path):
return urllib_quote(path, safe='') return urllib_quote(path, safe='')
def sec_to_text(ts): def sec_to_text(ts):
return time.strftime('%Y-%m-%d %H:%M:%S -0000', time.gmtime(ts)) return time.strftime('%Y-%m-%d %H:%M:%S -0000', time.gmtime(ts))
def rrset_to_text(m): def rrset_to_text(m):
s = StringIO() s = StringIO()
@ -155,9 +159,11 @@ def rrset_to_text(m):
finally: finally:
s.close() s.close()
def rdata_to_text(m): def rdata_to_text(m):
return '%s IN %s %s' % (m['rrname'], m['rrtype'], m['rdata']) return '%s IN %s %s' % (m['rrname'], m['rrtype'], m['rdata'])
def parse_config(cfg_files): def parse_config(cfg_files):
config = {} config = {}
@ -172,6 +178,7 @@ def parse_config(cfg_files):
return config return config
def time_parse(s): def time_parse(s):
try: try:
epoch = int(s) epoch = int(s)
@ -193,14 +200,15 @@ def time_parse(s):
m = re.match(r'^(?=\d)(?:(\d+)w)?(?:(\d+)d)?(?:(\d+)h)?(?:(\d+)m)?(?:(\d+)s?)?$', s, re.I) m = re.match(r'^(?=\d)(?:(\d+)w)?(?:(\d+)d)?(?:(\d+)h)?(?:(\d+)m)?(?:(\d+)s?)?$', s, re.I)
if m: if m:
return -1*(int(m.group(1) or 0)*604800 + return -1 * (int(m.group(1) or 0) * 604800
int(m.group(2) or 0)*86400+ + int(m.group(2) or 0) * 86400
int(m.group(3) or 0)*3600+ + int(m.group(3) or 0) * 3600
int(m.group(4) or 0)*60+ + int(m.group(4) or 0) * 60
int(m.group(5) or 0)) + int(m.group(5) or 0))
raise ValueError('Invalid time: "%s"' % s) raise ValueError('Invalid time: "%s"' % s)
def epipe_wrapper(func): def epipe_wrapper(func):
def f(*args, **kwargs): def f(*args, **kwargs):
try: try:
@ -211,31 +219,23 @@ def epipe_wrapper(func):
raise raise
return f return f
@epipe_wrapper @epipe_wrapper
def main(): def main():
global cfg global cfg
global options global options
parser = optparse.OptionParser(epilog='Time formats are: "%Y-%m-%d", "%Y-%m-%d %H:%M:%S", "%d" (UNIX timestamp), "-%d" (Relative time in seconds), BIND format (e.g. 1w1h, (w)eek, (d)ay, (h)our, (m)inute, (s)econd)') parser = optparse.OptionParser(epilog='Time formats are: "%Y-%m-%d", "%Y-%m-%d %H:%M:%S", "%d" (UNIX timestamp), "-%d" (Relative time in seconds), BIND format (e.g. 1w1h, (w)eek, (d)ay, (h)our, (m)inute, (s)econd)')
parser.add_option('-c', '--config', dest='config', parser.add_option('-c', '--config', dest='config', help='config file', action='append')
help='config file', action='append')
parser.add_option('-r', '--rrset', dest='rrset', type='string', parser.add_option('-r', '--rrset', dest='rrset', type='string', help='rrset <ONAME>[/<RRTYPE>[/BAILIWICK]]')
help='rrset <ONAME>[/<RRTYPE>[/BAILIWICK]]')
parser.add_option('-n', '--rdataname', dest='rdata_name', type='string', parser.add_option('-n', '--rdataname', dest='rdata_name', type='string', help='rdata name <NAME>[/<RRTYPE>]')
help='rdata name <NAME>[/<RRTYPE>]')
parser.add_option('-i', '--rdataip', dest='rdata_ip', type='string', parser.add_option('-i', '--rdataip', dest='rdata_ip', type='string', help='rdata ip <IPADDRESS|IPRANGE|IPNETWORK>')
help='rdata ip <IPADDRESS|IPRANGE|IPNETWORK>')
parser.add_option('-t', '--rrtype', dest='rrtype', type='string', parser.add_option('-t', '--rrtype', dest='rrtype', type='string', help='rrset or rdata rrtype')
help='rrset or rdata rrtype')
parser.add_option('-b', '--bailiwick', dest='bailiwick', type='string', parser.add_option('-b', '--bailiwick', dest='bailiwick', type='string', help='rrset bailiwick')
help='rrset bailiwick')
parser.add_option('-s', '--sort', dest='sort', type='string', help='sort key') parser.add_option('-s', '--sort', dest='sort', type='string', help='sort key')
parser.add_option('-R', '--reverse', dest='reverse', action='store_true', default=False, parser.add_option('-R', '--reverse', dest='reverse', action='store_true', default=False, help='reverse sort')
help='reverse sort')
parser.add_option('-j', '--json', dest='json', action='store_true', default=False, parser.add_option('-j', '--json', dest='json', action='store_true', default=False, help='output in JSON format')
help='output in JSON format')
parser.add_option('-l', '--limit', dest='limit', type='int', default=0, parser.add_option('-l', '--limit', dest='limit', type='int', default=0, help='limit number of results')
help='limit number of results')
parser.add_option('', '--before', dest='before', type='string', help='only output results seen before this time') parser.add_option('', '--before', dest='before', type='string', help='only output results seen before this time')
parser.add_option('', '--after', dest='after', type='string', help='only output results seen after this time') parser.add_option('', '--after', dest='after', type='string', help='only output results seen after this time')
@ -263,20 +263,20 @@ def main():
print(str(e), file=sys.stderr) print(str(e), file=sys.stderr)
sys.exit(1) sys.exit(1)
if not 'DNSDB_SERVER' in cfg: if 'DNSDB_SERVER' not in cfg:
cfg['DNSDB_SERVER'] = DEFAULT_DNSDB_SERVER cfg['DNSDB_SERVER'] = DEFAULT_DNSDB_SERVER
if not 'HTTP_PROXY' in cfg: if 'HTTP_PROXY' not in cfg:
cfg['HTTP_PROXY'] = DEFAULT_HTTP_PROXY cfg['HTTP_PROXY'] = DEFAULT_HTTP_PROXY
if not 'HTTPS_PROXY' in cfg: if 'HTTPS_PROXY' not in cfg:
cfg['HTTPS_PROXY'] = DEFAULT_HTTPS_PROXY cfg['HTTPS_PROXY'] = DEFAULT_HTTPS_PROXY
if not 'APIKEY' in cfg: if 'APIKEY' not in cfg:
sys.stderr.write('dnsdb_query: APIKEY not defined in config file\n') sys.stderr.write('dnsdb_query: APIKEY not defined in config file\n')
sys.exit(1) sys.exit(1)
client = DnsdbClient(cfg['DNSDB_SERVER'], cfg['APIKEY'], client = DnsdbClient(cfg['DNSDB_SERVER'], cfg['APIKEY'],
limit=options.limit, limit=options.limit,
http_proxy=cfg['HTTP_PROXY'], http_proxy=cfg['HTTP_PROXY'],
https_proxy=cfg['HTTPS_PROXY']) https_proxy=cfg['HTTPS_PROXY'])
if options.rrset: if options.rrset:
if options.rrtype or options.bailiwick: if options.rrtype or options.bailiwick:
qargs = (options.rrset, options.rrtype, options.bailiwick) qargs = (options.rrset, options.rrtype, options.bailiwick)
@ -307,7 +307,7 @@ def main():
if options.sort: if options.sort:
results = list(results) results = list(results)
if len(results) > 0: if len(results) > 0:
if not options.sort in results[0]: if options.sort not in results[0]:
sort_keys = results[0].keys() sort_keys = results[0].keys()
sort_keys.sort() sort_keys.sort()
sys.stderr.write('dnsdb_query: invalid sort key "%s". valid sort keys are %s\n' % (options.sort, ', '.join(sort_keys))) sys.stderr.write('dnsdb_query: invalid sort key "%s". valid sort keys are %s\n' % (options.sort, ', '.join(sort_keys)))
@ -319,5 +319,6 @@ def main():
print(e.message, file=sys.stderr) print(e.message, file=sys.stderr)
sys.exit(1) sys.exit(1)
if __name__ == '__main__': if __name__ == '__main__':
main() main()
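
The config checks switch from `not 'KEY' in cfg` to `'KEY' not in cfg` (E713); both are equivalent, the latter is the idiomatic membership test. The reflowed `time_parse` arithmetic likewise moves the `+` operators to the start of each continuation line, the break-before-operator style PEP 8 now prefers (allowed here because W503 is on the CI ignore list). A tiny sketch with an illustrative config dict:

cfg = {'DNSDB_SERVER': 'https://api.dnsdb.info'}  # illustrative value
if 'APIKEY' not in cfg:  # E713: preferred over "if not 'APIKEY' in cfg"
    print('dnsdb_query: APIKEY not defined in config file')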


@ -22,9 +22,9 @@ def handler(q=False):
misperrors['error'] = "Unsupported attributes type" misperrors['error'] = "Unsupported attributes type"
return misperrors return misperrors
if not request.get('config') and not (request['config'].get('host') and if not request.get('config') and not (request['config'].get('host')
request['config'].get('port') and and request['config'].get('port')
request['config'].get('db')): and request['config'].get('db')):
misperrors['error'] = 'ASN description history configuration is missing' misperrors['error'] = 'ASN description history configuration is missing'
return misperrors return misperrors
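
The multi-line condition is reflowed so each `and` opens its continuation line instead of closing the previous one, again the break-before-operator style. Shape of the reflowed check, with illustrative values:

request = {'config': {'host': '127.0.0.1', 'port': 6379, 'db': 1}}  # illustrative
config_ok = (request['config'].get('host')
             and request['config'].get('port')
             and request['config'].get('db'))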


@ -12,24 +12,25 @@ moduleinfo = {'version': '0.1', 'author': 'Sascha Rommelfangen',
moduleconfig = [] moduleconfig = []
blockchain_firstseen='https://blockchain.info/q/addressfirstseen/' blockchain_firstseen = 'https://blockchain.info/q/addressfirstseen/'
blockchain_balance='https://blockchain.info/q/addressbalance/' blockchain_balance = 'https://blockchain.info/q/addressbalance/'
blockchain_totalreceived='https://blockchain.info/q/getreceivedbyaddress/' blockchain_totalreceived = 'https://blockchain.info/q/getreceivedbyaddress/'
blockchain_all='https://blockchain.info/rawaddr/' blockchain_all = 'https://blockchain.info/rawaddr/{}?filter=5{}'
converter = 'https://min-api.cryptocompare.com/data/pricehistorical?fsym=BTC&tsyms=USD,EUR&ts=' converter = 'https://min-api.cryptocompare.com/data/pricehistorical?fsym=BTC&tsyms=USD,EUR&ts={}'
converter_rls = 'https://min-api.cryptocompare.com/stats/rate/limit' converter_rls = 'https://min-api.cryptocompare.com/stats/rate/limit'
result_text = "" result_text = ""
g_rate_limit = 300 g_rate_limit = 300
start_time = 0 start_time = 0
conversion_rates = {} conversion_rates = {}
def get_consumption(output=False): def get_consumption(output=False):
try: try:
req = requests.get(converter_rls) req = requests.get(converter_rls)
jreq = req.json() jreq = req.json()
minute = str(jreq['Data']['calls_left']['minute']) minute = str(jreq['Data']['calls_left']['minute'])
hour = str(jreq['Data']['calls_left']['hour']) hour = str(jreq['Data']['calls_left']['hour'])
except: except Exception:
minute = str(-1) minute = str(-1)
hour = str(-1) hour = str(-1)
# Debug out for the console # Debug out for the console
@ -53,20 +54,20 @@ def convert(btc, timestamp):
minute, hour = get_consumption() minute, hour = get_consumption()
g_rate_limit -= 1 g_rate_limit -= 1
now = time.time() now = time.time()
delta = now - start_time # delta = now - start_time
#print(g_rate_limit) # print(g_rate_limit)
if g_rate_limit <= 10: if g_rate_limit <= 10:
minute, hour = get_consumption(output=True) minute, hour = get_consumption(output=True)
if int(minute) <= 10: if int(minute) <= 10:
#print(minute) # print(minute)
#get_consumption(output=True) # get_consumption(output=True)
time.sleep(3) time.sleep(3)
else: else:
mprint(minute) mprint(minute)
start_time = time.time() start_time = time.time()
g_rate_limit = int(minute) g_rate_limit = int(minute)
try: try:
req = requests.get(converter+str(timestamp)) req = requests.get(converter.format(timestamp))
jreq = req.json() jreq = req.json()
usd = jreq['BTC']['USD'] usd = jreq['BTC']['USD']
eur = jreq['BTC']['EUR'] eur = jreq['BTC']['EUR']
@ -78,7 +79,7 @@ def convert(btc, timestamp):
# Actually convert and return the values # Actually convert and return the values
u = usd * btc u = usd * btc
e = eur * btc e = eur * btc
return u,e return u, e
def mprint(input): def mprint(input):
@ -90,8 +91,8 @@ def mprint(input):
def handler(q=False): def handler(q=False):
global result_text global result_text
global conversion_rates global conversion_rates
start_time = time.time() # start_time = time.time()
now = time.time() # now = time.time()
if q is False: if q is False:
return False return False
request = json.loads(q) request = json.loads(q)
@ -107,13 +108,13 @@ def handler(q=False):
mprint("\nAddress:\t" + btc) mprint("\nAddress:\t" + btc)
try: try:
req = requests.get(blockchain_all+btc+"?limit=50&filter=5") req = requests.get(blockchain_all.format(btc, "&limit=50"))
jreq = req.json() jreq = req.json()
except Exception as e: except Exception:
#print(e) # print(e)
print(req.text) print(req.text)
result_text = "" result_text = ""
sys.exit(1) sys.exit(1)
n_tx = jreq['n_tx'] n_tx = jreq['n_tx']
balance = float(jreq['final_balance'] / 100000000) balance = float(jreq['final_balance'] / 100000000)
@ -130,11 +131,11 @@ def handler(q=False):
i = 0 i = 0
while i < n_tx: while i < n_tx:
if click is False: if click is False:
req = requests.get(blockchain_all+btc+"?limit=5&offset="+str(i)+"&filter=5") req = requests.get(blockchain_all.format(btc, "&limit=5&offset={}".format(i)))
if n_tx > 5: if n_tx > 5:
n_tx = 5 n_tx = 5
else: else:
req = requests.get(blockchain_all+btc+"?limit=50&offset="+str(i)+"&filter=5") req = requests.get(blockchain_all.format(btc, "&limit=50&offset={}".format(i)))
jreq = req.json() jreq = req.json()
if jreq['txs']: if jreq['txs']:
for transactions in jreq['txs']: for transactions in jreq['txs']:
@ -144,8 +145,8 @@ def handler(q=False):
script_old = tx['script'] script_old = tx['script']
if tx['prev_out']['value'] != 0 and tx['prev_out']['addr'] == btc: if tx['prev_out']['value'] != 0 and tx['prev_out']['addr'] == btc:
datetime = time.strftime("%d %b %Y %H:%M:%S %Z", time.localtime(int(transactions['time']))) datetime = time.strftime("%d %b %Y %H:%M:%S %Z", time.localtime(int(transactions['time'])))
value = float(tx['prev_out']['value'] / 100000000 ) value = float(tx['prev_out']['value'] / 100000000)
u,e = convert(value, transactions['time']) u, e = convert(value, transactions['time'])
mprint("#" + str(n_tx - i) + "\t" + str(datetime) + "\t-{0:10.8f} BTC {1:10.2f} USD\t{2:10.2f} EUR".format(value, u, e).rstrip('0')) mprint("#" + str(n_tx - i) + "\t" + str(datetime) + "\t-{0:10.8f} BTC {1:10.2f} USD\t{2:10.2f} EUR".format(value, u, e).rstrip('0'))
if script_old != tx['script']: if script_old != tx['script']:
i += 1 i += 1
@ -153,19 +154,19 @@ def handler(q=False):
sum_counter += 1 sum_counter += 1
sum += value sum += value
if sum_counter > 1: if sum_counter > 1:
u,e = convert(sum, transactions['time']) u, e = convert(sum, transactions['time'])
mprint("\t\t\t\t\t----------------------------------------------") mprint("\t\t\t\t\t----------------------------------------------")
mprint("#" + str(n_tx - i) + "\t\t\t\t Sum:\t-{0:10.8f} BTC {1:10.2f} USD\t{2:10.2f} EUR\n".format(sum, u, e).rstrip('0')) mprint("#" + str(n_tx - i) + "\t\t\t\t Sum:\t-{0:10.8f} BTC {1:10.2f} USD\t{2:10.2f} EUR\n".format(sum, u, e).rstrip('0'))
for tx in transactions['out']: for tx in transactions['out']:
if tx['value'] != 0 and tx['addr'] == btc: if tx['value'] != 0 and tx['addr'] == btc:
datetime = time.strftime("%d %b %Y %H:%M:%S %Z", time.localtime(int(transactions['time']))) datetime = time.strftime("%d %b %Y %H:%M:%S %Z", time.localtime(int(transactions['time'])))
value = float(tx['value'] / 100000000 ) value = float(tx['value'] / 100000000)
u,e = convert(value, transactions['time']) u, e = convert(value, transactions['time'])
mprint("#" + str(n_tx - i) + "\t" + str(datetime) + "\t {0:10.8f} BTC {1:10.2f} USD\t{2:10.2f} EUR".format(value, u, e).rstrip('0')) mprint("#" + str(n_tx - i) + "\t" + str(datetime) + "\t {0:10.8f} BTC {1:10.2f} USD\t{2:10.2f} EUR".format(value, u, e).rstrip('0'))
#i += 1 # i += 1
i += 1 i += 1
r = { r = {
'results': [ 'results': [
{ {
'types': ['text'], 'types': ['text'],
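
The blockchain.info and cryptocompare URLs become str.format templates instead of string concatenation, so the query parameters are filled in one place. Sketch of how the template above is used (the address is only an example):

blockchain_all = 'https://blockchain.info/rawaddr/{}?filter=5{}'
btc = '1A1zP1eP5QGefi2DMPTfTL5SLmv7DivfNa'  # illustrative address
url = blockchain_all.format(btc, "&limit=5&offset={}".format(0))
# -> https://blockchain.info/rawaddr/1A1zP1eP5QGefi2DMPTfTL5SLmv7DivfNa?filter=5&limit=5&offset=0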


@ -12,20 +12,21 @@ moduleinfo = {'version': '1', 'author': 'Hannah Ward',
# config fields that your code expects from the site admin # config fields that your code expects from the site admin
moduleconfig = [] moduleconfig = []
common_tlds = {"com":"Commercial (Worldwide)", common_tlds = {"com": "Commercial (Worldwide)",
"org":"Organisation (Worldwide)", "org": "Organisation (Worldwide)",
"net":"Network (Worldwide)", "net": "Network (Worldwide)",
"int":"International (Worldwide)", "int": "International (Worldwide)",
"edu":"Education (Usually USA)", "edu": "Education (Usually USA)",
"gov":"Government (USA)" "gov": "Government (USA)"
} }
codes = False codes = False
def handler(q=False): def handler(q=False):
global codes global codes
if not codes: if not codes:
codes = requests.get("http://www.geognos.com/api/en/countries/info/all.json").json() codes = requests.get("http://www.geognos.com/api/en/countries/info/all.json").json()
if q is False: if q is False:
return False return False
request = json.loads(q) request = json.loads(q)
@ -36,18 +37,18 @@ def handler(q=False):
# Check if it's a common, non country one # Check if it's a common, non country one
if ext in common_tlds.keys(): if ext in common_tlds.keys():
val = common_tlds[ext] val = common_tlds[ext]
else: else:
# Retrieve a json full of country info # Retrieve a json full of country info
if not codes["StatusMsg"] == "OK": if not codes["StatusMsg"] == "OK":
val = "Unknown" val = "Unknown"
else: else:
# Find our code based on TLD # Find our code based on TLD
codes = codes["Results"] codes = codes["Results"]
for code in codes.keys(): for code in codes.keys():
if codes[code]["CountryCodes"]["tld"] == ext: if codes[code]["CountryCodes"]["tld"] == ext:
val = codes[code]["Name"] val = codes[code]["Name"]
r = {'results': [{'types':['text'], 'values':[val]}]} r = {'results': [{'types': ['text'], 'values':[val]}]}
return r return r
@ -58,4 +59,3 @@ def introspection():
def version(): def version():
moduleinfo['config'] = moduleconfig moduleinfo['config'] = moduleconfig
return moduleinfo return moduleinfo


@ -1,6 +1,5 @@
import json import json
import datetime import sys
from collections import defaultdict
try: try:
import dns.resolver import dns.resolver
@ -30,12 +29,14 @@ dbl_mapping = {'127.0.1.2': 'spam domain',
'127.0.1.106': 'abused legit botnet C&C', '127.0.1.106': 'abused legit botnet C&C',
'127.0.1.255': 'IP queries prohibited!'} '127.0.1.255': 'IP queries prohibited!'}
def fetch_requested_value(request): def fetch_requested_value(request):
for attribute_type in mispattributes['input']: for attribute_type in mispattributes['input']:
if request.get(attribute_type): if request.get(attribute_type):
return request[attribute_type].split('|')[0] return request[attribute_type].split('|')[0]
return None return None
def handler(q=False): def handler(q=False):
if q is False: if q is False:
return False return False
@ -52,9 +53,11 @@ def handler(q=False):
result = str(e) result = str(e)
return {'results': [{'types': mispattributes.get('output'), 'values': result}]} return {'results': [{'types': mispattributes.get('output'), 'values': result}]}
def introspection(): def introspection():
return mispattributes return mispattributes
def version(): def version():
moduleinfo['config'] = moduleconfig moduleinfo['config'] = moduleconfig
return moduleinfo return moduleinfo


@ -43,7 +43,7 @@ def handler(q=False):
except dns.exception.Timeout: except dns.exception.Timeout:
misperrors['error'] = "Timeout" misperrors['error'] = "Timeout"
return misperrors return misperrors
except: except Exception:
misperrors['error'] = "DNS resolving error" misperrors['error'] = "DNS resolving error"
return misperrors return misperrors


@ -51,7 +51,7 @@ def lookup_name(client, name):
for i in item.get('rdata'): for i in item.get('rdata'):
# grab email field and replace first dot by @ to convert to an email address # grab email field and replace first dot by @ to convert to an email address
yield(i.split(' ')[1].rstrip('.').replace('.', '@', 1)) yield(i.split(' ')[1].rstrip('.').replace('.', '@', 1))
except QueryError as e: except QueryError:
pass pass
try: try:
@ -59,7 +59,7 @@ def lookup_name(client, name):
for item in res: for item in res:
if item.get('rrtype') in ['A', 'AAAA', 'CNAME']: if item.get('rrtype') in ['A', 'AAAA', 'CNAME']:
yield(item.get('rrname').rstrip('.')) yield(item.get('rrname').rstrip('.'))
except QueryError as e: except QueryError:
pass pass
@ -68,7 +68,7 @@ def lookup_ip(client, ip):
res = client.query_rdata_ip(ip) res = client.query_rdata_ip(ip)
for item in res: for item in res:
yield(item['rrname'].rstrip('.')) yield(item['rrname'].rstrip('.'))
except QueryError as e: except QueryError:
pass pass


@ -27,7 +27,7 @@ try:
config.read(os.path.join(os.path.dirname(os.path.abspath(__file__)), 'geoip_country.cfg')) config.read(os.path.join(os.path.dirname(os.path.abspath(__file__)), 'geoip_country.cfg'))
gi = pygeoip.GeoIP(config.get('GEOIP', 'database')) gi = pygeoip.GeoIP(config.get('GEOIP', 'database'))
enabled = True enabled = True
except: except Exception:
enabled = False enabled = False
@ -49,7 +49,7 @@ def handler(q=False):
try: try:
answer = gi.country_code_by_addr(toquery) answer = gi.country_code_by_addr(toquery)
except: except Exception:
misperrors['error'] = "GeoIP resolving error" misperrors['error'] = "GeoIP resolving error"
return misperrors return misperrors


@ -20,7 +20,7 @@ def handler(q=False):
if v is None: if v is None:
misperrors['error'] = 'Hash value is missing.' misperrors['error'] = 'Hash value is missing.'
return misperrors return misperrors
r = requests.post(hashddapi_url, data={'hash':v}) r = requests.post(hashddapi_url, data={'hash': v})
if r.status_code == 200: if r.status_code == 200:
state = json.loads(r.text) state = json.loads(r.text)
if state: if state:


@ -24,9 +24,9 @@ def handler(q=False):
misperrors['error'] = "Unsupported attributes type" misperrors['error'] = "Unsupported attributes type"
return misperrors return misperrors
if not request.get('config') and not (request['config'].get('host') and if not request.get('config') and not (request['config'].get('host')
request['config'].get('port') and and request['config'].get('port')
request['config'].get('db')): and request['config'].get('db')):
misperrors['error'] = 'IP ASN history configuration is missing' misperrors['error'] = 'IP ASN history configuration is missing'
return misperrors return misperrors


@ -45,7 +45,7 @@ def parse_iprep(ip, api):
url = 'https://www.packetmail.net/iprep.php/%s' % ip url = 'https://www.packetmail.net/iprep.php/%s' % ip
try: try:
data = requests.get(url, params={'apikey': api}).json() data = requests.get(url, params={'apikey': api}).json()
except: except Exception:
return ['Error pulling data'], rep return ['Error pulling data'], rep
# print '%s' % data # print '%s' % data
for name, val in data.items(): for name, val in data.items():
@ -71,11 +71,11 @@ def parse_iprep(ip, api):
misp_val = context misp_val = context
full_text += '\n%s' % context full_text += '\n%s' % context
misp_comment = 'IPRep Source %s: %s' % (name, val['last_seen']) misp_comment = 'IPRep Source %s: %s' % (name, val['last_seen'])
rep.append({'types': mispattributes['output'], 'categories':['External analysis'], 'values': misp_val, 'comment': misp_comment}) rep.append({'types': mispattributes['output'], 'categories': ['External analysis'], 'values': misp_val, 'comment': misp_comment})
except: except Exception:
err.append('Error parsing source: %s' % name) err.append('Error parsing source: %s' % name)
rep.append({'types': ['freetext'], 'values': full_text , 'comment': 'Free text import of IPRep'}) rep.append({'types': ['freetext'], 'values': full_text, 'comment': 'Free text import of IPRep'})
return err, rep return err, rep


@ -86,32 +86,30 @@ def handler(q=False):
response.block_details.date_updated.strftime('%d %B %Y') if response.block_details.date_updated else None response.block_details.date_updated.strftime('%d %B %Y') if response.block_details.date_updated else None
results = { results = {
'results': [ 'results':
{'types': ['text'], 'values': [{'types': ['text'], 'values':
{ {
# Mac address details # Mac address details
'Valid MAC address': "True" if response.mac_address_details.is_valid else "False", 'Valid MAC address': "True" if response.mac_address_details.is_valid else "False",
'Transmission type': response.mac_address_details.transmission_type, 'Transmission type': response.mac_address_details.transmission_type,
'Administration type': response.mac_address_details.administration_type, 'Administration type': response.mac_address_details.administration_type,
# Vendor details # Vendor details
'OUI': response.vendor_details.oui, 'OUI': response.vendor_details.oui,
'Vendor details are hidden': "True" if response.vendor_details.is_private else "False", 'Vendor details are hidden': "True" if response.vendor_details.is_private else "False",
'Company name': response.vendor_details.company_name, 'Company name': response.vendor_details.company_name,
'Company\'s address': response.vendor_details.company_address, 'Company\'s address': response.vendor_details.company_address,
'County code': response.vendor_details.country_code, 'County code': response.vendor_details.country_code,
# Block details # Block details
'Block found': "True" if response.block_details.block_found else "False", 'Block found': "True" if response.block_details.block_found else "False",
'The left border of the range': response.block_details.border_left, 'The left border of the range': response.block_details.border_left,
'The right border of the range': response.block_details.border_right, 'The right border of the range': response.block_details.border_right,
'The total number of MAC addresses in this range': response.block_details.block_size, 'The total number of MAC addresses in this range': response.block_details.block_size,
'Assignment block size': response.block_details.assignment_block_size, 'Assignment block size': response.block_details.assignment_block_size,
'Date when the range was allocated': date_created, 'Date when the range was allocated': date_created,
'Date when the range was last updated': date_updated 'Date when the range was last updated': date_updated
} }}]
}
]
} }
return results return results
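
The return value here is only restructured (brackets and indentation), not changed; for readability, its final shape is a single text attribute whose value is the details dictionary, roughly:

results = {
    'results': [
        {'types': ['text'],
         'values': {'Valid MAC address': 'True',    # illustrative entries; the full set is above
                    'Company name': 'Example Corp'}}
    ]
}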


@ -1,4 +1,3 @@
import json
# -*- coding: utf-8 -*- # -*- coding: utf-8 -*-
import json import json
@ -9,7 +8,8 @@ except ImportError:
misperrors = {'error': 'Error'} misperrors = {'error': 'Error'}
mispattributes = {'input': ['ip-src', 'ip-dst', 'hostname', 'domain'], 'output': ['hostname', 'domain', 'ip-src', 'ip-dst','url']} mispattributes = {'input': ['ip-src', 'ip-dst', 'hostname', 'domain'],
'output': ['hostname', 'domain', 'ip-src', 'ip-dst', 'url']}
# possible module-types: 'expansion', 'hover' or both # possible module-types: 'expansion', 'hover' or both
moduleinfo = {'version': '1', 'author': 'Sebastien Larinier @sebdraven', moduleinfo = {'version': '1', 'author': 'Sebastien Larinier @sebdraven',
'description': 'Query on Onyphe', 'description': 'Query on Onyphe',
@ -54,7 +54,7 @@ def handle_expansion(api, ip, misperrors):
misperrors['error'] = result['message'] misperrors['error'] = result['message']
return misperrors return misperrors
categories = list(set([item['@category'] for item in result['results']])) # categories = list(set([item['@category'] for item in result['results']]))
result_filtered = {"results": []} result_filtered = {"results": []}
urls_pasties = [] urls_pasties = []
@ -72,9 +72,9 @@ def handle_expansion(api, ip, misperrors):
os_target = r['os'] os_target = r['os']
if os_target != 'Unknown': if os_target != 'Unknown':
os_list.append(r['os']) os_list.append(r['os'])
elif r['@category'] == 'resolver' and r['type'] =='reverse': elif r['@category'] == 'resolver' and r['type'] == 'reverse':
domains_resolver.append(r['reverse']) domains_resolver.append(r['reverse'])
elif r['@category'] == 'resolver' and r['type'] =='forward': elif r['@category'] == 'resolver' and r['type'] == 'forward':
domains_forward.append(r['forward']) domains_forward.append(r['forward'])
result_filtered['results'].append({'types': ['url'], 'values': urls_pasties, result_filtered['results'].append({'types': ['url'], 'values': urls_pasties,
@ -90,7 +90,7 @@ def handle_expansion(api, ip, misperrors):
result_filtered['results'].append({'types': ['domain'], result_filtered['results'].append({'types': ['domain'],
'values': list(set(domains_resolver)), 'values': list(set(domains_resolver)),
'categories': ['Network activity'], 'categories': ['Network activity'],
'comment': 'resolver to %s' % ip }) 'comment': 'resolver to %s' % ip})
result_filtered['results'].append({'types': ['domain'], result_filtered['results'].append({'types': ['domain'],
'values': list(set(domains_forward)), 'values': list(set(domains_forward)),


@ -1,4 +1,3 @@
import json
# -*- coding: utf-8 -*- # -*- coding: utf-8 -*-
import json import json
@ -10,7 +9,7 @@ except ImportError:
misperrors = {'error': 'Error'} misperrors = {'error': 'Error'}
mispattributes = {'input': ['ip-src', 'ip-dst', 'hostname', 'domain'], mispattributes = {'input': ['ip-src', 'ip-dst', 'hostname', 'domain'],
'output': ['hostname', 'domain', 'ip-src', 'ip-dst','url']} 'output': ['hostname', 'domain', 'ip-src', 'ip-dst', 'url']}
# possible module-types: 'expansion', 'hover' or both # possible module-types: 'expansion', 'hover' or both
moduleinfo = {'version': '1', 'author': 'Sebastien Larinier @sebdraven', moduleinfo = {'version': '1', 'author': 'Sebastien Larinier @sebdraven',
@ -38,10 +37,10 @@ def handler(q=False):
ip = '' ip = ''
if request.get('ip-src'): if request.get('ip-src'):
ip = request['ip-src'] ip = request['ip-src']
return handle_ip(api ,ip, misperrors) return handle_ip(api, ip, misperrors)
elif request.get('ip-dst'): elif request.get('ip-dst'):
ip = request['ip-dst'] ip = request['ip-dst']
return handle_ip(api,ip,misperrors) return handle_ip(api, ip, misperrors)
elif request.get('domain'): elif request.get('domain'):
domain = request['domain'] domain = request['domain']
return handle_domain(api, domain, misperrors) return handle_domain(api, domain, misperrors)
@ -91,11 +90,11 @@ def handle_ip(api, ip, misperrors):
r, status_ok = expand_syscan(api, ip, misperrors) r, status_ok = expand_syscan(api, ip, misperrors)
if status_ok: if status_ok:
result_filtered['results'].extend(r) result_filtered['results'].extend(r)
else: else:
misperrors['error'] = "Error syscan result" misperrors['error'] = "Error syscan result"
r, status_ok = expand_pastries(api,misperrors,ip=ip) r, status_ok = expand_pastries(api, misperrors, ip=ip)
if status_ok: if status_ok:
result_filtered['results'].extend(r) result_filtered['results'].extend(r)
@ -185,11 +184,11 @@ def expand_syscan(api, ip, misperror):
return r, status_ok return r, status_ok
def expand_datascan(api, misperror,**kwargs): def expand_datascan(api, misperror, **kwargs):
status_ok = False status_ok = False
r = [] r = []
ip = '' # ip = ''
query ='' query = ''
asn_list = [] asn_list = []
geoloc = [] geoloc = []
orgs = [] orgs = []
@ -311,7 +310,7 @@ def expand_pastries(api, misperror, **kwargs):
query = kwargs.get('domain') query = kwargs.get('domain')
result = api.search_pastries('domain:%s' % query) result = api.search_pastries('domain:%s' % query)
if result['status'] =='ok': if result['status'] == 'ok':
status_ok = True status_ok = True
for item in result['results']: for item in result['results']:
if item['@category'] == 'pastries': if item['@category'] == 'pastries':
@ -328,7 +327,7 @@ def expand_pastries(api, misperror, **kwargs):
r.append({'types': ['url'], r.append({'types': ['url'],
'values': urls_pasties, 'values': urls_pasties,
'categories': ['External analysis'], 'categories': ['External analysis'],
'comment':'URLs of pasties where %s has found' % query}) 'comment': 'URLs of pasties where %s has found' % query})
r.append({'types': ['domain'], 'values': list(set(domains)), r.append({'types': ['domain'], 'values': list(set(domains)),
'categories': ['Network activity'], 'categories': ['Network activity'],
'comment': 'Domains found in pasties of Onyphe'}) 'comment': 'Domains found in pasties of Onyphe'})
@ -340,7 +339,7 @@ def expand_pastries(api, misperror, **kwargs):
return r, status_ok return r, status_ok
def expand_threatlist(api, misperror,**kwargs): def expand_threatlist(api, misperror, **kwargs):
status_ok = False status_ok = False
r = [] r = []
@ -366,7 +365,8 @@ def expand_threatlist(api, misperror,**kwargs):
'comment': '%s is present in threatlist' % query 'comment': '%s is present in threatlist' % query
}) })
return r,status_ok return r, status_ok
def introspection(): def introspection():
return mispattributes return mispattributes


@ -15,9 +15,10 @@ moduleinfo = {'version': '1', 'author': 'chrisdoman',
# We're not actually using the API key yet # We're not actually using the API key yet
moduleconfig = ["apikey"] moduleconfig = ["apikey"]
# Avoid adding windows update to enrichment etc. # Avoid adding windows update to enrichment etc.
def isBlacklisted(value): def isBlacklisted(value):
blacklist = ['0.0.0.0', '8.8.8.8', '255.255.255.255', '192.168.56.' , 'time.windows.com'] blacklist = ['0.0.0.0', '8.8.8.8', '255.255.255.255', '192.168.56.', 'time.windows.com']
for b in blacklist: for b in blacklist:
if value in b: if value in b:
@ -25,10 +26,12 @@ def isBlacklisted(value):
return True return True
def valid_ip(ip): def valid_ip(ip):
m = re.match(r"^(\d{1,3})\.(\d{1,3})\.(\d{1,3})\.(\d{1,3})$", ip) m = re.match(r"^(\d{1,3})\.(\d{1,3})\.(\d{1,3})\.(\d{1,3})$", ip)
return bool(m) and all(map(lambda n: 0 <= int(n) <= 255, m.groups())) return bool(m) and all(map(lambda n: 0 <= int(n) <= 255, m.groups()))
def findAll(data, keys): def findAll(data, keys):
a = [] a = []
if isinstance(data, dict): if isinstance(data, dict):
@ -43,9 +46,11 @@ def findAll(data, keys):
a.extend(findAll(i, keys)) a.extend(findAll(i, keys))
return a return a
def valid_email(email): def valid_email(email):
return bool(re.search(r"[a-zA-Z0-9!#$%&'*+\/=?^_`{|}~-]+(?:\.[a-zA-Z0-9!#$%&'*+\/=?^_`{|}~-]+)*@(?:[a-zA-Z0-9](?:[a-zA-Z0-9-]*[a-zA-Z0-9])?\.)+[a-zA-Z0-9](?:[a-zA-Z0-9-]*[a-zA-Z0-9])?", email)) return bool(re.search(r"[a-zA-Z0-9!#$%&'*+\/=?^_`{|}~-]+(?:\.[a-zA-Z0-9!#$%&'*+\/=?^_`{|}~-]+)*@(?:[a-zA-Z0-9](?:[a-zA-Z0-9-]*[a-zA-Z0-9])?\.)+[a-zA-Z0-9](?:[a-zA-Z0-9-]*[a-zA-Z0-9])?", email))
def handler(q=False): def handler(q=False):
if q is False: if q is False:
return False return False
@ -99,19 +104,17 @@ def getHash(_hash, key):
def getIP(ip, key): def getIP(ip, key):
ret = [] ret = []
req = json.loads( requests.get("https://otx.alienvault.com/otxapi/indicator/ip/malware/" + ip + "?limit=1000").text ) req = json.loads(requests.get("https://otx.alienvault.com/otxapi/indicator/ip/malware/" + ip + "?limit=1000").text)
for _hash in findAll(req, "hash"): for _hash in findAll(req, "hash"):
ret.append({"types": ["sha256"], "values": [_hash]}) ret.append({"types": ["sha256"], "values": [_hash]})
req = json.loads(requests.get("https://otx.alienvault.com/otxapi/indicator/ip/passive_dns/" + ip).text)
req = json.loads( requests.get("https://otx.alienvault.com/otxapi/indicator/ip/passive_dns/" + ip).text )
for hostname in findAll(req, "hostname"): for hostname in findAll(req, "hostname"):
if not isBlacklisted(hostname): if not isBlacklisted(hostname):
ret.append({"types": ["hostname"], "values": [hostname]}) ret.append({"types": ["hostname"], "values": [hostname]})
return ret return ret
@ -119,7 +122,7 @@ def getDomain(domain, key):
ret = [] ret = []
req = json.loads( requests.get("https://otx.alienvault.com/otxapi/indicator/domain/malware/" + domain + "?limit=1000").text ) req = json.loads(requests.get("https://otx.alienvault.com/otxapi/indicator/domain/malware/" + domain + "?limit=1000").text)
for _hash in findAll(req, "hash"): for _hash in findAll(req, "hash"):
ret.append({"types": ["sha256"], "values": [_hash]}) ret.append({"types": ["sha256"], "values": [_hash]})
@ -144,6 +147,7 @@ def getDomain(domain, key):
return ret return ret
def introspection(): def introspection():
return mispattributes return mispattributes
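
The `json.loads( requests.get(...).text )` calls are only reformatted here (E201/E202 whitespace inside parentheses). As an aside, not part of this commit, requests' own Response.json() would do the same decoding in one call:

import requests

req = requests.get("https://otx.alienvault.com/otxapi/indicator/ip/passive_dns/" + "8.8.8.8")  # illustrative IP
data = req.json()  # equivalent to json.loads(req.text)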


@ -331,7 +331,7 @@ def handler(q=False):
output['results'] += results output['results'] += results
else: else:
log.error("Unsupported query pattern issued.") log.error("Unsupported query pattern issued.")
except: except Exception:
return misperrors return misperrors
return output return output


@ -1,5 +1,5 @@
import json import json
import datetime import sys
try: try:
import dns.resolver import dns.resolver
@ -18,64 +18,65 @@ moduleinfo = {'version': '0.1', 'author': 'Christian Studer',
moduleconfig = [] moduleconfig = []
rbls = { rbls = {
'spam.spamrats.com': 'http://www.spamrats.com', 'spam.spamrats.com': 'http://www.spamrats.com',
'spamguard.leadmon.net': 'http://www.leadmon.net/SpamGuard/', 'spamguard.leadmon.net': 'http://www.leadmon.net/SpamGuard/',
'rbl-plus.mail-abuse.org': 'http://www.mail-abuse.com/lookup.html', 'rbl-plus.mail-abuse.org': 'http://www.mail-abuse.com/lookup.html',
'web.dnsbl.sorbs.net': 'http://www.sorbs.net', 'web.dnsbl.sorbs.net': 'http://www.sorbs.net',
'ix.dnsbl.manitu.net': 'http://www.dnsbl.manitu.net', 'ix.dnsbl.manitu.net': 'http://www.dnsbl.manitu.net',
'virus.rbl.jp': 'http://www.rbl.jp', 'virus.rbl.jp': 'http://www.rbl.jp',
'dul.dnsbl.sorbs.net': 'http://www.sorbs.net', 'dul.dnsbl.sorbs.net': 'http://www.sorbs.net',
'bogons.cymru.com': 'http://www.team-cymru.org/Services/Bogons/', 'bogons.cymru.com': 'http://www.team-cymru.org/Services/Bogons/',
'psbl.surriel.com': 'http://psbl.surriel.com', 'psbl.surriel.com': 'http://psbl.surriel.com',
'misc.dnsbl.sorbs.net': 'http://www.sorbs.net', 'misc.dnsbl.sorbs.net': 'http://www.sorbs.net',
'httpbl.abuse.ch': 'http://dnsbl.abuse.ch', 'httpbl.abuse.ch': 'http://dnsbl.abuse.ch',
'combined.njabl.org': 'http://combined.njabl.org', 'combined.njabl.org': 'http://combined.njabl.org',
'smtp.dnsbl.sorbs.net': 'http://www.sorbs.net', 'smtp.dnsbl.sorbs.net': 'http://www.sorbs.net',
'korea.services.net': 'http://korea.services.net', 'korea.services.net': 'http://korea.services.net',
'drone.abuse.ch': 'http://dnsbl.abuse.ch', 'drone.abuse.ch': 'http://dnsbl.abuse.ch',
'rbl.efnetrbl.org': 'http://rbl.efnetrbl.org', 'rbl.efnetrbl.org': 'http://rbl.efnetrbl.org',
'cbl.anti-spam.org.cn': 'http://www.anti-spam.org.cn/?Locale=en_US', 'cbl.anti-spam.org.cn': 'http://www.anti-spam.org.cn/?Locale=en_US',
'b.barracudacentral.org': 'http://www.barracudacentral.org/rbl/removal-request', 'b.barracudacentral.org': 'http://www.barracudacentral.org/rbl/removal-request',
'bl.spamcannibal.org': 'http://www.spamcannibal.org', 'bl.spamcannibal.org': 'http://www.spamcannibal.org',
'xbl.spamhaus.org': 'http://www.spamhaus.org/xbl/', 'xbl.spamhaus.org': 'http://www.spamhaus.org/xbl/',
'zen.spamhaus.org': 'http://www.spamhaus.org/zen/', 'zen.spamhaus.org': 'http://www.spamhaus.org/zen/',
'rbl.suresupport.com': 'http://suresupport.com/postmaster', 'rbl.suresupport.com': 'http://suresupport.com/postmaster',
'db.wpbl.info': 'http://www.wpbl.info', 'db.wpbl.info': 'http://www.wpbl.info',
'sbl.spamhaus.org': 'http://www.spamhaus.org/sbl/', 'sbl.spamhaus.org': 'http://www.spamhaus.org/sbl/',
'http.dnsbl.sorbs.net': 'http://www.sorbs.net', 'http.dnsbl.sorbs.net': 'http://www.sorbs.net',
'csi.cloudmark.com': 'http://www.cloudmark.com/en/products/cloudmark-sender-intelligence/index', 'csi.cloudmark.com': 'http://www.cloudmark.com/en/products/cloudmark-sender-intelligence/index',
'rbl.interserver.net': 'http://rbl.interserver.net', 'rbl.interserver.net': 'http://rbl.interserver.net',
'ubl.unsubscore.com': 'http://www.lashback.com/blacklist/', 'ubl.unsubscore.com': 'http://www.lashback.com/blacklist/',
'dnsbl.sorbs.net': 'http://www.sorbs.net', 'dnsbl.sorbs.net': 'http://www.sorbs.net',
'virbl.bit.nl': 'http://virbl.bit.nl', 'virbl.bit.nl': 'http://virbl.bit.nl',
'pbl.spamhaus.org': 'http://www.spamhaus.org/pbl/', 'pbl.spamhaus.org': 'http://www.spamhaus.org/pbl/',
'socks.dnsbl.sorbs.net': 'http://www.sorbs.net', 'socks.dnsbl.sorbs.net': 'http://www.sorbs.net',
'short.rbl.jp': 'http://www.rbl.jp', 'short.rbl.jp': 'http://www.rbl.jp',
'dnsbl.dronebl.org': 'http://www.dronebl.org', 'dnsbl.dronebl.org': 'http://www.dronebl.org',
'blackholes.mail-abuse.org': 'http://www.mail-abuse.com/lookup.html', 'blackholes.mail-abuse.org': 'http://www.mail-abuse.com/lookup.html',
'truncate.gbudb.net': 'http://www.gbudb.com/truncate/index.jsp', 'truncate.gbudb.net': 'http://www.gbudb.com/truncate/index.jsp',
'dyna.spamrats.com': 'http://www.spamrats.com', 'dyna.spamrats.com': 'http://www.spamrats.com',
'spamrbl.imp.ch': 'http://antispam.imp.ch', 'spamrbl.imp.ch': 'http://antispam.imp.ch',
'spam.dnsbl.sorbs.net': 'http://www.sorbs.net', 'spam.dnsbl.sorbs.net': 'http://www.sorbs.net',
'wormrbl.imp.ch': 'http://antispam.imp.ch', 'wormrbl.imp.ch': 'http://antispam.imp.ch',
'query.senderbase.org': 'http://www.senderbase.org/about', 'query.senderbase.org': 'http://www.senderbase.org/about',
'opm.tornevall.org': 'http://dnsbl.tornevall.org', 'opm.tornevall.org': 'http://dnsbl.tornevall.org',
'netblock.pedantic.org': 'http://pedantic.org', 'netblock.pedantic.org': 'http://pedantic.org',
'access.redhawk.org': 'http://www.redhawk.org/index.php?option=com_wrapper&Itemid=33', 'access.redhawk.org': 'http://www.redhawk.org/index.php?option=com_wrapper&Itemid=33',
'cdl.anti-spam.org.cn': 'http://www.anti-spam.org.cn/?Locale=en_US', 'cdl.anti-spam.org.cn': 'http://www.anti-spam.org.cn/?Locale=en_US',
'multi.surbl.org': 'http://www.surbl.org', 'multi.surbl.org': 'http://www.surbl.org',
'noptr.spamrats.com': 'http://www.spamrats.com', 'noptr.spamrats.com': 'http://www.spamrats.com',
'dnsbl.inps.de': 'http://dnsbl.inps.de/index.cgi?lang=en', 'dnsbl.inps.de': 'http://dnsbl.inps.de/index.cgi?lang=en',
'bl.spamcop.net': 'http://bl.spamcop.net', 'bl.spamcop.net': 'http://bl.spamcop.net',
'cbl.abuseat.org': 'http://cbl.abuseat.org', 'cbl.abuseat.org': 'http://cbl.abuseat.org',
'dsn.rfc-ignorant.org': 'http://www.rfc-ignorant.org/policy-dsn.php', 'dsn.rfc-ignorant.org': 'http://www.rfc-ignorant.org/policy-dsn.php',
'zombie.dnsbl.sorbs.net': 'http://www.sorbs.net', 'zombie.dnsbl.sorbs.net': 'http://www.sorbs.net',
'dnsbl.njabl.org': 'http://dnsbl.njabl.org', 'dnsbl.njabl.org': 'http://dnsbl.njabl.org',
'relays.mail-abuse.org': 'http://www.mail-abuse.com/lookup.html', 'relays.mail-abuse.org': 'http://www.mail-abuse.com/lookup.html',
'rbl.spamlab.com': 'http://tools.appriver.com/index.aspx?tool=rbl', 'rbl.spamlab.com': 'http://tools.appriver.com/index.aspx?tool=rbl',
'all.bl.blocklist.de': 'http://www.blocklist.de/en/rbldns.html' 'all.bl.blocklist.de': 'http://www.blocklist.de/en/rbldns.html'
} }
def handler(q=False): def handler(q=False):
if q is False: if q is False:
return False return False
@ -89,11 +90,11 @@ def handler(q=False):
return misperrors return misperrors
listed = [] listed = []
info = [] info = []
ipRev = '.'.join(ip.split('.')[::-1]) ipRev = '.'.join(ip.split('.')[::-1])
for rbl in rbls: for rbl in rbls:
query = '{}.{}'.format(ipRev, rbl) query = '{}.{}'.format(ipRev, rbl)
try: try:
txt = resolver.query(query,'TXT') txt = resolver.query(query, 'TXT')
listed.append(query) listed.append(query)
info.append([str(t) for t in txt]) info.append([str(t) for t in txt])
except Exception: except Exception:
@ -101,9 +102,11 @@ def handler(q=False):
result = "\n".join(["{}: {}".format(l, " - ".join(i)) for l, i in zip(listed, info)]) result = "\n".join(["{}: {}".format(l, " - ".join(i)) for l, i in zip(listed, info)])
return {'results': [{'types': mispattributes.get('output'), 'values': result}]} return {'results': [{'types': mispattributes.get('output'), 'values': result}]}
def introspection(): def introspection():
return mispattributes return mispattributes
def version(): def version():
moduleinfo['config'] = moduleconfig moduleinfo['config'] = moduleconfig
return moduleinfo return moduleinfo
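
For context, the lookup loop above uses the standard DNSBL convention: reverse the IPv4 octets, prepend them to each blocklist zone, and query TXT. A minimal sketch with dnspython (resolver.query is the call used here; dnspython 2.x renames it resolve; IP and zone are illustrative):

from dns import resolver

ip = '127.0.0.2'          # conventional DNSBL test entry
rbl = 'zen.spamhaus.org'  # one of the zones listed above
query = '{}.{}'.format('.'.join(ip.split('.')[::-1]), rbl)
try:
    txt = resolver.query(query, 'TXT')
    print([str(t) for t in txt])
except Exception:
    pass  # not listed, or resolver error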


@ -1,5 +1,5 @@
import json import json
import dns.reversename, dns.resolver from dns import reversename, resolver, exception
misperrors = {'error': 'Error'} misperrors = {'error': 'Error'}
mispattributes = {'input': ['ip-src', 'ip-dst', 'domain|ip'], 'output': ['hostname']} mispattributes = {'input': ['ip-src', 'ip-dst', 'domain|ip'], 'output': ['hostname']}
@ -12,6 +12,7 @@ moduleinfo = {'version': '0.1', 'author': 'Andreas Muehlemann',
# config fields that your code expects from the site admin # config fields that your code expects from the site admin
moduleconfig = ['nameserver'] moduleconfig = ['nameserver']
def handler(q=False): def handler(q=False):
if q is False: if q is False:
return False return False
@ -26,9 +27,9 @@ def handler(q=False):
return False return False
# reverse lookup for ip # reverse lookup for ip
revname = dns.reversename.from_address(toquery) revname = reversename.from_address(toquery)
r = dns.resolver.Resolver() r = resolver.Resolver()
r.timeout = 2 r.timeout = 2
r.lifetime = 2 r.lifetime = 2
@ -42,13 +43,13 @@ def handler(q=False):
try: try:
answer = r.query(revname, 'PTR') answer = r.query(revname, 'PTR')
except dns.resolver.NXDOMAIN: except resolver.NXDOMAIN:
misperrors['error'] = "NXDOMAIN" misperrors['error'] = "NXDOMAIN"
return misperrors return misperrors
except dns.exception.Timeout: except exception.Timeout:
misperrors['error'] = "Timeout" misperrors['error'] = "Timeout"
return misperrors return misperrors
except: except Exception:
misperrors['error'] = "DNS resolving error" misperrors['error'] = "DNS resolving error"
return misperrors return misperrors
@ -56,9 +57,11 @@ def handler(q=False):
'values':[str(answer[0])]}]} 'values':[str(answer[0])]}]}
return r return r
def introspection(): def introspection():
return mispattributes return mispattributes
def version(): def version():
moduleinfo['config'] = moduleconfig moduleinfo['config'] = moduleconfig
return moduleinfo return moduleinfo
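
The module now imports the submodules it actually uses (reversename, resolver, exception) instead of `dns.reversename, dns.resolver` on one line; the PTR lookup itself is unchanged. Condensed sketch of that flow with a documentation IP:

from dns import resolver, reversename

revname = reversename.from_address('192.0.2.1')  # -> 1.2.0.192.in-addr.arpa.
r = resolver.Resolver()
r.timeout = 2
r.lifetime = 2
try:
    answer = r.query(revname, 'PTR')
    print(str(answer[0]))
except resolver.NXDOMAIN:
    print('NXDOMAIN')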


@ -114,8 +114,7 @@ def handle_domain(api, domain, misperrors):
if r: if r:
result_filtered['results'].extend(r) result_filtered['results'].extend(r)
else: else:
misperrors['error'] = misperrors[ misperrors['error'] = misperrors['error'] + ' Error in expand History DNS'
'error'] + ' Error in expand History DNS'
return misperrors return misperrors
r, status_ok = expand_history_whois(api, domain) r, status_ok = expand_history_whois(api, domain)
@ -124,8 +123,7 @@ def handle_domain(api, domain, misperrors):
if r: if r:
result_filtered['results'].extend(r) result_filtered['results'].extend(r)
else: else:
misperrors['error'] = misperrors['error'] + \ misperrors['error'] = misperrors['error'] + ' Error in expand History Whois'
' Error in expand History Whois'
return misperrors return misperrors
return result_filtered return result_filtered


@ -1,4 +1,6 @@
import sys, os, io, json import sys
import io
import json
try: try:
from sigma.parser import SigmaCollectionParser from sigma.parser import SigmaCollectionParser
from sigma.config import SigmaConfiguration from sigma.config import SigmaConfiguration
@ -13,6 +15,7 @@ moduleinfo = {'version': '0.1', 'author': 'Christian Studer', 'module-type': ['e
moduleconfig = [] moduleconfig = []
sigma_targets = ('es-dsl', 'es-qs', 'graylog', 'kibana', 'xpack-watcher', 'logpoint', 'splunk', 'grep', 'wdatp', 'splunkxml', 'arcsight', 'qualys') sigma_targets = ('es-dsl', 'es-qs', 'graylog', 'kibana', 'xpack-watcher', 'logpoint', 'splunk', 'grep', 'wdatp', 'splunkxml', 'arcsight', 'qualys')
def handler(q=False): def handler(q=False):
if q is False: if q is False:
return False return False
@ -35,16 +38,18 @@ def handler(q=False):
backend.finalize() backend.finalize()
print("#NEXT") print("#NEXT")
targets.append(t) targets.append(t)
except: except Exception:
continue continue
sys.stdout = old_stdout sys.stdout = old_stdout
results = result.getvalue()[:-5].split('#NEXT') results = result.getvalue()[:-5].split('#NEXT')
d_result = {t: r.strip() for t,r in zip(targets, results)} d_result = {t: r.strip() for t, r in zip(targets, results)}
return {'results': [{'types': mispattributes['output'], 'values': d_result}]} return {'results': [{'types': mispattributes['output'], 'values': d_result}]}
def introspection(): def introspection():
return mispattributes return mispattributes
def version(): def version():
moduleinfo['config'] = moduleconfig moduleinfo['config'] = moduleconfig
return moduleinfo return moduleinfo
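
The handler gathers each sigma backend's translated query by pointing sys.stdout at an io.StringIO buffer and later splitting on the "#NEXT" marker. A stripped-down sketch of that capture pattern (the printed text stands in for the backend output); the standard library's contextlib.redirect_stdout does the same with automatic restore:

import io
import sys

old_stdout = sys.stdout
sys.stdout = buf = io.StringIO()
print("translated query")  # stands in for the sigma backend's printed output
sys.stdout = old_stdout    # always restore stdout
captured = buf.getvalue().strip()
print(captured)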


@ -12,6 +12,7 @@ moduleinfo = {'version': '0.1', 'author': 'Christian Studer', 'module-type': ['e
'description': 'An expansion hover module to perform a syntax check on sigma rules'} 'description': 'An expansion hover module to perform a syntax check on sigma rules'}
moduleconfig = [] moduleconfig = []
def handler(q=False): def handler(q=False):
if q is False: if q is False:
return False return False
@ -27,9 +28,11 @@ def handler(q=False):
result = ("Syntax error: {}".format(str(e))) result = ("Syntax error: {}".format(str(e)))
return {'results': [{'types': mispattributes['output'], 'values': result}]} return {'results': [{'types': mispattributes['output'], 'values': result}]}
def introspection(): def introspection():
return mispattributes return mispattributes
def version(): def version():
moduleinfo['config'] = moduleconfig moduleinfo['config'] = moduleconfig
return moduleinfo return moduleinfo


@ -10,6 +10,7 @@ moduleinfo = {'version': '0.1', 'author': 'Christian Studer', 'module-type': ['e
'description': 'An expansion hover module to perform a syntax check on stix2 patterns.'} 'description': 'An expansion hover module to perform a syntax check on stix2 patterns.'}
moduleconfig = [] moduleconfig = []
def handler(q=False): def handler(q=False):
if q is False: if q is False:
return False return False
@ -27,16 +28,18 @@ def handler(q=False):
if syntax_errors: if syntax_errors:
s = 's' if len(syntax_errors) > 1 else '' s = 's' if len(syntax_errors) > 1 else ''
s_errors = "" s_errors = ""
for error in syntax_errors: for error in syntax_errors:
s_errors += "{}\n".format(error[6:]) s_errors += "{}\n".format(error[6:])
result = "Syntax error{}: \n{}".format(s, s_errors[:-1]) result = "Syntax error{}: \n{}".format(s, s_errors[:-1])
else: else:
result = "Syntax valid" result = "Syntax valid"
return {'results': [{'types': mispattributes['output'], 'values': result}]} return {'results': [{'types': mispattributes['output'], 'values': result}]}
def introspection(): def introspection():
return mispattributes return mispattributes
def version(): def version():
moduleinfo['config'] = moduleconfig moduleinfo['config'] = moduleconfig
return moduleinfo return moduleinfo


@ -17,7 +17,7 @@ moduleconfig = []
# Avoid adding windows update to enrichment etc. # Avoid adding windows update to enrichment etc.
def isBlacklisted(value): def isBlacklisted(value):
blacklist = ['8.8.8.8', '255.255.255.255', '192.168.56.' , 'time.windows.com'] blacklist = ['8.8.8.8', '255.255.255.255', '192.168.56.', 'time.windows.com']
for b in blacklist: for b in blacklist:
if value in b: if value in b:
@ -25,28 +25,31 @@ def isBlacklisted(value):
return False return False
def valid_ip(ip): def valid_ip(ip):
m = re.match(r"^(\d{1,3})\.(\d{1,3})\.(\d{1,3})\.(\d{1,3})$", ip) m = re.match(r"^(\d{1,3})\.(\d{1,3})\.(\d{1,3})\.(\d{1,3})$", ip)
return bool(m) and all(map(lambda n: 0 <= int(n) <= 255, m.groups())) return bool(m) and all(map(lambda n: 0 <= int(n) <= 255, m.groups()))
def valid_domain(hostname): def valid_domain(hostname):
if len(hostname) > 255: if len(hostname) > 255:
return False return False
if hostname[-1] == ".": if hostname[-1] == ".":
hostname = hostname[:-1] # strip exactly one dot from the right, if present hostname = hostname[:-1] # strip exactly one dot from the right, if present
allowed = re.compile("(?!-)[A-Z\d-]{1,63}(?<!-)$", re.IGNORECASE) allowed = re.compile(r"(?!-)[A-Z\d-]{1,63}(?<!-)$", re.IGNORECASE)
return all(allowed.match(x) for x in hostname.split(".")) return all(allowed.match(x) for x in hostname.split("."))
def valid_email(email): def valid_email(email):
return bool(re.search(r"^[\w\.\+\-]+\@[\w]+\.[a-z]{2,3}$", email)) return bool(re.search(r"^[\w\.\+\-]+\@[\w]+\.[a-z]{2,3}$", email))
def handler(q=False): def handler(q=False):
if q is False: if q is False:
return False return False
q = json.loads(q) q = json.loads(q)
r = {"results": []} r = {"results": []}
if "ip-src" in q: if "ip-src" in q:
@ -98,7 +101,7 @@ def getHash(hash):
def getIP(ip): def getIP(ip):
ret = [] ret = []
req = json.loads( requests.get("https://www.threatcrowd.org/searchApi/v2/ip/report/?ip=" + ip).text ) req = json.loads(requests.get("https://www.threatcrowd.org/searchApi/v2/ip/report/?ip=" + ip).text)
if "resolutions" in req: if "resolutions" in req:
for dns in req["resolutions"]: for dns in req["resolutions"]:
@ -110,11 +113,9 @@ def getIP(ip):
for hash in req["hashes"]: for hash in req["hashes"]:
ret.append({"types": ["md5"], "values": [hash]}) ret.append({"types": ["md5"], "values": [hash]})
return ret return ret
def getEmail(email): def getEmail(email):
ret = [] ret = []
j = requests.get("https://www.threatcrowd.org/searchApi/v2/email/report/?email=" + email).text j = requests.get("https://www.threatcrowd.org/searchApi/v2/email/report/?email=" + email).text
@ -129,11 +130,10 @@ def getEmail(email):
return ret return ret
def getDomain(domain): def getDomain(domain):
ret = [] ret = []
req = json.loads( requests.get("https://www.threatcrowd.org/searchApi/v2/domain/report/?domain=" + domain).text ) req = json.loads(requests.get("https://www.threatcrowd.org/searchApi/v2/domain/report/?domain=" + domain).text)
if "resolutions" in req: if "resolutions" in req:
for dns in req["resolutions"]: for dns in req["resolutions"]:
@ -148,9 +148,9 @@ def getDomain(domain):
for hash in req["hashes"]: for hash in req["hashes"]:
ret.append({"types": ["md5"], "values": [hash]}) ret.append({"types": ["md5"], "values": [hash]})
return ret return ret
def introspection(): def introspection():
return mispattributes return mispattributes
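
Aside: the ThreatCrowd lookups above build each request URL by string concatenation and decode the body with json.loads(requests.get(...).text). A minimal sketch of the same call that lets requests handle query-string encoding and JSON decoding; the function name and timeout are illustrative choices, not part of this module:

import requests

def threatcrowd_ip_report(ip):
    # requests encodes the "ip" parameter and .json() decodes the response body.
    url = "https://www.threatcrowd.org/searchApi/v2/ip/report/"
    response = requests.get(url, params={"ip": ip}, timeout=30)
    response.raise_for_status()
    return response.json()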

View File

@ -1,7 +1,5 @@
import json import json
import requests import requests
from requests import HTTPError
import base64
misperrors = {'error': 'Error'} misperrors = {'error': 'Error'}
mispattributes = {'input': ['hostname', 'domain', 'ip-src', 'ip-dst', 'md5', 'sha1', 'sha256', 'sha512'], mispattributes = {'input': ['hostname', 'domain', 'ip-src', 'ip-dst', 'md5', 'sha1', 'sha256', 'sha512'],
@ -61,27 +59,27 @@ def get_domain(q):
continue continue
for result in results: for result in results:
if flag == 1: #whois if flag == 1: # whois
emails = result.get('whois', {}).get('emails') emails = result.get('whois', {}).get('emails')
if not emails: if not emails:
continue continue
for em_type, email in emails.items(): for em_type, email in emails.items():
ret.append({'types': ['whois-registrant-email'], 'values': [email], 'comment': desc.format(q, 'whois')}) ret.append({'types': ['whois-registrant-email'], 'values': [email], 'comment': desc.format(q, 'whois')})
if flag == 2: #pdns if flag == 2: # pdns
ip = result.get('ip') ip = result.get('ip')
if ip: if ip:
ret.append({'types': ['ip-src', 'ip-dst'], 'values': [ip], 'comment': desc.format(q, 'pdns')}) ret.append({'types': ['ip-src', 'ip-dst'], 'values': [ip], 'comment': desc.format(q, 'pdns')})
if flag == 3: #uri if flag == 3: # uri
uri = result.get('uri') uri = result.get('uri')
if uri: if uri:
ret.append({'types': ['url'], 'values': [uri], 'comment': desc.format(q, 'uri')}) ret.append({'types': ['url'], 'values': [uri], 'comment': desc.format(q, 'uri')})
if flag == 4: #samples if flag == 4: # samples
if type(result) is str: if type(result) is str:
ret.append({'types': ['sha256'], 'values': [result], 'comment': desc.format(q, 'samples')}) ret.append({'types': ['sha256'], 'values': [result], 'comment': desc.format(q, 'samples')})
if flag == 5: #subdomains if flag == 5: # subdomains
if type(result) is str: if type(result) is str:
ret.append({'types': ['domain'], 'values': [result], 'comment': desc.format(q, 'subdomain')}) ret.append({'types': ['domain'], 'values': [result], 'comment': desc.format(q, 'subdomain')})
if flag == 6: #reports if flag == 6: # reports
link = result.get('URL') link = result.get('URL')
if link: if link:
ret.append({'types': ['url'], 'values': [link], 'comment': desc.format(q, 'report')}) ret.append({'types': ['url'], 'values': [link], 'comment': desc.format(q, 'report')})
@ -100,27 +98,27 @@ def get_ip(q):
continue continue
for result in results: for result in results:
if flag == 1: #whois if flag == 1: # whois
emails = result.get('whois', {}).get('emails') emails = result.get('whois', {}).get('emails')
if not emails: if not emails:
continue continue
for em_type, email in emails.items(): for em_type, email in emails.items():
ret.append({'types': ['whois-registrant-email'], 'values': [email], 'comment': desc.format(q, 'whois')}) ret.append({'types': ['whois-registrant-email'], 'values': [email], 'comment': desc.format(q, 'whois')})
if flag == 2: #pdns if flag == 2: # pdns
ip = result.get('ip') ip = result.get('ip')
if ip: if ip:
ret.append({'types': ['ip-src', 'ip-dst'], 'values': [ip], 'comment': desc.format(q, 'pdns')}) ret.append({'types': ['ip-src', 'ip-dst'], 'values': [ip], 'comment': desc.format(q, 'pdns')})
if flag == 3: #uri if flag == 3: # uri
uri = result.get('uri') uri = result.get('uri')
if uri: if uri:
ret.append({'types': ['url'], 'values': [uri], 'comment': desc.format(q, 'uri')}) ret.append({'types': ['url'], 'values': [uri], 'comment': desc.format(q, 'uri')})
if flag == 4: #samples if flag == 4: # samples
if type(result) is str: if type(result) is str:
ret.append({'types': ['sha256'], 'values': [result], 'comment': desc.format(q, 'samples')}) ret.append({'types': ['sha256'], 'values': [result], 'comment': desc.format(q, 'samples')})
if flag == 5: #ssl if flag == 5: # ssl
if type(result) is str: if type(result) is str:
ret.append({'types': ['x509-fingerprint-sha1'], 'values': [result], 'comment': desc.format(q, 'ssl')}) ret.append({'types': ['x509-fingerprint-sha1'], 'values': [result], 'comment': desc.format(q, 'ssl')})
if flag == 6: #reports if flag == 6: # reports
link = result.get('URL') link = result.get('URL')
if link: if link:
ret.append({'types': ['url'], 'values': [link], 'comment': desc.format(q, 'report')}) ret.append({'types': ['url'], 'values': [link], 'comment': desc.format(q, 'report')})
@ -139,11 +137,11 @@ def get_hash(q):
continue continue
for result in results: for result in results:
if flag == 1: #meta (filename) if flag == 1: # meta (filename)
name = result.get('file_name') name = result.get('file_name')
if name: if name:
ret.append({'types': ['filename'], 'values': [name], 'comment': desc.format(q, 'file')}) ret.append({'types': ['filename'], 'values': [name], 'comment': desc.format(q, 'file')})
if flag == 3: #network if flag == 3: # network
domains = result.get('domains') domains = result.get('domains')
for dom in domains: for dom in domains:
if dom.get('domain'): if dom.get('domain'):
@ -153,12 +151,12 @@ def get_hash(q):
for h in hosts: for h in hosts:
if type(h) is str: if type(h) is str:
ret.append({'types': ['ip-src', 'ip-dst'], 'values': [h], 'comment': desc.format(q, 'network')}) ret.append({'types': ['ip-src', 'ip-dst'], 'values': [h], 'comment': desc.format(q, 'network')})
if flag == 6: #detections if flag == 6: # detections
detections = result.get('av_detections') detections = result.get('av_detections')
for d in detections: for d in detections:
if d.get('detection'): if d.get('detection'):
ret.append({'types': ['text'], 'values': [d['detection']], 'comment': desc.format(q, 'detection')}) ret.append({'types': ['text'], 'values': [d['detection']], 'comment': desc.format(q, 'detection')})
if flag == 7: #report if flag == 7: # report
if type(result) is str: if type(result) is str:
ret.append({'types': ['sha256'], 'values': [result], 'comment': desc.format(q, 'report')}) ret.append({'types': ['sha256'], 'values': [result], 'comment': desc.format(q, 'report')})

View File

@ -169,8 +169,8 @@ def lookup_indicator(client, query):
'categories': ['External analysis'], 'categories': ['External analysis'],
'values': image_url, 'values': image_url,
'comment': misp_comment}) 'comment': misp_comment})
### TO DO ### # ## TO DO ###
### Add ability to add an in-line screenshot of the target website into an attribute # ## Add ability to add an in-line screenshot of the target website into an attribute
# screenshot = requests.get(image_url).content # screenshot = requests.get(image_url).content
# r.append({'types': ['attachment'], # r.append({'types': ['attachment'],
# 'categories': ['External analysis'], # 'categories': ['External analysis'],

View File

@ -19,6 +19,7 @@ moduleconfig = ["apikey", "event_limit"]
comment = '{}: Enriched via VirusTotal' comment = '{}: Enriched via VirusTotal'
hash_types = ["md5", "sha1", "sha256", "sha512"] hash_types = ["md5", "sha1", "sha256", "sha512"]
class VirusTotalRequest(object): class VirusTotalRequest(object):
def __init__(self, config): def __init__(self, config):
self.apikey = config['apikey'] self.apikey = config['apikey']
@ -146,6 +147,7 @@ class VirusTotalRequest(object):
self.to_return.append({"types": ["malware-sample"], "categories": ["Payload delivery"], self.to_return.append({"types": ["malware-sample"], "categories": ["Payload delivery"],
"values": data["submimssion_names"], "data": str(base64.b64encore(malsample), 'utf-8')}) "values": data["submimssion_names"], "data": str(base64.b64encore(malsample), 'utf-8')})
def handler(q=False): def handler(q=False):
if q is False: if q is False:
return False return False
@ -161,9 +163,11 @@ def handler(q=False):
return misperrors return misperrors
return {'results': r} return {'results': r}
def introspection(): def introspection():
return mispattributes return mispattributes
def version(): def version():
moduleinfo['config'] = moduleconfig moduleinfo['config'] = moduleconfig
return moduleinfo return moduleinfo

View File

@ -40,12 +40,12 @@ def handler(q=False):
data = request.get("data") data = request.get("data")
if 'malware-sample' in request: if 'malware-sample' in request:
# malicious samples are encrypted with zip (password infected) and then base64 encoded # malicious samples are encrypted with zip (password infected) and then base64 encoded
sample_filename = request.get("malware-sample").split("|",1)[0] sample_filename = request.get("malware-sample").split("|", 1)[0]
data = base64.b64decode(data) data = base64.b64decode(data)
fl = io.BytesIO(data) fl = io.BytesIO(data)
zf = zipfile.ZipFile(fl) zf = zipfile.ZipFile(fl)
sample_hashname = zf.namelist()[0] sample_hashname = zf.namelist()[0]
data = zf.read(sample_hashname,b"infected") data = zf.read(sample_hashname, b"infected")
zf.close() zf.close()
elif 'attachment' in request: elif 'attachment' in request:
# All attachments get base64 encoded # All attachments get base64 encoded
@ -55,7 +55,7 @@ def handler(q=False):
else: else:
misperrors['error'] = "No malware sample or attachment supplied" misperrors['error'] = "No malware sample or attachment supplied"
return misperrors return misperrors
except: except Exception:
misperrors['error'] = "Unable to process submited sample data" misperrors['error'] = "Unable to process submited sample data"
return misperrors return misperrors
@ -102,7 +102,7 @@ def handler(q=False):
return misperrors return misperrors
else: else:
return vmrayProcess(vmraydata) return vmrayProcess(vmraydata)
except: except Exception:
misperrors['error'] = "Problem when calling API." misperrors['error'] = "Problem when calling API."
return misperrors return misperrors
else: else:
@ -148,7 +148,7 @@ def vmrayProcess(vmraydata):
else: else:
misperrors['error'] = "No valid results returned." misperrors['error'] = "No valid results returned."
return misperrors return misperrors
except: except Exception:
misperrors['error'] = "No valid submission data returned." misperrors['error'] = "No valid submission data returned."
return misperrors return misperrors
else: else:

View File

@ -24,8 +24,8 @@ log.addHandler(ch)
misperrors = {'error': 'Error'} misperrors = {'error': 'Error'}
mispattributes = { mispattributes = {
'input': ['vulnerability'], 'input': ['vulnerability'],
'output': ['text', 'link', 'cpe']} 'output': ['text', 'link', 'cpe']}
moduleinfo = {'version': '0.1', 'author': 'Koen Van Impe', moduleinfo = {'version': '0.1', 'author': 'Koen Van Impe',
'description': 'Query VulnDB - RiskBasedSecurity.com', 'description': 'Query VulnDB - RiskBasedSecurity.com',
'module-type': ['expansion', 'hover']} 'module-type': ['expansion', 'hover']}
@ -61,7 +61,7 @@ def handler(q=False):
add_dates = True add_dates = True
add_ext_references = True add_ext_references = True
if request["config"].get("discard_dates") is not None and request["config"].get("discard_dates").lower() == "true": if request["config"].get("discard_dates") is not None and request["config"].get("discard_dates").lower() == "true":
add_dates = False add_dates = False
if request["config"].get("discard_external_references") is not None and request["config"].get("discard_external_references").lower() == "true": if request["config"].get("discard_external_references") is not None and request["config"].get("discard_external_references").lower() == "true":
add_ext_references = False add_ext_references = False
@ -80,7 +80,7 @@ def handler(q=False):
find_by_cve_url = "%s/api/v1/vulnerabilities/%s/find_by_cve_id%s" % (VULNDB_URL, vulnerability, cpu_vulndb) find_by_cve_url = "%s/api/v1/vulnerabilities/%s/find_by_cve_id%s" % (VULNDB_URL, vulnerability, cpu_vulndb)
log.debug(find_by_cve_url) log.debug(find_by_cve_url)
try: try:
consumer = oauth.Consumer(key=apikey, secret=apisecret) consumer = oauth.Consumer(key=apikey, secret=apisecret)
@ -116,7 +116,7 @@ def handler(q=False):
if t_description: if t_description:
values_text.append(t_description) values_text.append(t_description)
if manual_notes: if manual_notes:
values_text.append("Notes: " + manual_notes) values_text.append("Notes: " + manual_notes)
if keywords: if keywords:
values_text.append("Keywords: " + keywords) values_text.append("Keywords: " + keywords)
if solution: if solution:
@ -130,22 +130,22 @@ def handler(q=False):
values_text.append("Solution date: " + solution_date) values_text.append("Solution date: " + solution_date)
disclosure_date = results.get('disclosure_date', '') or '' disclosure_date = results.get('disclosure_date', '') or ''
if disclosure_date: if disclosure_date:
values_text.append("Disclosure date: " + disclosure_date) values_text.append("Disclosure date: " + disclosure_date)
discovery_date = results.get('discovery_date', '') or '' discovery_date = results.get('discovery_date', '') or ''
if discovery_date: if discovery_date:
values_text.append("Discovery date: " + discovery_date) values_text.append("Discovery date: " + discovery_date)
exploit_publish_date = results.get('exploit_publish_date', '') or '' exploit_publish_date = results.get('exploit_publish_date', '') or ''
if exploit_publish_date: if exploit_publish_date:
values_text.append("Exploit published date: " + exploit_publish_date) values_text.append("Exploit published date: " + exploit_publish_date)
vendor_informed_date = results.get('vendor_informed_date', '') or '' vendor_informed_date = results.get('vendor_informed_date', '') or ''
if vendor_informed_date: if vendor_informed_date:
values_text.append("Vendor informed date: " + vendor_informed_date) values_text.append("Vendor informed date: " + vendor_informed_date)
vendor_ack_date = results.get('vendor_ack_date', '') or '' vendor_ack_date = results.get('vendor_ack_date', '') or ''
if vendor_ack_date: if vendor_ack_date:
values_text.append("Vendor acknowledgement date: " + vendor_ack_date) values_text.append("Vendor acknowledgement date: " + vendor_ack_date)
third_party_solution_date = results.get('third_party_solution_date', '') or '' third_party_solution_date = results.get('third_party_solution_date', '') or ''
if third_party_solution_date: if third_party_solution_date:
values_text.append("Third party solution date: " + third_party_solution_date) values_text.append("Third party solution date: " + third_party_solution_date)
# External references # External references
if add_ext_references: if add_ext_references:
@ -159,7 +159,7 @@ def handler(q=False):
elif reference_type == "News Article": elif reference_type == "News Article":
values_links.append(reference["value"]) values_links.append(reference["value"])
elif reference_type == "Generic Informational URL": elif reference_type == "Generic Informational URL":
values_links.append(reference["value"]) values_links.append(reference["value"])
elif reference_type == "Vendor Specific Advisory URL": elif reference_type == "Vendor Specific Advisory URL":
values_links.append(reference["value"]) values_links.append(reference["value"])
elif reference_type == "Vendor URL": elif reference_type == "Vendor URL":
@ -183,7 +183,7 @@ def handler(q=False):
values_links.append(reference_link) values_links.append(reference_link)
elif reference_type == "Exploit Database": elif reference_type == "Exploit Database":
reference_link = "https://www.exploit-db.com/exploits/%s" % reference["value"] reference_link = "https://www.exploit-db.com/exploits/%s" % reference["value"]
values_links.append(reference_link) values_links.append(reference_link)
elif reference_type == "Generic Informational URL": elif reference_type == "Generic Informational URL":
values_links.append(reference["value"]) values_links.append(reference["value"])
elif reference_type == "Generic Informational URL": elif reference_type == "Generic Informational URL":
@ -260,17 +260,17 @@ def handler(q=False):
values_text.append(vulnerability_classification) values_text.append(vulnerability_classification)
# Finished processing the VulnDB reply; set the result for MISP # Finished processing the VulnDB reply; set the result for MISP
output['results'] += [{'types': 'text', 'values': values_text }] output['results'] += [{'types': 'text', 'values': values_text}]
output['results'] += [{'types': 'link', 'values': values_links }] output['results'] += [{'types': 'link', 'values': values_links}]
if add_cpe: if add_cpe:
output['results'] += [{'types': 'cpe', 'values': values_cpe }] output['results'] += [{'types': 'cpe', 'values': values_cpe}]
return output return output
else: else:
misperrors["error"] = "No information retrieved from VulnDB." misperrors["error"] = "No information retrieved from VulnDB."
return misperrors return misperrors
except: except Exception:
misperrors["error"] = "Error while fetching information from VulnDB, wrong API keys?" misperrors["error"] = "Error while fetching information from VulnDB, wrong API keys?"
return misperrors return misperrors
def introspection(): def introspection():

View File

@ -1,5 +1,4 @@
import json import json
import requests
import vulners import vulners
misperrors = {'error': 'Error'} misperrors = {'error': 'Error'}
@ -48,10 +47,10 @@ def handler(q=False):
exploit_summary += " || " + str(len(vulners_exploits[0])) + " Public exploits available:\n " exploit_summary += " || " + str(len(vulners_exploits[0])) + " Public exploits available:\n "
for exploit in vulners_exploits[0]: for exploit in vulners_exploits[0]:
exploit_summary += exploit['title'] + " " + exploit['href'] + "\n " exploit_summary += exploit['title'] + " " + exploit['href'] + "\n "
exploit_summary += "|| Vulnerability Description: " + vuln_summary exploit_summary += "|| Vulnerability Description: " + vuln_summary
summary = ai_summary + exploit_summary + vuln_summary summary = ai_summary + exploit_summary + vuln_summary
r = {'results': [{'types': mispattributes['output'], 'values': summary}]} r = {'results': [{'types': mispattributes['output'], 'values': summary}]}
return r return r

View File

@ -1,5 +1,4 @@
import json import json
import requests
from SPARQLWrapper import SPARQLWrapper, JSON from SPARQLWrapper import SPARQLWrapper, JSON
misperrors = {'error': 'Error'} misperrors = {'error': 'Error'}
@ -17,17 +16,17 @@ def handler(q=False):
if not request.get('text'): if not request.get('text'):
misperrors['error'] = 'Query text missing' misperrors['error'] = 'Query text missing'
return misperrors return misperrors
sparql = SPARQLWrapper(wiki_api_url) sparql = SPARQLWrapper(wiki_api_url)
query_string = \ query_string = \
"SELECT ?item \n" \ "SELECT ?item \n" \
"WHERE { \n" \ "WHERE { \n" \
"?item rdfs:label\"" + request.get('text') + "\" @en \n" \ "?item rdfs:label\"" + request.get('text') + "\" @en \n" \
"}\n"; "}\n"
sparql.setQuery(query_string) sparql.setQuery(query_string)
sparql.setReturnFormat(JSON) sparql.setReturnFormat(JSON)
results = sparql.query().convert() results = sparql.query().convert()
summary = '' summary = ''
try: try:
result = results["results"]["bindings"][0] result = results["results"]["bindings"][0]
@ -47,4 +46,3 @@ def introspection():
def version(): def version():
moduleinfo['config'] = moduleconfig moduleinfo['config'] = moduleconfig
return moduleinfo return moduleinfo

View File

@ -5,17 +5,17 @@ import sys
BASEurl = "https://api.xforce.ibmcloud.com/" BASEurl = "https://api.xforce.ibmcloud.com/"
extensions = {"ip1": "ipr/%s", extensions = {"ip1": "ipr/%s",
"ip2": "ipr/malware/%s", "ip2": "ipr/malware/%s",
"url": "url/%s", "url": "url/%s",
"hash": "malware/%s", "hash": "malware/%s",
"vuln": "/vulnerabilities/search/%s", "vuln": "/vulnerabilities/search/%s",
"dns": "resolve/%s"} "dns": "resolve/%s"}
sys.path.append('./') sys.path.append('./')
misperrors = {'error': 'Error'} misperrors = {'error': 'Error'}
mispattributes = {'input': ['ip-src', 'ip-dst', 'vulnerability', 'md5', 'sha1', 'sha256'], mispattributes = {'input': ['ip-src', 'ip-dst', 'vulnerability', 'md5', 'sha1', 'sha256'],
'output': ['ip-src', 'ip-dst', 'text', 'domain']} 'output': ['ip-src', 'ip-dst', 'text', 'domain']}
# possible module-types: 'expansion', 'hover' or both # possible module-types: 'expansion', 'hover' or both
moduleinfo = {'version': '1', 'author': 'Joerg Stephan (@johest)', moduleinfo = {'version': '1', 'author': 'Joerg Stephan (@johest)',
@ -24,78 +24,80 @@ moduleinfo = {'version': '1', 'author': 'Joerg Stephan (@johest)',
# config fields that your code expects from the site admin # config fields that your code expects from the site admin
moduleconfig = ["apikey", "event_limit"] moduleconfig = ["apikey", "event_limit"]
limit = 5000 #Default limit = 5000 # Default
def MyHeader(key=False): def MyHeader(key=False):
global limit global limit
if key is False: if key is False:
return None return None
return {"Authorization": "Basic %s " % key, return {"Authorization": "Basic %s " % key,
"Accept": "application/json", "Accept": "application/json",
'User-Agent': 'Mozilla 5.0'} 'User-Agent': 'Mozilla 5.0'}
def handler(q=False): def handler(q=False):
global limit global limit
if q is False: if q is False:
return False return False
q = json.loads(q) q = json.loads(q)
key = q["config"]["apikey"]
limit = int(q["config"].get("event_limit", 5))
r = {"results": []} key = q["config"]["apikey"]
limit = int(q["config"].get("event_limit", 5))
if "ip-src" in q:
r["results"] += apicall("dns", q["ip-src"], key) r = {"results": []}
if "ip-dst" in q:
r["results"] += apicall("dns", q["ip-dst"], key) if "ip-src" in q:
if "md5" in q: r["results"] += apicall("dns", q["ip-src"], key)
r["results"] += apicall("hash", q["md5"], key) if "ip-dst" in q:
if "sha1" in q: r["results"] += apicall("dns", q["ip-dst"], key)
r["results"] += apicall("hash", q["sha1"], key) if "md5" in q:
if "sha256" in q: r["results"] += apicall("hash", q["md5"], key)
r["results"] += apicall("hash", q["sha256"], key) if "sha1" in q:
if 'vulnerability' in q: r["results"] += apicall("hash", q["sha1"], key)
r["results"] += apicall("vuln", q["vulnerability"], key) if "sha256" in q:
if "domain" in q: r["results"] += apicall("hash", q["sha256"], key)
if 'vulnerability' in q:
r["results"] += apicall("vuln", q["vulnerability"], key)
if "domain" in q:
r["results"] += apicall("dns", q["domain"], key) r["results"] += apicall("dns", q["domain"], key)
uniq = [] uniq = []
for res in r["results"]: for res in r["results"]:
if res not in uniq: if res not in uniq:
uniq.append(res) uniq.append(res)
r["results"] = uniq r["results"] = uniq
return r return r
def apicall(indicator_type, indicator, key=False): def apicall(indicator_type, indicator, key=False):
try: try:
myURL = BASEurl + (extensions[str(indicator_type)])%indicator myURL = BASEurl + (extensions[str(indicator_type)]) % indicator
jsondata = requests.get(myURL, headers=MyHeader(key)).json() jsondata = requests.get(myURL, headers=MyHeader(key)).json()
except: except Exception:
jsondata = None jsondata = None
redata = [] redata = []
#print(jsondata) # print(jsondata)
if not jsondata is None: if jsondata is not None:
if indicator_type is "hash": if indicator_type is "hash":
if "malware" in jsondata: if "malware" in jsondata:
lopointer = jsondata["malware"] lopointer = jsondata["malware"]
redata.append({"type": "text", "values": lopointer["risk"]}) redata.append({"type": "text", "values": lopointer["risk"]})
if indicator_type is "dns": if indicator_type is "dns":
if "records" in str(jsondata): if "records" in str(jsondata):
lopointer = jsondata["Passive"]["records"] lopointer = jsondata["Passive"]["records"]
for dataset in lopointer: for dataset in lopointer:
redata.append({"type":"domain", "values": dataset["value"]}) redata.append({"type": "domain", "values": dataset["value"]})
return redata return redata
def introspection(): def introspection():
return mispattributes return mispattributes
def version(): def version():
moduleinfo['config'] = moduleconfig moduleinfo['config'] = moduleconfig
return moduleinfo return moduleinfo
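
Note: the hunk above rewrites "if not jsondata is None:" as "if jsondata is not None:" (flake8 E714), but the "if indicator_type is 'hash':" checks still compare a string against a literal by identity. A small sketch of why == is the safer comparison here; the joined string is only an illustration of an equal but distinct object:

indicator_type = "".join(["ha", "sh"])    # equal to "hash", but a distinct object

print(indicator_type == "hash")   # True: compares values
print(indicator_type is "hash")   # False: compares object identity (newer Pythons also warn about this)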

View File

@ -13,11 +13,13 @@ moduleinfo = {'version': '1', 'author': 'Christian STUDER',
moduleconfig = [] moduleconfig = []
mispattributes = {'input': ['md5', 'sha1', 'sha256', 'filename|md5', 'filename|sha1', 'filename|sha256', 'imphash'], 'output': ['yara']} mispattributes = {'input': ['md5', 'sha1', 'sha256', 'filename|md5', 'filename|sha1', 'filename|sha256', 'imphash'], 'output': ['yara']}
def get_hash_condition(hashtype, hashvalue): def get_hash_condition(hashtype, hashvalue):
hashvalue = hashvalue.lower() hashvalue = hashvalue.lower()
required_module, params = ('pe', '()') if hashtype == 'imphash' else ('hash', '(0, filesize)') required_module, params = ('pe', '()') if hashtype == 'imphash' else ('hash', '(0, filesize)')
return '{}.{}{} == "{}"'.format(required_module, hashtype, params, hashvalue), required_module return '{}.{}{} == "{}"'.format(required_module, hashtype, params, hashvalue), required_module
def handler(q=False): def handler(q=False):
if q is False: if q is False:
return False return False
@ -42,9 +44,11 @@ def handler(q=False):
return misperrors return misperrors
return {'results': [{'types': mispattributes['output'], 'values': rule}]} return {'results': [{'types': mispattributes['output'], 'values': rule}]}
def introspection(): def introspection():
return mispattributes return mispattributes
def version(): def version():
moduleinfo['config'] = moduleconfig moduleinfo['config'] = moduleconfig
return moduleinfo return moduleinfo

View File

@ -1,5 +1,4 @@
import json import json
import requests
try: try:
import yara import yara
except (OSError, ImportError): except (OSError, ImportError):
@ -20,7 +19,7 @@ def handler(q=False):
return misperrors return misperrors
try: try:
rules = yara.compile(source=request.get('yara')) yara.compile(source=request.get('yara'))
summary = ("Syntax valid") summary = ("Syntax valid")
except Exception as e: except Exception as e:
summary = ("Syntax error: " + str(e)) summary = ("Syntax error: " + str(e))

View File

@ -1 +1,2 @@
__all__ = ['cef_export','liteexport','goamlexport','threat_connect_export','pdfexport','threatStream_misp_export', 'osqueryexport'] __all__ = ['cef_export', 'liteexport', 'goamlexport', 'threat_connect_export', 'pdfexport',
'threatStream_misp_export', 'osqueryexport']

View File

@ -12,30 +12,32 @@ moduleinfo = {'version': '1', 'author': 'Hannah Ward',
# config fields that your code expects from the site admin # config fields that your code expects from the site admin
moduleconfig = ["Default_Severity", "Device_Vendor", "Device_Product", "Device_Version"] moduleconfig = ["Default_Severity", "Device_Vendor", "Device_Product", "Device_Version"]
cefmapping = {"ip-src":"src", "ip-dst":"dst", "hostname":"dhost", "domain":"dhost", cefmapping = {"ip-src": "src", "ip-dst": "dst", "hostname": "dhost", "domain": "dhost",
"md5":"fileHash", "sha1":"fileHash", "sha256":"fileHash", "md5": "fileHash", "sha1": "fileHash", "sha256": "fileHash",
"url":"request"} "url": "request"}
mispattributes = {'input':list(cefmapping.keys())} mispattributes = {'input': list(cefmapping.keys())}
outputFileExtension = "cef" outputFileExtension = "cef"
responseType = "application/txt" responseType = "application/txt"
def handler(q=False): def handler(q=False):
if q is False: if q is False:
return False return False
request = json.loads(q) request = json.loads(q)
if "config" in request: if "config" in request:
config = request["config"] config = request["config"]
else: else:
config = {"Default_Severity":1, "Device_Vendor":"MISP", "Device_Product":"MISP", "Device_Version":1} config = {"Default_Severity": 1, "Device_Vendor": "MISP",
"Device_Product": "MISP", "Device_Version": 1}
data = request["data"] data = request["data"]
response = "" response = ""
for ev in data: for ev in data:
event = ev["Attribute"] event = ev["Attribute"]
for attr in event: for attr in event:
if attr["type"] in cefmapping: if attr["type"] in cefmapping:
response += "{} host CEF:0|{}|{}|{}|{}|{}|{}|{}={}\n".format( response += "{} host CEF:0|{}|{}|{}|{}|{}|{}|{}={}\n".format(
datetime.datetime.fromtimestamp(int(attr["timestamp"])).strftime("%b %d %H:%M:%S"), datetime.datetime.fromtimestamp(int(attr["timestamp"])).strftime("%b %d %H:%M:%S"),
config["Device_Vendor"], config["Device_Vendor"],
config["Device_Product"], config["Device_Product"],
@ -45,37 +47,37 @@ def handler(q=False):
config["Default_Severity"], config["Default_Severity"],
cefmapping[attr["type"]], cefmapping[attr["type"]],
attr["value"], attr["value"],
) )
r = {"response":[], "data":str(base64.b64encode(bytes(response, 'utf-8')), 'utf-8')} r = {"response": [], "data": str(base64.b64encode(bytes(response, 'utf-8')), 'utf-8')}
return r return r
def introspection(): def introspection():
modulesetup = {} modulesetup = {}
try: try:
responseType responseType
modulesetup['responseType'] = responseType modulesetup['responseType'] = responseType
except NameError: except NameError:
pass pass
try: try:
userConfig userConfig
modulesetup['userConfig'] = userConfig modulesetup['userConfig'] = userConfig
except NameError: except NameError:
pass pass
try: try:
outputFileExtension outputFileExtension
modulesetup['outputFileExtension'] = outputFileExtension modulesetup['outputFileExtension'] = outputFileExtension
except NameError: except NameError:
pass pass
try: try:
inputSource inputSource
modulesetup['inputSource'] = inputSource modulesetup['inputSource'] = inputSource
except NameError: except NameError:
pass pass
return modulesetup return modulesetup
def version(): def version():
moduleinfo['config'] = moduleconfig moduleinfo['config'] = moduleconfig
return moduleinfo return moduleinfo
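
Aside: introspection() above probes each optional setting with its own try/except NameError block. A more compact sketch of the same feature detection via globals(); the modules in this commit keep the explicit blocks, so this is only an alternative formulation:

responseType = "application/txt"
outputFileExtension = "cef"

def introspection():
    modulesetup = {}
    # Advertise only the settings this module actually defines.
    for setting in ("responseType", "userConfig", "outputFileExtension", "inputSource"):
        if setting in globals():
            modulesetup[setting] = globals()[setting]
    return modulesetup

print(introspection())   # {'responseType': 'application/txt', 'outputFileExtension': 'cef'}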

View File

@ -27,7 +27,7 @@ goAMLmapping = {'bank-account': {'bank-account': 't_account', 'institution-name'
'person': {'person': 't_person', 'text': 'comments', 'first-name': 'first_name', 'person': {'person': 't_person', 'text': 'comments', 'first-name': 'first_name',
'middle-name': 'middle_name', 'last-name': 'last_name', 'title': 'title', 'middle-name': 'middle_name', 'last-name': 'last_name', 'title': 'title',
'mothers-name': 'mothers_name', 'alias': 'alias', 'date-of-birth': 'birthdate', 'mothers-name': 'mothers_name', 'alias': 'alias', 'date-of-birth': 'birthdate',
'place-of-birth': 'birth_place', 'gender': 'gender','nationality': 'nationality1', 'place-of-birth': 'birth_place', 'gender': 'gender', 'nationality': 'nationality1',
'passport-number': 'passport_number', 'passport-country': 'passport_country', 'passport-number': 'passport_number', 'passport-country': 'passport_country',
'social-security-number': 'ssn', 'identity-card-number': 'id_number'}, 'social-security-number': 'ssn', 'identity-card-number': 'id_number'},
'geolocation': {'geolocation': 'location', 'city': 'city', 'region': 'state', 'geolocation': {'geolocation': 'location', 'city': 'city', 'region': 'state',
@ -48,6 +48,7 @@ referencesMapping = {'bank-account': {'aml_type': '{}_account', 'bracket': 't_{}
'legal-entity': {'transaction': {'aml_type': '{}_entity', 'bracket': 't_{}'}, 'bank-account': {'aml_type': 't_entity'}}, 'legal-entity': {'transaction': {'aml_type': '{}_entity', 'bracket': 't_{}'}, 'bank-account': {'aml_type': 't_entity'}},
'geolocation': {'aml_type': 'address', 'bracket': 'addresses'}} 'geolocation': {'aml_type': 'address', 'bracket': 'addresses'}}
class GoAmlGeneration(object): class GoAmlGeneration(object):
def __init__(self, config): def __init__(self, config):
self.config = config self.config = config
@ -186,6 +187,7 @@ class GoAmlGeneration(object):
self.itterate(next_object_type, next_aml_type, uuid, xml_part) self.itterate(next_object_type, next_aml_type, uuid, xml_part)
self.xml[xml_part] += "</{}>".format(bracket) self.xml[xml_part] += "</{}>".format(bracket)
def handler(q=False): def handler(q=False):
if q is False: if q is False:
return False return False
@ -212,6 +214,7 @@ def handler(q=False):
exp_doc = "{}{}".format(export_doc.xml.get('header'), export_doc.xml.get('data')) exp_doc = "{}{}".format(export_doc.xml.get('header'), export_doc.xml.get('data'))
return {'response': [], 'data': str(base64.b64encode(bytes(exp_doc, 'utf-8')), 'utf-8')} return {'response': [], 'data': str(base64.b64encode(bytes(exp_doc, 'utf-8')), 'utf-8')}
def introspection(): def introspection():
modulesetup = {} modulesetup = {}
try: try:
@ -236,6 +239,7 @@ def introspection():
pass pass
return modulesetup return modulesetup
def version(): def version():
moduleinfo['config'] = moduleconfig moduleinfo['config'] = moduleconfig
return moduleinfo return moduleinfo

View File

@ -3,10 +3,10 @@ import base64
misperrors = {'error': 'Error'} misperrors = {'error': 'Error'}
moduleinfo = {'version': '1', moduleinfo = {'version': '1',
'author': 'TM', 'author': 'TM',
'description': 'export lite', 'description': 'export lite',
'module-type': ['export']} 'module-type': ['export']}
moduleconfig = ["indent_json_export"] moduleconfig = ["indent_json_export"]
@ -14,76 +14,75 @@ mispattributes = {}
outputFileExtension = "json" outputFileExtension = "json"
responseType = "application/json" responseType = "application/json"
def handler(q=False): def handler(q=False):
if q is False: if q is False:
return False return False
request = json.loads(q) request = json.loads(q)
config = {} config = {}
if "config" in request: if "config" in request:
config = request["config"] config = request["config"]
else: else:
config = {"indent_json_export" : None} config = {"indent_json_export": None}
if config['indent_json_export'] is not None: if config['indent_json_export'] is not None:
try: try:
config['indent_json_export'] = int(config['indent_json_export']) config['indent_json_export'] = int(config['indent_json_export'])
except: except Exception:
config['indent_json_export'] = None config['indent_json_export'] = None
if 'data' not in request: if 'data' not in request:
return False return False
#~ Misp json structur # ~ Misp json structur
liteEvent = {'Event':{}} liteEvent = {'Event': {}}
for evt in request['data']: for evt in request['data']:
rawEvent = evt['Event'] rawEvent = evt['Event']
liteEvent['Event']['info'] = rawEvent['info'] liteEvent['Event']['info'] = rawEvent['info']
liteEvent['Event']['Attribute'] = [] liteEvent['Event']['Attribute'] = []
attrs = evt['Attribute'] attrs = evt['Attribute']
for attr in attrs: for attr in attrs:
if 'Internal reference' not in attr['category']: if 'Internal reference' not in attr['category']:
liteAttr = {} liteAttr = {}
liteAttr['category'] = attr['category'] liteAttr['category'] = attr['category']
liteAttr['type'] = attr['type'] liteAttr['type'] = attr['type']
liteAttr['value'] = attr['value'] liteAttr['value'] = attr['value']
liteEvent['Event']['Attribute'].append(liteAttr) liteEvent['Event']['Attribute'].append(liteAttr)
return {'response': [],
'data': str(base64.b64encode(bytes(
json.dumps(liteEvent, indent=config['indent_json_export']), 'utf-8')), 'utf-8')}
return {'response' : [],
'data' : str(base64.b64encode(
bytes(
json.dumps(liteEvent, indent=config['indent_json_export']),
'utf-8')),
'utf-8')
}
def introspection(): def introspection():
modulesetup = {} modulesetup = {}
try: try:
responseType responseType
modulesetup['responseType'] = responseType modulesetup['responseType'] = responseType
except NameError: except NameError:
pass pass
try: try:
userConfig userConfig
modulesetup['userConfig'] = userConfig modulesetup['userConfig'] = userConfig
except NameError: except NameError:
pass pass
try: try:
outputFileExtension outputFileExtension
modulesetup['outputFileExtension'] = outputFileExtension modulesetup['outputFileExtension'] = outputFileExtension
except NameError: except NameError:
pass pass
try: try:
inputSource inputSource
modulesetup['inputSource'] = inputSource modulesetup['inputSource'] = inputSource
except NameError: except NameError:
pass pass
return modulesetup return modulesetup
def version(): def version():
moduleinfo['config'] = moduleconfig moduleinfo['config'] = moduleconfig
return moduleinfo return moduleinfo
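
Note: the lite export returns its JSON as a base64-encoded string in the 'data' field. A minimal sketch of reversing that on the consumer side; the function name is illustrative:

import base64
import json

def decode_lite_export(result):
    # 'data' holds the base64 of the JSON text produced by handler().
    return json.loads(base64.b64decode(result["data"]).decode("utf-8"))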

View File

@ -13,7 +13,7 @@ types_to_use = ['regkey', 'regkey|value', 'mutex', 'windows-service-displayname'
userConfig = { userConfig = {
}; }
moduleconfig = [] moduleconfig = []
inputSource = ['event'] inputSource = ['event']
@ -26,6 +26,7 @@ moduleinfo = {'version': '1.0', 'author': 'Julien Bachmann, Hacknowledge',
'description': 'OSQuery query export module', 'description': 'OSQuery query export module',
'module-type': ['export']} 'module-type': ['export']}
def handle_regkey(value): def handle_regkey(value):
rep = {'HKCU': 'HKEY_USERS\\%', 'HKLM': 'HKEY_LOCAL_MACHINE'} rep = {'HKCU': 'HKEY_USERS\\%', 'HKLM': 'HKEY_LOCAL_MACHINE'}
rep = dict((re.escape(k), v) for k, v in rep.items()) rep = dict((re.escape(k), v) for k, v in rep.items())
@ -33,6 +34,7 @@ def handle_regkey(value):
value = pattern.sub(lambda m: rep[re.escape(m.group(0))], value) value = pattern.sub(lambda m: rep[re.escape(m.group(0))], value)
return 'SELECT * FROM registry WHERE path LIKE \'%s\';' % value return 'SELECT * FROM registry WHERE path LIKE \'%s\';' % value
def handle_regkeyvalue(value): def handle_regkeyvalue(value):
key, value = value.split('|') key, value = value.split('|')
rep = {'HKCU': 'HKEY_USERS\\%', 'HKLM': 'HKEY_LOCAL_MACHINE'} rep = {'HKCU': 'HKEY_USERS\\%', 'HKLM': 'HKEY_LOCAL_MACHINE'}
@ -41,27 +43,33 @@ def handle_regkeyvalue(value):
key = pattern.sub(lambda m: rep[re.escape(m.group(0))], key) key = pattern.sub(lambda m: rep[re.escape(m.group(0))], key)
return 'SELECT * FROM registry WHERE path LIKE \'%s\' AND data LIKE \'%s\';' % (key, value) return 'SELECT * FROM registry WHERE path LIKE \'%s\' AND data LIKE \'%s\';' % (key, value)
def handle_mutex(value): def handle_mutex(value):
return 'SELECT * FROM winbaseobj WHERE object_name LIKE \'%s\';' % value return 'SELECT * FROM winbaseobj WHERE object_name LIKE \'%s\';' % value
def handle_service(value): def handle_service(value):
return 'SELECT * FROM services WHERE display_name LIKE \'%s\' OR name like \'%s\';' % (value, value) return 'SELECT * FROM services WHERE display_name LIKE \'%s\' OR name like \'%s\';' % (value, value)
def handle_yara(value): def handle_yara(value):
return 'not implemented yet, not sure it\'s easily feasible w/o dropping the sig on the hosts first' return 'not implemented yet, not sure it\'s easily feasible w/o dropping the sig on the hosts first'
def handle_scheduledtask(value): def handle_scheduledtask(value):
return 'SELECT * FROM scheduled_tasks WHERE name LIKE \'%s\';' % value return 'SELECT * FROM scheduled_tasks WHERE name LIKE \'%s\';' % value
handlers = { handlers = {
'regkey' : handle_regkey, 'regkey': handle_regkey,
'regkey|value' : handle_regkeyvalue, 'regkey|value': handle_regkeyvalue,
'mutex' : handle_mutex, 'mutex': handle_mutex,
'windows-service-displayname' : handle_service, 'windows-service-displayname': handle_service,
'windows-scheduled-task' : handle_scheduledtask, 'windows-scheduled-task': handle_scheduledtask,
'yara' : handle_yara 'yara': handle_yara
} }
def handler(q=False): def handler(q=False):
if q is False: if q is False:
return False return False
@ -73,7 +81,7 @@ def handler(q=False):
for attribute in event["Attribute"]: for attribute in event["Attribute"]:
if attribute['type'] in types_to_use: if attribute['type'] in types_to_use:
output = output + handlers[attribute['type']](attribute['value']) + '\n' output = output + handlers[attribute['type']](attribute['value']) + '\n'
r = {"response":[], "data":str(base64.b64encode(bytes(output, 'utf-8')), 'utf-8')} r = {"response": [], "data": str(base64.b64encode(bytes(output, 'utf-8')), 'utf-8')}
return r return r

View File

@ -126,7 +126,7 @@ class ReportGenerator():
summary = a.value summary = a.value
return title.format(internal_id=internal_id, title=self.misp_event.info, return title.format(internal_id=internal_id, title=self.misp_event.info,
summary=summary) summary=summary)
def asciidoc(self, lang='en'): def asciidoc(self, lang='en'):
self.report += self.title() self.report += self.title()

View File

@ -1,13 +1,12 @@
import json import json
import base64 import base64
import csv
misperrors = {'error': 'Error'} misperrors = {'error': 'Error'}
userConfig = { userConfig = {
}; }
moduleconfig = [] moduleconfig = []
@ -28,9 +27,9 @@ def handler(q=False):
if q is False: if q is False:
return False return False
r = {'results': []} r = {'results': []}
result = json.loads(q) result = json.loads(q) # noqa
output = ''; # Insert your magic here! output = '' # Insert your magic here!
r = {"data":base64.b64encode(output.encode('utf-8')).decode('utf-8')} r = {"data": base64.b64encode(output.encode('utf-8')).decode('utf-8')}
return r return r

View File

@ -49,9 +49,7 @@ def handler(q=False):
if q is False or not q: if q is False or not q:
return False return False
request = json.loads(q) request = json.loads(q)
response = io.StringIO() response = io.StringIO()
writer = csv.DictWriter(response, fieldnames=["value", "itype", "tags"]) writer = csv.DictWriter(response, fieldnames=["value", "itype", "tags"])

View File

@ -1,3 +1,3 @@
from . import _vmray from . import _vmray # noqa
__all__ = ['vmray_import', 'ocr', 'cuckooimport', 'goamlimport', 'email_import', 'mispjson', 'openiocimport', 'threatanalyzer_import', 'csvimport'] __all__ = ['vmray_import', 'ocr', 'cuckooimport', 'goamlimport', 'email_import', 'mispjson', 'openiocimport', 'threatanalyzer_import', 'csvimport']

View File

@ -1,5 +1,7 @@
# -*- coding: utf-8 -*- # -*- coding: utf-8 -*-
import json, os, base64 import json
import os
import base64
import pymisp import pymisp
misperrors = {'error': 'Error'} misperrors = {'error': 'Error'}
@ -9,18 +11,19 @@ moduleinfo = {'version': '0.1', 'author': 'Christian Studer',
moduleconfig = [] moduleconfig = []
inputSource = ['file'] inputSource = ['file']
userConfig = {'header': { userConfig = {'header': {
'type': 'String', 'type': 'String',
'message': 'Define the header of the csv file, with types (included in MISP attribute types or attribute fields) separated by commas.\nFor fields that do not match these types, please use space or simply nothing between commas.\nFor instance: ip-src,domain, ,timestamp'}, 'message': 'Define the header of the csv file, with types (included in MISP attribute types or attribute fields) separated by commas.\nFor fields that do not match these types, please use space or simply nothing between commas.\nFor instance: ip-src,domain, ,timestamp'},
'has_header':{ 'has_header': {
'type': 'Boolean', 'type': 'Boolean',
'message': 'Tick this box ONLY if there is a header line, NOT COMMENTED, in the file (which will be skipped atm).' 'message': 'Tick this box ONLY if there is a header line, NOT COMMENTED, in the file (which will be skipped atm).'
}} }}
duplicatedFields = {'mispType': {'mispComment': 'comment'}, duplicatedFields = {'mispType': {'mispComment': 'comment'},
'attrField': {'attrComment': 'comment'}} 'attrField': {'attrComment': 'comment'}}
attributesFields = ['type', 'value', 'category', 'to_ids', 'comment', 'distribution'] attributesFields = ['type', 'value', 'category', 'to_ids', 'comment', 'distribution']
delimiters = [',', ';', '|', '/', '\t', ' '] delimiters = [',', ';', '|', '/', '\t', ' ']
class CsvParser(): class CsvParser():
def __init__(self, header, has_header): def __init__(self, header, has_header):
self.header = header self.header = header
@ -32,17 +35,17 @@ class CsvParser():
return_data = [] return_data = []
if self.fields_number == 1: if self.fields_number == 1:
for line in data: for line in data:
l = line.split('#')[0].strip() line = line.split('#')[0].strip()
if l: if line:
return_data.append(l) return_data.append(line)
self.delimiter = None self.delimiter = None
else: else:
self.delimiter_count = dict([(d, 0) for d in delimiters]) self.delimiter_count = dict([(d, 0) for d in delimiters])
for line in data: for line in data:
l = line.split('#')[0].strip() line = line.split('#')[0].strip()
if l: if line:
self.parse_delimiter(l) self.parse_delimiter(line)
return_data.append(l) return_data.append(line)
# find which delimiter is used # find which delimiter is used
self.delimiter = self.find_delimiter() self.delimiter = self.find_delimiter()
self.data = return_data[1:] if self.has_header else return_data self.data = return_data[1:] if self.has_header else return_data
@ -115,6 +118,7 @@ class CsvParser():
# return list of indexes of the misp types, list of the misp types, remaining fields that will be attribute fields # return list of indexes of the misp types, list of the misp types, remaining fields that will be attribute fields
return list2pop, misp, list(reversed(head)) return list2pop, misp, list(reversed(head))
def handler(q=False): def handler(q=False):
if q is False: if q is False:
return False return False
@ -138,6 +142,7 @@ def handler(q=False):
r = {'results': csv_parser.attributes} r = {'results': csv_parser.attributes}
return r return r
def introspection(): def introspection():
modulesetup = {} modulesetup = {}
try: try:
@ -152,6 +157,7 @@ def introspection():
pass pass
return modulesetup return modulesetup
def version(): def version():
moduleinfo['config'] = moduleconfig moduleinfo['config'] = moduleconfig
return moduleinfo return moduleinfo
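
Aside: the parser above guesses the delimiter by counting candidates per line, and the hunks rename the ambiguous variable l to line (flake8 E741). The standard library offers a similar heuristic; a small sketch with a made-up two-line sample:

import csv

sample = "ip-src;domain;timestamp\n1.2.3.4;example.com;1544531349\n"
dialect = csv.Sniffer().sniff(sample, delimiters=";,|/\t ")
print(dialect.delimiter)   # expected: ';'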

View File

@ -1,7 +1,5 @@
import json import json
import logging import base64
import sys
import base64
misperrors = {'error': 'Error'} misperrors = {'error': 'Error'}
userConfig = {} userConfig = {}
@ -13,160 +11,163 @@ moduleinfo = {'version': '0.1', 'author': 'Victor van der Stoep',
moduleconfig = [] moduleconfig = []
def handler(q=False): def handler(q=False):
# Just in case we have no data # Just in case we have no data
if q is False: if q is False:
return False return False
# The return value # The return value
r = {'results': []} r = {'results': []}
# Load up that JSON # Load up that JSON
q = json.loads(q) q = json.loads(q)
data = base64.b64decode(q.get("data")).decode('utf-8') data = base64.b64decode(q.get("data")).decode('utf-8')
# If something really weird happened # If something really weird happened
if not data: if not data:
return json.dumps({"success": 0}) return json.dumps({"success": 0})
data = json.loads(data) data = json.loads(data)
# Get characteristics of file # Get characteristics of file
targetFile = data['target']['file'] targetFile = data['target']['file']
# Process the inital binary # Process the inital binary
processBinary(r, targetFile, initial = True) processBinary(r, targetFile, initial=True)
# Get binary information for dropped files # Get binary information for dropped files
if(data.get('dropped')): if(data.get('dropped')):
for droppedFile in data['dropped']: for droppedFile in data['dropped']:
processBinary(r, droppedFile, dropped = True) processBinary(r, droppedFile, dropped=True)
# Add malscore to results # Add malscore to results
r["results"].append({ r["results"].append({
"values": "Malscore: {} ".format(data['malscore']), "values": "Malscore: {} ".format(data['malscore']),
"types": "comment", "types": "comment",
"categories": "Payload delivery", "categories": "Payload delivery",
"comment": "Cuckoo analysis: MalScore" "comment": "Cuckoo analysis: MalScore"
}) })
# Add virustotal data, if exists # Add virustotal data, if exists
if(data.get('virustotal')): if(data.get('virustotal')):
processVT(r, data['virustotal']) processVT(r, data['virustotal'])
# Add network information, should be improved # Add network information, should be improved
processNetwork(r, data['network']) processNetwork(r, data['network'])
# Add behavioral information # Add behavioral information
processSummary(r, data['behavior']['summary']) processSummary(r, data['behavior']['summary'])
# Return # Return
return r return r
def processSummary(r, summary): def processSummary(r, summary):
r["results"].append({ r["results"].append({
"values": summary['mutexes'], "values": summary['mutexes'],
"types": "mutex", "types": "mutex",
"categories": "Artifacts dropped", "categories": "Artifacts dropped",
"comment": "Cuckoo analysis: Observed mutexes" "comment": "Cuckoo analysis: Observed mutexes"
}) })
def processVT(r, virustotal): def processVT(r, virustotal):
category = "Antivirus detection" category = "Antivirus detection"
comment = "VirusTotal analysis" comment = "VirusTotal analysis"
if(virustotal.get('permalink')): if(virustotal.get('permalink')):
r["results"].append({ r["results"].append({
"values": virustotal['permalink'], "values": virustotal['permalink'],
"types": "link", "types": "link",
"categories": category, "categories": category,
"comments": comment + " - Permalink" "comments": comment + " - Permalink"
}) })
if(virustotal.get('total')): if(virustotal.get('total')):
r["results"].append({ r["results"].append({
"values": "VirusTotal detection rate {}/{}".format( "values": "VirusTotal detection rate {}/{}".format(
virustotal['positives'], virustotal['positives'],
virustotal['total'] virustotal['total']
), ),
"types": "comment", "types": "comment",
"categories": category, "categories": category,
"comment": comment
})
else:
r["results"].append({
"values": "Sample not detected on VirusTotal",
"types": "comment",
"categories": category,
"comment": comment "comment": comment
}) })
else:
r["results"].append({
"values": "Sample not detected on VirusTotal",
"types": "comment",
"categories": category,
"comment": comment
})
def processNetwork(r, network): def processNetwork(r, network):
category = "Network activity" category = "Network activity"
for host in network['hosts']: for host in network['hosts']:
r["results"].append({ r["results"].append({
"values": host['ip'], "values": host['ip'],
"types": "ip-dst", "types": "ip-dst",
"categories": category, "categories": category,
"comment": "Cuckoo analysis: Observed network traffic" "comment": "Cuckoo analysis: Observed network traffic"
}) })
def processBinary(r, target, initial = False, dropped = False): def processBinary(r, target, initial=False, dropped=False):
if(initial): if(initial):
comment = "Cuckoo analysis: Initial file" comment = "Cuckoo analysis: Initial file"
category = "Payload delivery" category = "Payload delivery"
elif(dropped): elif(dropped):
category = "Artifacts dropped" category = "Artifacts dropped"
comment = "Cuckoo analysis: Dropped file" comment = "Cuckoo analysis: Dropped file"
r["results"].append({ r["results"].append({
"values": target['name'], "values": target['name'],
"types": "filename", "types": "filename",
"categories": category, "categories": category,
"comment": comment "comment": comment
}) })
r["results"].append({ r["results"].append({
"values": target['md5'], "values": target['md5'],
"types": "md5", "types": "md5",
"categories": category, "categories": category,
"comment": comment "comment": comment
}) })
r["results"].append({ r["results"].append({
"values": target['sha1'], "values": target['sha1'],
"types": "sha1", "types": "sha1",
"categories": category, "categories": category,
"comment": comment "comment": comment
}) })
r["results"].append({ r["results"].append({
"values": target['sha256'], "values": target['sha256'],
"types": "sha256", "types": "sha256",
"categories": category, "categories": category,
"comment": comment "comment": comment
}) })
r["results"].append({ r["results"].append({
"values": target['sha512'], "values": target['sha512'],
"types": "sha512", "types": "sha512",
"categories": category, "categories": category,
"comment": comment "comment": comment
}) })
# todo : add file size? # todo : add file size?
if(target.get('guest_paths')): if(target.get('guest_paths')):
r["results"].append({ r["results"].append({
"values": target['guest_paths'], "values": target['guest_paths'],
"types": "filename", "types": "filename",
"categories": "Payload installation", "categories": "Payload installation",
"comment": comment + " - Path" "comment": comment + " - Path"
}) })
def introspection(): def introspection():
modulesetup = {} modulesetup = {}
@ -187,10 +188,11 @@ def version():
moduleinfo['config'] = moduleconfig moduleinfo['config'] = moduleconfig
return moduleinfo return moduleinfo
if __name__ == '__main__': if __name__ == '__main__':
x = open('test.json', 'r') x = open('test.json', 'r')
q = [] q = []
q['data'] = x.read() q['data'] = x.read()
q = base64.base64encode(q) q = base64.base64encode(q)
handler(q) handler(q)
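
Note: the __main__ block above builds q as a list and then indexes it like a dict, and calls base64.base64encode, which does not exist, so it cannot run as written. A sketch of a test harness matching what handler() actually expects, a JSON string whose 'data' field is the base64 of the report file (assumed here to be test.json, as in the block above); it belongs inside this module so handler is in scope:

import base64
import json

if __name__ == '__main__':
    with open('test.json', 'rb') as report:
        payload = json.dumps({"data": base64.b64encode(report.read()).decode('utf-8')})
    print(handler(q=payload))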

View File

@ -115,7 +115,7 @@ def handler(q=False):
email_targets = set() email_targets = set()
for rec in received: for rec in received:
try: try:
email_check = re.search("for\s(.*@.*);", rec).group(1) email_check = re.search(r"for\s(.*@.*);", rec).group(1)
email_check = email_check.strip(' <>') email_check = email_check.strip(' <>')
email_targets.add(parseaddr(email_check)[1]) email_targets.add(parseaddr(email_check)[1])
except (AttributeError): except (AttributeError):
@ -166,7 +166,7 @@ def handler(q=False):
for ext in zipped_files: for ext in zipped_files:
if filename.endswith(ext) is True: if filename.endswith(ext) is True:
zipped_filetype = True zipped_filetype = True
if zipped_filetype == False: if not zipped_filetype:
try: try:
attachment_files += get_zipped_contents(filename, attachment_data) attachment_files += get_zipped_contents(filename, attachment_data)
except RuntimeError: # File is encrypted with a password except RuntimeError: # File is encrypted with a password
@ -294,7 +294,7 @@ def get_zip_passwords(message):
# Grab any strings that are marked off by special chars # Grab any strings that are marked off by special chars
marking_chars = [["\'", "\'"], ['"', '"'], ['[', ']'], ['(', ')']] marking_chars = [["\'", "\'"], ['"', '"'], ['[', ']'], ['(', ')']]
for char_set in marking_chars: for char_set in marking_chars:
regex = re.compile("""\{0}([^\{1}]*)\{1}""".format(char_set[0], char_set[1])) regex = re.compile(r"""\{0}([^\{1}]*)\{1}""".format(char_set[0], char_set[1]))
marked_off = re.findall(regex, raw_text) marked_off = re.findall(regex, raw_text)
possible_passwords += marked_off possible_passwords += marked_off
@ -397,6 +397,7 @@ def version():
moduleinfo['config'] = moduleconfig moduleinfo['config'] = moduleconfig
return moduleinfo return moduleinfo
if __name__ == '__main__': if __name__ == '__main__':
with open('tests/test_no_attach.eml', 'r') as email_file: with open('tests/test_no_attach.eml', 'r') as email_file:
handler(q=email_file.read()) handler(q=email_file.read())
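
Aside: the hunks above add the r prefix to the regular expressions (flake8 W605, invalid escape sequence). A short sketch of the extraction the first pattern performs, on a made-up Received header:

import re

received = "from mail.example.com by mx.example.net for <alice@example.org>; Tue, 11 Dec 2018 15:29:09 +0100"
# The raw string keeps \s as a literal backslash-s for the re module; without the
# r prefix, Python 3.6+ flags "\s" as an invalid escape sequence.
match = re.search(r"for\s(.*@.*);", received)
if match:
    print(match.group(1).strip(" <>"))   # alice@example.org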

View File

@ -1,6 +1,7 @@
import json, datetime, time, base64 import json
import time
import base64
import xml.etree.ElementTree as ET import xml.etree.ElementTree as ET
from collections import defaultdict
from pymisp import MISPEvent, MISPObject from pymisp import MISPEvent, MISPObject
misperrors = {'error': 'Error'} misperrors = {'error': 'Error'}
@ -11,12 +12,12 @@ moduleconfig = []
mispattributes = {'inputSource': ['file'], 'output': ['MISP objects']} mispattributes = {'inputSource': ['file'], 'output': ['MISP objects']}
t_from_objects = {'nodes': ['from_person', 'from_account', 'from_entity'], t_from_objects = {'nodes': ['from_person', 'from_account', 'from_entity'],
'leaves': ['from_funds_code', 'from_country']} 'leaves': ['from_funds_code', 'from_country']}
t_to_objects = {'nodes': ['to_person', 'to_account', 'to_entity'], t_to_objects = {'nodes': ['to_person', 'to_account', 'to_entity'],
'leaves': ['to_funds_code', 'to_country']} 'leaves': ['to_funds_code', 'to_country']}
t_person_objects = {'nodes': ['addresses'], t_person_objects = {'nodes': ['addresses'],
'leaves': ['first_name', 'middle_name', 'last_name', 'gender', 'title', 'mothers_name', 'birthdate', 'leaves': ['first_name', 'middle_name', 'last_name', 'gender', 'title', 'mothers_name', 'birthdate',
'passport_number', 'passport_country', 'id_number', 'birth_place', 'alias', 'nationality1']} 'passport_number', 'passport_country', 'id_number', 'birth_place', 'alias', 'nationality1']}
t_account_objects = {'nodes': ['signatory'], t_account_objects = {'nodes': ['signatory'],
'leaves': ['institution_name', 'institution_code', 'swift', 'branch', 'non_banking_insitution', 'leaves': ['institution_name', 'institution_code', 'swift', 'branch', 'non_banking_insitution',
'account', 'currency_code', 'account_name', 'iban', 'client_number', 'opened', 'closed', 'account', 'currency_code', 'account_name', 'iban', 'client_number', 'opened', 'closed',
@ -51,7 +52,7 @@ t_account_mapping = {'misp_name': 'bank-account', 'institution_name': 'instituti
t_person_mapping = {'misp_name': 'person', 'comments': 'text', 'first_name': 'first-name', 'middle_name': 'middle-name', t_person_mapping = {'misp_name': 'person', 'comments': 'text', 'first_name': 'first-name', 'middle_name': 'middle-name',
'last_name': 'last-name', 'title': 'title', 'mothers_name': 'mothers-name', 'alias': 'alias', 'last_name': 'last-name', 'title': 'title', 'mothers_name': 'mothers-name', 'alias': 'alias',
'birthdate': 'date-of-birth', 'birth_place': 'place-of-birth', 'gender': 'gender','nationality1': 'nationality', 'birthdate': 'date-of-birth', 'birth_place': 'place-of-birth', 'gender': 'gender', 'nationality1': 'nationality',
'passport_number': 'passport-number', 'passport_country': 'passport-country', 'ssn': 'social-security-number', 'passport_number': 'passport-number', 'passport_country': 'passport-country', 'ssn': 'social-security-number',
'id_number': 'identity-card-number'} 'id_number': 'identity-card-number'}
@ -73,6 +74,7 @@ goAMLmapping = {'from_account': t_account_mapping, 'to_account': t_account_mappi
nodes_to_ignore = ['addresses', 'signatory'] nodes_to_ignore = ['addresses', 'signatory']
relationship_to_keep = ['signatory', 't_from', 't_from_my_client', 't_to', 't_to_my_client', 'address'] relationship_to_keep = ['signatory', 't_from', 't_from_my_client', 't_to', 't_to_my_client', 'address']
class GoAmlParser(): class GoAmlParser():
def __init__(self): def __init__(self):
self.misp_event = MISPEvent() self.misp_event = MISPEvent()
@ -145,6 +147,7 @@ class GoAmlParser():
to_country_attribute = {'object_relation': 'to-country', 'value': to_country} to_country_attribute = {'object_relation': 'to-country', 'value': to_country}
misp_object.add_attribute(**to_country_attribute) misp_object.add_attribute(**to_country_attribute)
def handler(q=False): def handler(q=False):
if q is False: if q is False:
return False return False
@ -157,16 +160,18 @@ def handler(q=False):
aml_parser = GoAmlParser() aml_parser = GoAmlParser()
try: try:
aml_parser.read_xml(data) aml_parser.read_xml(data)
except: except Exception:
misperrors['error'] = "Impossible to read XML data" misperrors['error'] = "Impossible to read XML data"
return misperrors return misperrors
aml_parser.parse_xml() aml_parser.parse_xml()
r = {'results': [obj.to_json() for obj in aml_parser.misp_event.objects]} r = {'results': [obj.to_json() for obj in aml_parser.misp_event.objects]}
return r return r
def introspection(): def introspection():
return mispattributes return mispattributes
def version(): def version():
moduleinfo['config'] = moduleconfig moduleinfo['config'] = moduleconfig
return moduleinfo return moduleinfo


@ -2,7 +2,7 @@ import json
import base64 import base64
misperrors = {'error': 'Error'} misperrors = {'error': 'Error'}
userConfig = { }; userConfig = {}
inputSource = ['file'] inputSource = ['file']
@ -19,23 +19,24 @@ def handler(q=False):
r = {'results': []} r = {'results': []}
request = json.loads(q) request = json.loads(q)
try: try:
mfile = base64.b64decode(request["data"]).decode('utf-8') mfile = base64.b64decode(request["data"]).decode('utf-8')
misp = json.loads(mfile) misp = json.loads(mfile)
event = misp['response'][0]['Event'] event = misp['response'][0]['Event']
for a in event["Attribute"]: for a in event["Attribute"]:
tmp = {} tmp = {}
tmp["values"] = a["value"] tmp["values"] = a["value"]
tmp["categories"] = a["category"] tmp["categories"] = a["category"]
tmp["types"] = a["type"] tmp["types"] = a["type"]
tmp["to_ids"] = a["to_ids"] tmp["to_ids"] = a["to_ids"]
tmp["comment"] = a["comment"] tmp["comment"] = a["comment"]
if a.get("data"): if a.get("data"):
tmp["data"] = a["data"] tmp["data"] = a["data"]
r['results'].append(tmp) r['results'].append(tmp)
except: except Exception:
pass pass
return r return r
def introspection(): def introspection():
modulesetup = {} modulesetup = {}
try: try:
@ -55,6 +56,7 @@ def version():
moduleinfo['config'] = moduleconfig moduleinfo['config'] = moduleconfig
return moduleinfo return moduleinfo
if __name__ == '__main__': if __name__ == '__main__':
x = open('test.json', 'r') x = open('test.json', 'r')
r = handler(q=x.read()) r = handler(q=x.read())
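
For context on the block being re-indented here, this is roughly the round trip the handler performs: decode the base64 upload, then copy a fixed set of attribute fields. A standalone sketch with a hand-built event instead of a real MISP export:

    import base64
    import json

    # Hypothetical MISP export, shaped like the decoded upload above.
    event_json = json.dumps({'response': [{'Event': {'Attribute': [
        {'value': '8.8.8.8', 'category': 'Network activity', 'type': 'ip-dst',
         'to_ids': True, 'comment': 'open resolver'}]}}]})
    encoded = base64.b64encode(event_json.encode('utf-8'))

    mfile = base64.b64decode(encoded).decode('utf-8')
    event = json.loads(mfile)['response'][0]['Event']
    results = []
    for a in event['Attribute']:
        tmp = {'values': a['value'], 'categories': a['category'], 'types': a['type'],
               'to_ids': a['to_ids'], 'comment': a['comment']}
        if a.get('data'):                     # only samples/attachments carry data
            tmp['data'] = a['data']
        results.append(tmp)
    print({'results': results})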


@ -14,7 +14,7 @@ ch.setFormatter(formatter)
log.addHandler(ch) log.addHandler(ch)
misperrors = {'error': 'Error'} misperrors = {'error': 'Error'}
userConfig = {}; userConfig = {}
inputSource = ['file'] inputSource = ['file']
@ -55,17 +55,17 @@ def handler(q=False):
if document.format == 'PDF': if document.format == 'PDF':
with document as pdf: with document as pdf:
# Get number of pages # Get number of pages
pages=len(pdf.sequence) pages = len(pdf.sequence)
log.debug("PDF with {} page(s) detected".format(pages)) log.debug("PDF with {} page(s) detected".format(pages))
# Create new image object where the height will be the number of pages. With huge PDFs this will overflow, break, consume silly memory etc… # Create new image object where the height will be the number of pages. With huge PDFs this will overflow, break, consume silly memory etc…
img = WImage(width=pdf.width, height=pdf.height * pages) img = WImage(width=pdf.width, height=pdf.height * pages)
# Cycle through pages and stitch it together to one big file # Cycle through pages and stitch it together to one big file
for p in range(pages): for p in range(pages):
log.debug("Stitching page {}".format(p+1)) log.debug("Stitching page {}".format(p + 1))
image = img.composite(pdf.sequence[p], top=pdf.height * p, left=0) image = img.composite(pdf.sequence[p], top=pdf.height * p, left=0)
# Create a png blob # Create a png blob
image = img.make_blob('png') image = img.make_blob('png')
log.debug("Final image size is {}x{}".format(pdf.width, pdf.height*(p+1))) log.debug("Final image size is {}x{}".format(pdf.width, pdf.height * (p + 1)))
else: else:
image = document image = document
@ -78,7 +78,6 @@ def handler(q=False):
misperrors['error'] = "Corrupt or not an image file." misperrors['error'] = "Corrupt or not an image file."
return misperrors return misperrors
ocrized = image_to_string(im) ocrized = image_to_string(im)
freetext = {} freetext = {}
@ -107,6 +106,7 @@ def version():
moduleinfo['config'] = moduleconfig moduleinfo['config'] = moduleconfig
return moduleinfo return moduleinfo
if __name__ == '__main__': if __name__ == '__main__':
x = open('test.json', 'r') x = open('test.json', 'r')
handler(q=x.read()) handler(q=x.read())
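
The operators getting their pep8 spacing here (pdf.height * pages, pdf.height * p) implement the page-stitching step: every PDF page is composited onto one tall canvas before OCR. A standalone sketch of just that step, assuming Wand/ImageMagick (with Ghostscript for PDF support) is installed; sample.pdf is a placeholder filename:

    from wand.image import Image as WImage  # needs Wand + ImageMagick (+ Ghostscript)

    pdf_path = 'sample.pdf'                  # placeholder: any local multi-page PDF
    with WImage(filename=pdf_path) as pdf:
        pages = len(pdf.sequence)
        # One tall canvas: each page is pasted below the previous one so the
        # OCR step only has to handle a single image.
        with WImage(width=pdf.width, height=pdf.height * pages) as img:
            for p in range(pages):
                img.composite(pdf.sequence[p], top=pdf.height * p, left=0)
            png_blob = img.make_blob('png')
    print('stitched {} page(s) into {} bytes of PNG'.format(pages, len(png_blob)))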


@ -1,28 +1,27 @@
import json import json
import base64 import base64
import csv
misperrors = {'error': 'Error'} misperrors = {'error': 'Error'}
userConfig = { userConfig = {
'number1': { 'number1': {
'type': 'Integer', 'type': 'Integer',
'regex': '/^[0-4]$/i', 'regex': '/^[0-4]$/i',
'errorMessage': 'Expected a number in range [0-4]', 'errorMessage': 'Expected a number in range [0-4]',
'message': 'Column number used for value' 'message': 'Column number used for value'
}, },
'some_string': { 'some_string': {
'type': 'String', 'type': 'String',
'message': 'A text field' 'message': 'A text field'
}, },
'boolean_field': { 'boolean_field': {
'type': 'Boolean', 'type': 'Boolean',
'message': 'Boolean field test' 'message': 'Boolean field test'
}, },
'comment': { 'comment': {
'type': 'Integer', 'type': 'Integer',
'message': 'Column number used for comment' 'message': 'Column number used for comment'
} }
}; }
inputSource = ['file', 'paste'] inputSource = ['file', 'paste']
@ -39,8 +38,8 @@ def handler(q=False):
r = {'results': []} r = {'results': []}
request = json.loads(q) request = json.loads(q)
request["data"] = base64.b64decode(request["data"]) request["data"] = base64.b64decode(request["data"])
fields = ["value", "category", "type", "comment"] # fields = ["value", "category", "type", "comment"]
r = {"results":[{"values":["192.168.56.1"], "types":["ip-src"], "categories":["Network activity"]}]} r = {"results": [{"values": ["192.168.56.1"], "types":["ip-src"], "categories": ["Network activity"]}]}
return r return r
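
This module is only a test stub, which is why flake8 flags the unused import csv and the unused fields list. The request/response shape it exercises looks roughly like this; the CSV payload below is invented:

    import base64
    import json

    # Made-up upload: what MISP would POST to the module for a one-line CSV paste.
    query = json.dumps({'data': base64.b64encode(b'192.168.56.1,ip-src\n').decode()})

    request = json.loads(query)
    request['data'] = base64.b64decode(request['data'])   # raw bytes of the paste

    # The stub ignores the decoded bytes and always answers with one attribute;
    # a real import module would turn each CSV row into a dict like this one.
    r = {'results': [{'values': ['192.168.56.1'],
                      'types': ['ip-src'],
                      'categories': ['Network activity']}]}
    print(r)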


@ -90,7 +90,7 @@ def handler(q=False):
'values': sample_filename, 'values': sample_filename,
'data': base64.b64encode(file_data).decode(), 'data': base64.b64encode(file_data).decode(),
'type': 'malware-sample', 'categories': ['Payload delivery', 'Artifacts dropped'], 'to_ids': True, 'comment': ''}) 'type': 'malware-sample', 'categories': ['Payload delivery', 'Artifacts dropped'], 'to_ids': True, 'comment': ''})
except Exception as e: except Exception:
# no 'sample' in archive, might be an url analysis, just ignore # no 'sample' in archive, might be an url analysis, just ignore
pass pass
@ -118,7 +118,7 @@ def process_analysis_json(analysis_json):
# this will always create a list, even with only one item # this will always create a list, even with only one item
if isinstance(process['connection_section']['connection'], dict): if isinstance(process['connection_section']['connection'], dict):
process['connection_section']['connection'] = [process['connection_section']['connection']] process['connection_section']['connection'] = [process['connection_section']['connection']]
# iterate over each entry # iterate over each entry
for connection_section_connection in process['connection_section']['connection']: for connection_section_connection in process['connection_section']['connection']:
# compensate for absurd behavior of the data format: if one entry = immediately the dict, if multiple entries = list containing dicts # compensate for absurd behavior of the data format: if one entry = immediately the dict, if multiple entries = list containing dicts
@ -126,7 +126,7 @@ def process_analysis_json(analysis_json):
for subsection in ['http_command', 'http_header']: for subsection in ['http_command', 'http_header']:
if isinstance(connection_section_connection[subsection], dict): if isinstance(connection_section_connection[subsection], dict):
connection_section_connection[subsection] = [connection_section_connection[subsection]] connection_section_connection[subsection] = [connection_section_connection[subsection]]
if 'name_to_ip' in connection_section_connection: # TA 6.1 data format if 'name_to_ip' in connection_section_connection: # TA 6.1 data format
connection_section_connection['@remote_ip'] = connection_section_connection['name_to_ip']['@result_addresses'] connection_section_connection['@remote_ip'] = connection_section_connection['name_to_ip']['@result_addresses']
connection_section_connection['@remote_hostname'] = connection_section_connection['name_to_ip']['@request_name'] connection_section_connection['@remote_hostname'] = connection_section_connection['name_to_ip']['@request_name']
@ -171,7 +171,7 @@ def process_analysis_json(analysis_json):
if ':' in val: if ':' in val:
try: try:
val_port = int(val.split(':')[1]) val_port = int(val.split(':')[1])
except ValueError as e: except ValueError:
val_port = False val_port = False
val_hostname = cleanup_hostname(val.split(':')[0]) val_hostname = cleanup_hostname(val.split(':')[0])
val_ip = cleanup_ip(val.split(':')[0]) val_ip = cleanup_ip(val.split(':')[0])
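
The dropped "as e" bindings silence pyflakes' unused-variable warning (F841); behaviour is unchanged. The dict-or-list normalisation visible in the middle of this diff is the more interesting pattern, sketched here with made-up connection data and a small as_list helper that the real module writes inline:

    # connection_section entries arrive either as a single dict or as a list of
    # dicts; normalising to a list keeps the iteration below uniform.
    def as_list(value):
        return [value] if isinstance(value, dict) else value

    single = {'connection': {'@remote_ip': '203.0.113.7'}}                 # made up
    several = {'connection': [{'@remote_ip': '203.0.113.7'},
                              {'@remote_ip': '198.51.100.9'}]}             # made up

    for section in (single, several):
        for connection in as_list(section['connection']):
            print(connection['@remote_ip'])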


@ -105,8 +105,8 @@ def handler(q=False):
url1 = "https://cloud.vmray.com/user/analysis/view?from_sample_id=%u" % sample_id url1 = "https://cloud.vmray.com/user/analysis/view?from_sample_id=%u" % sample_id
url2 = "&id=%u" % analysis_id url2 = "&id=%u" % analysis_id
url3 = "&sub=%2Freport%2Foverview.html" url3 = "&sub=%2Freport%2Foverview.html"
a_id["results"].append({ "values": url1 + url2 + url3, "types": "link" }) a_id["results"].append({"values": url1 + url2 + url3, "types": "link"})
vmray_results = {'results': vmray_results["results"] + a_id["results"] } vmray_results = {'results': vmray_results["results"] + a_id["results"]}
# Clean up (remove doubles) # Clean up (remove doubles)
if vti_patterns_found: if vti_patterns_found:
vmray_results = vmrayCleanup(vmray_results) vmray_results = vmrayCleanup(vmray_results)
@ -117,7 +117,7 @@ def handler(q=False):
else: else:
misperrors['error'] = "Unable to fetch sample id %u" % (sample_id) misperrors['error'] = "Unable to fetch sample id %u" % (sample_id)
return misperrors return misperrors
except: except Exception:
misperrors['error'] = "Unable to access VMRay API" misperrors['error'] = "Unable to access VMRay API"
return misperrors return misperrors
else: else:
@ -267,7 +267,7 @@ def vmrayGeneric(el, attr="", attrpos=1):
if content: if content:
if attr: if attr:
# Some elements are put between \"\" ; replace them to single # Some elements are put between \"\" ; replace them to single
content = content.replace("\"\"","\"") content = content.replace("\"\"", "\"")
content_split = content.split("\"") content_split = content.split("\"")
# Attributes are between open " and close "; so use > # Attributes are between open " and close "; so use >
if len(content_split) > attrpos: if len(content_split) > attrpos:
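
Beyond the comma and brace spacing, the quote handling at the end of this hunk shows how vmrayGeneric pulls an attribute out of a VTI description: collapse doubled quotes, split on the quote character, and take the field at attrpos. A tiny sketch with an invented description string:

    # Made-up VTI description; the value of interest sits between doubled quotes.
    content = 'Create file ""C:\\sample_dropper.exe"" in working directory'
    attrpos = 1

    content = content.replace('""', '"')   # collapse the doubled quotes first
    content_split = content.split('"')     # the attribute is between quote 1 and 2
    if len(content_split) > attrpos:
        print(content_split[attrpos])      # C:\sample_dropper.exe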