fix: Make pep8 happy

pipenv
Raphaël Vinot 2018-12-11 15:29:09 +01:00
parent d0aec62f1a
commit 8fc5b1fd1f
56 changed files with 695 additions and 638 deletions

View File

@ -13,7 +13,7 @@ python:
- "3.7-dev"
install:
- pip install -U nose codecov pytest
- pip install -U nose codecov pytest flake8
- pip install -U -r REQUIREMENTS
- pip install .
@ -30,6 +30,7 @@ script:
- sleep 5
- nosetests --with-coverage --cover-package=misp_modules
- kill -s INT $pid
- flake8 --ignore=E501,W503 misp_modules
after_success:
- coverage combine .coverage*
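The CI change above adds a flake8 pass over misp_modules but leaves E501 (line too long) and W503 (line break before a binary operator) unchecked. As a rough illustration, and with code invented for the example rather than taken from the repository, these are the two situations the ignore list tolerates:

    # E501 would normally flag any line longer than 79 characters, such as this one:
    description = "a deliberately long string literal that would trip E501 if the check were not on the ignore list"

    # W503 would normally flag a line break *before* a binary operator; ignoring it
    # allows the operator-first continuation style used later in this commit.
    total = (604800
             + 86400
             + 3600)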

View File

@ -38,14 +38,14 @@ from tornado.concurrent import run_on_executor
from concurrent.futures import ThreadPoolExecutor
try:
from .modules import *
from .modules import * # noqa
HAS_PACKAGE_MODULES = True
except Exception as e:
print(e)
HAS_PACKAGE_MODULES = False
try:
from .helpers import *
from .helpers import * # noqa
HAS_PACKAGE_HELPERS = True
except Exception as e:
print(e)
@ -148,7 +148,7 @@ def load_package_modules():
mhandlers = {}
modules = []
for path, module in sys.modules.items():
r = re.findall("misp_modules[.]modules[.](\w+)[.]([^_]\w+)", path)
r = re.findall(r"misp_modules[.]modules[.](\w+)[.]([^_]\w+)", path)
if r and len(r[0]) == 2:
moduletype, modulename = r[0]
mhandlers[modulename] = module
@ -159,6 +159,9 @@ def load_package_modules():
class ListModules(tornado.web.RequestHandler):
global loaded_modules
global mhandlers
def get(self):
ret = []
for module in loaded_modules:
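Two recurring fixes in this file are worth spelling out: the wildcard imports gain a "# noqa" marker so flake8 stops reporting F401/F403 for them, and the module-path regex becomes a raw string because "\w" is not a valid escape sequence in a normal string literal. A small sketch of both, following the same pattern as the code above:

    import re

    # Raw string: without the r prefix, "\w" triggers a DeprecationWarning in
    # current Python 3 (slated to become an error), which flake8 reports as W605.
    pattern = re.compile(r"misp_modules[.]modules[.](\w+)[.]([^_]\w+)")

    # Wildcard imports hide which names are actually used, so flake8 raises
    # F401/F403; the trailing "# noqa" suppresses checks for that single line.
    # from .modules import *  # noqa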

View File

@ -33,7 +33,7 @@ def selftest(enable=True):
r = redis.StrictRedis(host=hostname, port=port, db=db)
try:
r.ping()
except:
except Exception:
return 'Redis not running or not installed. Helper will be disabled.'
@ -62,6 +62,7 @@ def flush():
returncode = r.flushdb()
return returncode
if __name__ == "__main__":
import sys
if selftest() is not None:
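The bare "except:" clauses replaced throughout this commit are what flake8 reports as E722; a bare except also swallows KeyboardInterrupt and SystemExit, which "except Exception" does not. A minimal sketch of the pattern used by selftest() above (the function name redis_is_up is illustrative only):

    import redis

    def redis_is_up(hostname='127.0.0.1', port=6379, db=0):
        try:
            redis.StrictRedis(host=hostname, port=port, db=db).ping()
            return True
        except Exception:
            # Still catches connection errors, but no longer traps
            # KeyboardInterrupt or SystemExit the way a bare "except:" would.
            return False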

View File

@ -1,3 +1,3 @@
from .expansion import *
from .import_mod import *
from .export_mod import *
from .expansion import * # noqa
from .import_mod import * # noqa
from .export_mod import * # noqa

View File

@ -1,4 +1,4 @@
from . import _vmray
from . import _vmray # noqa
__all__ = ['vmray_submit', 'asn_history', 'circl_passivedns', 'circl_passivessl',
'countrycode', 'cve', 'dns', 'btc_steroids', 'domaintools', 'eupi',

View File

@ -47,9 +47,11 @@ options = None
locale.setlocale(locale.LC_ALL, '')
class QueryError(Exception):
pass
class DnsdbClient(object):
def __init__(self, server, apikey, limit=None, http_proxy=None, https_proxy=None):
self.server = server
@ -81,7 +83,6 @@ class DnsdbClient(object):
return self._query(path, before, after)
def _query(self, path, before=None, after=None):
res = []
url = '%s/lookup/%s' % (self.server, path)
params = {}
@ -120,12 +121,15 @@ class DnsdbClient(object):
except (HTTPError, URLError) as e:
raise QueryError(str(e), sys.exc_traceback)
def quote(path):
return urllib_quote(path, safe='')
def sec_to_text(ts):
return time.strftime('%Y-%m-%d %H:%M:%S -0000', time.gmtime(ts))
def rrset_to_text(m):
s = StringIO()
@ -155,9 +159,11 @@ def rrset_to_text(m):
finally:
s.close()
def rdata_to_text(m):
return '%s IN %s %s' % (m['rrname'], m['rrtype'], m['rdata'])
def parse_config(cfg_files):
config = {}
@ -172,6 +178,7 @@ def parse_config(cfg_files):
return config
def time_parse(s):
try:
epoch = int(s)
@ -193,14 +200,15 @@ def time_parse(s):
m = re.match(r'^(?=\d)(?:(\d+)w)?(?:(\d+)d)?(?:(\d+)h)?(?:(\d+)m)?(?:(\d+)s?)?$', s, re.I)
if m:
return -1*(int(m.group(1) or 0)*604800 +
int(m.group(2) or 0)*86400+
int(m.group(3) or 0)*3600+
int(m.group(4) or 0)*60+
int(m.group(5) or 0))
return -1 * (int(m.group(1) or 0) * 604800
+ int(m.group(2) or 0) * 86400
+ int(m.group(3) or 0) * 3600
+ int(m.group(4) or 0) * 60
+ int(m.group(5) or 0))
raise ValueError('Invalid time: "%s"' % s)
def epipe_wrapper(func):
def f(*args, **kwargs):
try:
@ -211,31 +219,23 @@ def epipe_wrapper(func):
raise
return f
@epipe_wrapper
def main():
global cfg
global options
parser = optparse.OptionParser(epilog='Time formats are: "%Y-%m-%d", "%Y-%m-%d %H:%M:%S", "%d" (UNIX timestamp), "-%d" (Relative time in seconds), BIND format (e.g. 1w1h, (w)eek, (d)ay, (h)our, (m)inute, (s)econd)')
parser.add_option('-c', '--config', dest='config',
help='config file', action='append')
parser.add_option('-r', '--rrset', dest='rrset', type='string',
help='rrset <ONAME>[/<RRTYPE>[/BAILIWICK]]')
parser.add_option('-n', '--rdataname', dest='rdata_name', type='string',
help='rdata name <NAME>[/<RRTYPE>]')
parser.add_option('-i', '--rdataip', dest='rdata_ip', type='string',
help='rdata ip <IPADDRESS|IPRANGE|IPNETWORK>')
parser.add_option('-t', '--rrtype', dest='rrtype', type='string',
help='rrset or rdata rrtype')
parser.add_option('-b', '--bailiwick', dest='bailiwick', type='string',
help='rrset bailiwick')
parser.add_option('-c', '--config', dest='config', help='config file', action='append')
parser.add_option('-r', '--rrset', dest='rrset', type='string', help='rrset <ONAME>[/<RRTYPE>[/BAILIWICK]]')
parser.add_option('-n', '--rdataname', dest='rdata_name', type='string', help='rdata name <NAME>[/<RRTYPE>]')
parser.add_option('-i', '--rdataip', dest='rdata_ip', type='string', help='rdata ip <IPADDRESS|IPRANGE|IPNETWORK>')
parser.add_option('-t', '--rrtype', dest='rrtype', type='string', help='rrset or rdata rrtype')
parser.add_option('-b', '--bailiwick', dest='bailiwick', type='string', help='rrset bailiwick')
parser.add_option('-s', '--sort', dest='sort', type='string', help='sort key')
parser.add_option('-R', '--reverse', dest='reverse', action='store_true', default=False,
help='reverse sort')
parser.add_option('-j', '--json', dest='json', action='store_true', default=False,
help='output in JSON format')
parser.add_option('-l', '--limit', dest='limit', type='int', default=0,
help='limit number of results')
parser.add_option('-R', '--reverse', dest='reverse', action='store_true', default=False, help='reverse sort')
parser.add_option('-j', '--json', dest='json', action='store_true', default=False, help='output in JSON format')
parser.add_option('-l', '--limit', dest='limit', type='int', default=0, help='limit number of results')
parser.add_option('', '--before', dest='before', type='string', help='only output results seen before this time')
parser.add_option('', '--after', dest='after', type='string', help='only output results seen after this time')
@ -263,20 +263,20 @@ def main():
print(str(e), file=sys.stderr)
sys.exit(1)
if not 'DNSDB_SERVER' in cfg:
if 'DNSDB_SERVER' not in cfg:
cfg['DNSDB_SERVER'] = DEFAULT_DNSDB_SERVER
if not 'HTTP_PROXY' in cfg:
if 'HTTP_PROXY' not in cfg:
cfg['HTTP_PROXY'] = DEFAULT_HTTP_PROXY
if not 'HTTPS_PROXY' in cfg:
if 'HTTPS_PROXY' not in cfg:
cfg['HTTPS_PROXY'] = DEFAULT_HTTPS_PROXY
if not 'APIKEY' in cfg:
if 'APIKEY' not in cfg:
sys.stderr.write('dnsdb_query: APIKEY not defined in config file\n')
sys.exit(1)
client = DnsdbClient(cfg['DNSDB_SERVER'], cfg['APIKEY'],
limit=options.limit,
http_proxy=cfg['HTTP_PROXY'],
https_proxy=cfg['HTTPS_PROXY'])
limit=options.limit,
http_proxy=cfg['HTTP_PROXY'],
https_proxy=cfg['HTTPS_PROXY'])
if options.rrset:
if options.rrtype or options.bailiwick:
qargs = (options.rrset, options.rrtype, options.bailiwick)
@ -307,7 +307,7 @@ def main():
if options.sort:
results = list(results)
if len(results) > 0:
if not options.sort in results[0]:
if options.sort not in results[0]:
sort_keys = results[0].keys()
sort_keys.sort()
sys.stderr.write('dnsdb_query: invalid sort key "%s". valid sort keys are %s\n' % (options.sort, ', '.join(sort_keys)))
@ -319,5 +319,6 @@ def main():
print(e.message, file=sys.stderr)
sys.exit(1)
if __name__ == '__main__':
main()
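Two of the changes in this file come straight from pycodestyle rules: "not X in Y" becomes "X not in Y" (E713), and the reworked time_parse() arithmetic now breaks lines before the "+" operator, which is exactly why W503 sits on the flake8 ignore list in .travis.yml. A short sketch with placeholder values:

    # E713: membership tests read more naturally as "x not in y".
    cfg = {}
    if 'APIKEY' not in cfg:
        cfg['APIKEY'] = 'dummy-key'  # illustrative placeholder only

    # Operator-first continuation, tolerated because W503 is ignored.
    seconds = -1 * (1 * 604800
                    + 2 * 86400
                    + 3 * 3600)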

View File

@ -22,9 +22,9 @@ def handler(q=False):
misperrors['error'] = "Unsupported attributes type"
return misperrors
if not request.get('config') and not (request['config'].get('host') and
request['config'].get('port') and
request['config'].get('db')):
if not request.get('config') and not (request['config'].get('host')
and request['config'].get('port')
and request['config'].get('db')):
misperrors['error'] = 'ASN description history configuration is missing'
return misperrors

View File

@ -12,24 +12,25 @@ moduleinfo = {'version': '0.1', 'author': 'Sascha Rommelfangen',
moduleconfig = []
blockchain_firstseen='https://blockchain.info/q/addressfirstseen/'
blockchain_balance='https://blockchain.info/q/addressbalance/'
blockchain_totalreceived='https://blockchain.info/q/getreceivedbyaddress/'
blockchain_all='https://blockchain.info/rawaddr/'
converter = 'https://min-api.cryptocompare.com/data/pricehistorical?fsym=BTC&tsyms=USD,EUR&ts='
blockchain_firstseen = 'https://blockchain.info/q/addressfirstseen/'
blockchain_balance = 'https://blockchain.info/q/addressbalance/'
blockchain_totalreceived = 'https://blockchain.info/q/getreceivedbyaddress/'
blockchain_all = 'https://blockchain.info/rawaddr/{}?filter=5{}'
converter = 'https://min-api.cryptocompare.com/data/pricehistorical?fsym=BTC&tsyms=USD,EUR&ts={}'
converter_rls = 'https://min-api.cryptocompare.com/stats/rate/limit'
result_text = ""
g_rate_limit = 300
start_time = 0
conversion_rates = {}
def get_consumption(output=False):
try:
req = requests.get(converter_rls)
jreq = req.json()
minute = str(jreq['Data']['calls_left']['minute'])
hour = str(jreq['Data']['calls_left']['hour'])
except:
hour = str(jreq['Data']['calls_left']['hour'])
except Exception:
minute = str(-1)
hour = str(-1)
# Debug out for the console
@ -53,20 +54,20 @@ def convert(btc, timestamp):
minute, hour = get_consumption()
g_rate_limit -= 1
now = time.time()
delta = now - start_time
#print(g_rate_limit)
# delta = now - start_time
# print(g_rate_limit)
if g_rate_limit <= 10:
minute, hour = get_consumption(output=True)
if int(minute) <= 10:
#print(minute)
#get_consumption(output=True)
# print(minute)
# get_consumption(output=True)
time.sleep(3)
else:
mprint(minute)
start_time = time.time()
g_rate_limit = int(minute)
try:
req = requests.get(converter+str(timestamp))
req = requests.get(converter.format(timestamp))
jreq = req.json()
usd = jreq['BTC']['USD']
eur = jreq['BTC']['EUR']
@ -78,7 +79,7 @@ def convert(btc, timestamp):
# Actually convert and return the values
u = usd * btc
e = eur * btc
return u,e
return u, e
def mprint(input):
@ -90,8 +91,8 @@ def mprint(input):
def handler(q=False):
global result_text
global conversion_rates
start_time = time.time()
now = time.time()
# start_time = time.time()
# now = time.time()
if q is False:
return False
request = json.loads(q)
@ -107,10 +108,10 @@ def handler(q=False):
mprint("\nAddress:\t" + btc)
try:
req = requests.get(blockchain_all+btc+"?limit=50&filter=5")
req = requests.get(blockchain_all.format(btc, "&limit=50"))
jreq = req.json()
except Exception as e:
#print(e)
except Exception:
# print(e)
print(req.text)
result_text = ""
sys.exit(1)
@ -130,11 +131,11 @@ def handler(q=False):
i = 0
while i < n_tx:
if click is False:
req = requests.get(blockchain_all+btc+"?limit=5&offset="+str(i)+"&filter=5")
req = requests.get(blockchain_all.format(btc, "&limit=5&offset={}".format(i)))
if n_tx > 5:
n_tx = 5
else:
req = requests.get(blockchain_all+btc+"?limit=50&offset="+str(i)+"&filter=5")
req = requests.get(blockchain_all.format(btc, "&limit=50&offset={}".format(i)))
jreq = req.json()
if jreq['txs']:
for transactions in jreq['txs']:
@ -144,8 +145,8 @@ def handler(q=False):
script_old = tx['script']
if tx['prev_out']['value'] != 0 and tx['prev_out']['addr'] == btc:
datetime = time.strftime("%d %b %Y %H:%M:%S %Z", time.localtime(int(transactions['time'])))
value = float(tx['prev_out']['value'] / 100000000 )
u,e = convert(value, transactions['time'])
value = float(tx['prev_out']['value'] / 100000000)
u, e = convert(value, transactions['time'])
mprint("#" + str(n_tx - i) + "\t" + str(datetime) + "\t-{0:10.8f} BTC {1:10.2f} USD\t{2:10.2f} EUR".format(value, u, e).rstrip('0'))
if script_old != tx['script']:
i += 1
@ -153,16 +154,16 @@ def handler(q=False):
sum_counter += 1
sum += value
if sum_counter > 1:
u,e = convert(sum, transactions['time'])
u, e = convert(sum, transactions['time'])
mprint("\t\t\t\t\t----------------------------------------------")
mprint("#" + str(n_tx - i) + "\t\t\t\t Sum:\t-{0:10.8f} BTC {1:10.2f} USD\t{2:10.2f} EUR\n".format(sum, u, e).rstrip('0'))
for tx in transactions['out']:
if tx['value'] != 0 and tx['addr'] == btc:
datetime = time.strftime("%d %b %Y %H:%M:%S %Z", time.localtime(int(transactions['time'])))
value = float(tx['value'] / 100000000 )
u,e = convert(value, transactions['time'])
value = float(tx['value'] / 100000000)
u, e = convert(value, transactions['time'])
mprint("#" + str(n_tx - i) + "\t" + str(datetime) + "\t {0:10.8f} BTC {1:10.2f} USD\t{2:10.2f} EUR".format(value, u, e).rstrip('0'))
#i += 1
# i += 1
i += 1
r = {
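The URL handling in this module switches from string concatenation to str.format() templates, so the query string is assembled in one place. A quick usage sketch, with an arbitrary placeholder address:

    blockchain_all = 'https://blockchain.info/rawaddr/{}?filter=5{}'

    url = blockchain_all.format('1ExampleAddressxxxxxxxxxxxxxxxxxxx', '&limit=50')
    # -> https://blockchain.info/rawaddr/1ExampleAddressxxxxxxxxxxxxxxxxxxx?filter=5&limit=50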

View File

@ -12,20 +12,21 @@ moduleinfo = {'version': '1', 'author': 'Hannah Ward',
# config fields that your code expects from the site admin
moduleconfig = []
common_tlds = {"com":"Commercial (Worldwide)",
"org":"Organisation (Worldwide)",
"net":"Network (Worldwide)",
"int":"International (Worldwide)",
"edu":"Education (Usually USA)",
"gov":"Government (USA)"
}
common_tlds = {"com": "Commercial (Worldwide)",
"org": "Organisation (Worldwide)",
"net": "Network (Worldwide)",
"int": "International (Worldwide)",
"edu": "Education (Usually USA)",
"gov": "Government (USA)"
}
codes = False
def handler(q=False):
global codes
if not codes:
codes = requests.get("http://www.geognos.com/api/en/countries/info/all.json").json()
codes = requests.get("http://www.geognos.com/api/en/countries/info/all.json").json()
if q is False:
return False
request = json.loads(q)
@ -36,18 +37,18 @@ def handler(q=False):
# Check if it's a common, non country one
if ext in common_tlds.keys():
val = common_tlds[ext]
val = common_tlds[ext]
else:
# Retrieve a json full of country info
if not codes["StatusMsg"] == "OK":
val = "Unknown"
else:
# Find our code based on TLD
codes = codes["Results"]
for code in codes.keys():
if codes[code]["CountryCodes"]["tld"] == ext:
val = codes[code]["Name"]
r = {'results': [{'types':['text'], 'values':[val]}]}
# Retrieve a json full of country info
if not codes["StatusMsg"] == "OK":
val = "Unknown"
else:
# Find our code based on TLD
codes = codes["Results"]
for code in codes.keys():
if codes[code]["CountryCodes"]["tld"] == ext:
val = codes[code]["Name"]
r = {'results': [{'types': ['text'], 'values':[val]}]}
return r
@ -58,4 +59,3 @@ def introspection():
def version():
moduleinfo['config'] = moduleconfig
return moduleinfo
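The dictionary literal reformatted here (and the similar ones further down, such as the RBL list and the CEF mapping) is a pure whitespace fix: pycodestyle wants a space after each colon (E231) and continuation lines aligned under the first entry (the E127/E128 checks). For example:

    common_tlds = {"com": "Commercial (Worldwide)",
                   "org": "Organisation (Worldwide)",
                   "net": "Network (Worldwide)"}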

View File

@ -1,6 +1,5 @@
import json
import datetime
from collections import defaultdict
import sys
try:
import dns.resolver
@ -30,12 +29,14 @@ dbl_mapping = {'127.0.1.2': 'spam domain',
'127.0.1.106': 'abused legit botnet C&C',
'127.0.1.255': 'IP queries prohibited!'}
def fetch_requested_value(request):
for attribute_type in mispattributes['input']:
if request.get(attribute_type):
return request[attribute_type].split('|')[0]
return None
def handler(q=False):
if q is False:
return False
@ -52,9 +53,11 @@ def handler(q=False):
result = str(e)
return {'results': [{'types': mispattributes.get('output'), 'values': result}]}
def introspection():
return mispattributes
def version():
moduleinfo['config'] = moduleconfig
return moduleinfo

View File

@ -43,7 +43,7 @@ def handler(q=False):
except dns.exception.Timeout:
misperrors['error'] = "Timeout"
return misperrors
except:
except Exception:
misperrors['error'] = "DNS resolving error"
return misperrors

View File

@ -51,7 +51,7 @@ def lookup_name(client, name):
for i in item.get('rdata'):
# grab email field and replace first dot by @ to convert to an email address
yield(i.split(' ')[1].rstrip('.').replace('.', '@', 1))
except QueryError as e:
except QueryError:
pass
try:
@ -59,7 +59,7 @@ def lookup_name(client, name):
for item in res:
if item.get('rrtype') in ['A', 'AAAA', 'CNAME']:
yield(item.get('rrname').rstrip('.'))
except QueryError as e:
except QueryError:
pass
@ -68,7 +68,7 @@ def lookup_ip(client, ip):
res = client.query_rdata_ip(ip)
for item in res:
yield(item['rrname'].rstrip('.'))
except QueryError as e:
except QueryError:
pass

View File

@ -27,7 +27,7 @@ try:
config.read(os.path.join(os.path.dirname(os.path.abspath(__file__)), 'geoip_country.cfg'))
gi = pygeoip.GeoIP(config.get('GEOIP', 'database'))
enabled = True
except:
except Exception:
enabled = False
@ -49,7 +49,7 @@ def handler(q=False):
try:
answer = gi.country_code_by_addr(toquery)
except:
except Exception:
misperrors['error'] = "GeoIP resolving error"
return misperrors

View File

@ -20,7 +20,7 @@ def handler(q=False):
if v is None:
misperrors['error'] = 'Hash value is missing.'
return misperrors
r = requests.post(hashddapi_url, data={'hash':v})
r = requests.post(hashddapi_url, data={'hash': v})
if r.status_code == 200:
state = json.loads(r.text)
if state:

View File

@ -24,9 +24,9 @@ def handler(q=False):
misperrors['error'] = "Unsupported attributes type"
return misperrors
if not request.get('config') and not (request['config'].get('host') and
request['config'].get('port') and
request['config'].get('db')):
if not request.get('config') and not (request['config'].get('host')
and request['config'].get('port')
and request['config'].get('db')):
misperrors['error'] = 'IP ASN history configuration is missing'
return misperrors

View File

@ -45,7 +45,7 @@ def parse_iprep(ip, api):
url = 'https://www.packetmail.net/iprep.php/%s' % ip
try:
data = requests.get(url, params={'apikey': api}).json()
except:
except Exception:
return ['Error pulling data'], rep
# print '%s' % data
for name, val in data.items():
@ -71,11 +71,11 @@ def parse_iprep(ip, api):
misp_val = context
full_text += '\n%s' % context
misp_comment = 'IPRep Source %s: %s' % (name, val['last_seen'])
rep.append({'types': mispattributes['output'], 'categories':['External analysis'], 'values': misp_val, 'comment': misp_comment})
except:
rep.append({'types': mispattributes['output'], 'categories': ['External analysis'], 'values': misp_val, 'comment': misp_comment})
except Exception:
err.append('Error parsing source: %s' % name)
rep.append({'types': ['freetext'], 'values': full_text , 'comment': 'Free text import of IPRep'})
rep.append({'types': ['freetext'], 'values': full_text, 'comment': 'Free text import of IPRep'})
return err, rep

View File

@ -86,32 +86,30 @@ def handler(q=False):
response.block_details.date_updated.strftime('%d %B %Y') if response.block_details.date_updated else None
results = {
'results': [
{'types': ['text'], 'values':
{
# Mac address details
'Valid MAC address': "True" if response.mac_address_details.is_valid else "False",
'Transmission type': response.mac_address_details.transmission_type,
'Administration type': response.mac_address_details.administration_type,
'results':
[{'types': ['text'], 'values':
{
# Mac address details
'Valid MAC address': "True" if response.mac_address_details.is_valid else "False",
'Transmission type': response.mac_address_details.transmission_type,
'Administration type': response.mac_address_details.administration_type,
# Vendor details
'OUI': response.vendor_details.oui,
'Vendor details are hidden': "True" if response.vendor_details.is_private else "False",
'Company name': response.vendor_details.company_name,
'Company\'s address': response.vendor_details.company_address,
'County code': response.vendor_details.country_code,
# Vendor details
'OUI': response.vendor_details.oui,
'Vendor details are hidden': "True" if response.vendor_details.is_private else "False",
'Company name': response.vendor_details.company_name,
'Company\'s address': response.vendor_details.company_address,
'County code': response.vendor_details.country_code,
# Block details
'Block found': "True" if response.block_details.block_found else "False",
'The left border of the range': response.block_details.border_left,
'The right border of the range': response.block_details.border_right,
'The total number of MAC addresses in this range': response.block_details.block_size,
'Assignment block size': response.block_details.assignment_block_size,
'Date when the range was allocated': date_created,
'Date when the range was last updated': date_updated
}
}
]
# Block details
'Block found': "True" if response.block_details.block_found else "False",
'The left border of the range': response.block_details.border_left,
'The right border of the range': response.block_details.border_right,
'The total number of MAC addresses in this range': response.block_details.block_size,
'Assignment block size': response.block_details.assignment_block_size,
'Date when the range was allocated': date_created,
'Date when the range was last updated': date_updated
}}]
}
return results

View File

@ -1,4 +1,3 @@
import json
# -*- coding: utf-8 -*-
import json
@ -9,7 +8,8 @@ except ImportError:
misperrors = {'error': 'Error'}
mispattributes = {'input': ['ip-src', 'ip-dst', 'hostname', 'domain'], 'output': ['hostname', 'domain', 'ip-src', 'ip-dst','url']}
mispattributes = {'input': ['ip-src', 'ip-dst', 'hostname', 'domain'],
'output': ['hostname', 'domain', 'ip-src', 'ip-dst', 'url']}
# possible module-types: 'expansion', 'hover' or both
moduleinfo = {'version': '1', 'author': 'Sebastien Larinier @sebdraven',
'description': 'Query on Onyphe',
@ -54,7 +54,7 @@ def handle_expansion(api, ip, misperrors):
misperrors['error'] = result['message']
return misperrors
categories = list(set([item['@category'] for item in result['results']]))
# categories = list(set([item['@category'] for item in result['results']]))
result_filtered = {"results": []}
urls_pasties = []
@ -72,9 +72,9 @@ def handle_expansion(api, ip, misperrors):
os_target = r['os']
if os_target != 'Unknown':
os_list.append(r['os'])
elif r['@category'] == 'resolver' and r['type'] =='reverse':
elif r['@category'] == 'resolver' and r['type'] == 'reverse':
domains_resolver.append(r['reverse'])
elif r['@category'] == 'resolver' and r['type'] =='forward':
elif r['@category'] == 'resolver' and r['type'] == 'forward':
domains_forward.append(r['forward'])
result_filtered['results'].append({'types': ['url'], 'values': urls_pasties,
@ -90,7 +90,7 @@ def handle_expansion(api, ip, misperrors):
result_filtered['results'].append({'types': ['domain'],
'values': list(set(domains_resolver)),
'categories': ['Network activity'],
'comment': 'resolver to %s' % ip })
'comment': 'resolver to %s' % ip})
result_filtered['results'].append({'types': ['domain'],
'values': list(set(domains_forward)),

View File

@ -1,4 +1,3 @@
import json
# -*- coding: utf-8 -*-
import json
@ -10,7 +9,7 @@ except ImportError:
misperrors = {'error': 'Error'}
mispattributes = {'input': ['ip-src', 'ip-dst', 'hostname', 'domain'],
'output': ['hostname', 'domain', 'ip-src', 'ip-dst','url']}
'output': ['hostname', 'domain', 'ip-src', 'ip-dst', 'url']}
# possible module-types: 'expansion', 'hover' or both
moduleinfo = {'version': '1', 'author': 'Sebastien Larinier @sebdraven',
@ -38,10 +37,10 @@ def handler(q=False):
ip = ''
if request.get('ip-src'):
ip = request['ip-src']
return handle_ip(api ,ip, misperrors)
return handle_ip(api, ip, misperrors)
elif request.get('ip-dst'):
ip = request['ip-dst']
return handle_ip(api,ip,misperrors)
return handle_ip(api, ip, misperrors)
elif request.get('domain'):
domain = request['domain']
return handle_domain(api, domain, misperrors)
@ -91,11 +90,11 @@ def handle_ip(api, ip, misperrors):
r, status_ok = expand_syscan(api, ip, misperrors)
if status_ok:
result_filtered['results'].extend(r)
result_filtered['results'].extend(r)
else:
misperrors['error'] = "Error syscan result"
misperrors['error'] = "Error syscan result"
r, status_ok = expand_pastries(api,misperrors,ip=ip)
r, status_ok = expand_pastries(api, misperrors, ip=ip)
if status_ok:
result_filtered['results'].extend(r)
@ -185,11 +184,11 @@ def expand_syscan(api, ip, misperror):
return r, status_ok
def expand_datascan(api, misperror,**kwargs):
def expand_datascan(api, misperror, **kwargs):
status_ok = False
r = []
ip = ''
query =''
# ip = ''
query = ''
asn_list = []
geoloc = []
orgs = []
@ -311,7 +310,7 @@ def expand_pastries(api, misperror, **kwargs):
query = kwargs.get('domain')
result = api.search_pastries('domain:%s' % query)
if result['status'] =='ok':
if result['status'] == 'ok':
status_ok = True
for item in result['results']:
if item['@category'] == 'pastries':
@ -328,7 +327,7 @@ def expand_pastries(api, misperror, **kwargs):
r.append({'types': ['url'],
'values': urls_pasties,
'categories': ['External analysis'],
'comment':'URLs of pasties where %s has found' % query})
'comment': 'URLs of pasties where %s has found' % query})
r.append({'types': ['domain'], 'values': list(set(domains)),
'categories': ['Network activity'],
'comment': 'Domains found in pasties of Onyphe'})
@ -340,7 +339,7 @@ def expand_pastries(api, misperror, **kwargs):
return r, status_ok
def expand_threatlist(api, misperror,**kwargs):
def expand_threatlist(api, misperror, **kwargs):
status_ok = False
r = []
@ -366,7 +365,8 @@ def expand_threatlist(api, misperror,**kwargs):
'comment': '%s is present in threatlist' % query
})
return r,status_ok
return r, status_ok
def introspection():
return mispattributes

View File

@ -15,9 +15,10 @@ moduleinfo = {'version': '1', 'author': 'chrisdoman',
# We're not actually using the API key yet
moduleconfig = ["apikey"]
# Avoid adding windows update to enrichment etc.
def isBlacklisted(value):
blacklist = ['0.0.0.0', '8.8.8.8', '255.255.255.255', '192.168.56.' , 'time.windows.com']
blacklist = ['0.0.0.0', '8.8.8.8', '255.255.255.255', '192.168.56.', 'time.windows.com']
for b in blacklist:
if value in b:
@ -25,10 +26,12 @@ def isBlacklisted(value):
return True
def valid_ip(ip):
m = re.match(r"^(\d{1,3})\.(\d{1,3})\.(\d{1,3})\.(\d{1,3})$", ip)
return bool(m) and all(map(lambda n: 0 <= int(n) <= 255, m.groups()))
def findAll(data, keys):
a = []
if isinstance(data, dict):
@ -43,9 +46,11 @@ def findAll(data, keys):
a.extend(findAll(i, keys))
return a
def valid_email(email):
return bool(re.search(r"[a-zA-Z0-9!#$%&'*+\/=?^_`{|}~-]+(?:\.[a-zA-Z0-9!#$%&'*+\/=?^_`{|}~-]+)*@(?:[a-zA-Z0-9](?:[a-zA-Z0-9-]*[a-zA-Z0-9])?\.)+[a-zA-Z0-9](?:[a-zA-Z0-9-]*[a-zA-Z0-9])?", email))
def handler(q=False):
if q is False:
return False
@ -99,19 +104,17 @@ def getHash(_hash, key):
def getIP(ip, key):
ret = []
req = json.loads( requests.get("https://otx.alienvault.com/otxapi/indicator/ip/malware/" + ip + "?limit=1000").text )
req = json.loads(requests.get("https://otx.alienvault.com/otxapi/indicator/ip/malware/" + ip + "?limit=1000").text)
for _hash in findAll(req, "hash"):
ret.append({"types": ["sha256"], "values": [_hash]})
req = json.loads( requests.get("https://otx.alienvault.com/otxapi/indicator/ip/passive_dns/" + ip).text )
req = json.loads(requests.get("https://otx.alienvault.com/otxapi/indicator/ip/passive_dns/" + ip).text)
for hostname in findAll(req, "hostname"):
if not isBlacklisted(hostname):
ret.append({"types": ["hostname"], "values": [hostname]})
return ret
@ -119,7 +122,7 @@ def getDomain(domain, key):
ret = []
req = json.loads( requests.get("https://otx.alienvault.com/otxapi/indicator/domain/malware/" + domain + "?limit=1000").text )
req = json.loads(requests.get("https://otx.alienvault.com/otxapi/indicator/domain/malware/" + domain + "?limit=1000").text)
for _hash in findAll(req, "hash"):
ret.append({"types": ["sha256"], "values": [_hash]})
@ -144,6 +147,7 @@ def getDomain(domain, key):
return ret
def introspection():
return mispattributes

View File

@ -331,7 +331,7 @@ def handler(q=False):
output['results'] += results
else:
log.error("Unsupported query pattern issued.")
except:
except Exception:
return misperrors
return output

View File

@ -1,5 +1,5 @@
import json
import datetime
import sys
try:
import dns.resolver
@ -18,64 +18,65 @@ moduleinfo = {'version': '0.1', 'author': 'Christian Studer',
moduleconfig = []
rbls = {
'spam.spamrats.com': 'http://www.spamrats.com',
'spamguard.leadmon.net': 'http://www.leadmon.net/SpamGuard/',
'rbl-plus.mail-abuse.org': 'http://www.mail-abuse.com/lookup.html',
'web.dnsbl.sorbs.net': 'http://www.sorbs.net',
'ix.dnsbl.manitu.net': 'http://www.dnsbl.manitu.net',
'virus.rbl.jp': 'http://www.rbl.jp',
'dul.dnsbl.sorbs.net': 'http://www.sorbs.net',
'bogons.cymru.com': 'http://www.team-cymru.org/Services/Bogons/',
'psbl.surriel.com': 'http://psbl.surriel.com',
'misc.dnsbl.sorbs.net': 'http://www.sorbs.net',
'httpbl.abuse.ch': 'http://dnsbl.abuse.ch',
'combined.njabl.org': 'http://combined.njabl.org',
'smtp.dnsbl.sorbs.net': 'http://www.sorbs.net',
'korea.services.net': 'http://korea.services.net',
'drone.abuse.ch': 'http://dnsbl.abuse.ch',
'rbl.efnetrbl.org': 'http://rbl.efnetrbl.org',
'cbl.anti-spam.org.cn': 'http://www.anti-spam.org.cn/?Locale=en_US',
'b.barracudacentral.org': 'http://www.barracudacentral.org/rbl/removal-request',
'bl.spamcannibal.org': 'http://www.spamcannibal.org',
'xbl.spamhaus.org': 'http://www.spamhaus.org/xbl/',
'zen.spamhaus.org': 'http://www.spamhaus.org/zen/',
'rbl.suresupport.com': 'http://suresupport.com/postmaster',
'db.wpbl.info': 'http://www.wpbl.info',
'sbl.spamhaus.org': 'http://www.spamhaus.org/sbl/',
'http.dnsbl.sorbs.net': 'http://www.sorbs.net',
'csi.cloudmark.com': 'http://www.cloudmark.com/en/products/cloudmark-sender-intelligence/index',
'rbl.interserver.net': 'http://rbl.interserver.net',
'ubl.unsubscore.com': 'http://www.lashback.com/blacklist/',
'dnsbl.sorbs.net': 'http://www.sorbs.net',
'virbl.bit.nl': 'http://virbl.bit.nl',
'pbl.spamhaus.org': 'http://www.spamhaus.org/pbl/',
'socks.dnsbl.sorbs.net': 'http://www.sorbs.net',
'short.rbl.jp': 'http://www.rbl.jp',
'dnsbl.dronebl.org': 'http://www.dronebl.org',
'blackholes.mail-abuse.org': 'http://www.mail-abuse.com/lookup.html',
'truncate.gbudb.net': 'http://www.gbudb.com/truncate/index.jsp',
'dyna.spamrats.com': 'http://www.spamrats.com',
'spamrbl.imp.ch': 'http://antispam.imp.ch',
'spam.dnsbl.sorbs.net': 'http://www.sorbs.net',
'wormrbl.imp.ch': 'http://antispam.imp.ch',
'query.senderbase.org': 'http://www.senderbase.org/about',
'opm.tornevall.org': 'http://dnsbl.tornevall.org',
'netblock.pedantic.org': 'http://pedantic.org',
'access.redhawk.org': 'http://www.redhawk.org/index.php?option=com_wrapper&Itemid=33',
'cdl.anti-spam.org.cn': 'http://www.anti-spam.org.cn/?Locale=en_US',
'multi.surbl.org': 'http://www.surbl.org',
'noptr.spamrats.com': 'http://www.spamrats.com',
'dnsbl.inps.de': 'http://dnsbl.inps.de/index.cgi?lang=en',
'bl.spamcop.net': 'http://bl.spamcop.net',
'cbl.abuseat.org': 'http://cbl.abuseat.org',
'dsn.rfc-ignorant.org': 'http://www.rfc-ignorant.org/policy-dsn.php',
'zombie.dnsbl.sorbs.net': 'http://www.sorbs.net',
'dnsbl.njabl.org': 'http://dnsbl.njabl.org',
'relays.mail-abuse.org': 'http://www.mail-abuse.com/lookup.html',
'rbl.spamlab.com': 'http://tools.appriver.com/index.aspx?tool=rbl',
'all.bl.blocklist.de': 'http://www.blocklist.de/en/rbldns.html'
'spam.spamrats.com': 'http://www.spamrats.com',
'spamguard.leadmon.net': 'http://www.leadmon.net/SpamGuard/',
'rbl-plus.mail-abuse.org': 'http://www.mail-abuse.com/lookup.html',
'web.dnsbl.sorbs.net': 'http://www.sorbs.net',
'ix.dnsbl.manitu.net': 'http://www.dnsbl.manitu.net',
'virus.rbl.jp': 'http://www.rbl.jp',
'dul.dnsbl.sorbs.net': 'http://www.sorbs.net',
'bogons.cymru.com': 'http://www.team-cymru.org/Services/Bogons/',
'psbl.surriel.com': 'http://psbl.surriel.com',
'misc.dnsbl.sorbs.net': 'http://www.sorbs.net',
'httpbl.abuse.ch': 'http://dnsbl.abuse.ch',
'combined.njabl.org': 'http://combined.njabl.org',
'smtp.dnsbl.sorbs.net': 'http://www.sorbs.net',
'korea.services.net': 'http://korea.services.net',
'drone.abuse.ch': 'http://dnsbl.abuse.ch',
'rbl.efnetrbl.org': 'http://rbl.efnetrbl.org',
'cbl.anti-spam.org.cn': 'http://www.anti-spam.org.cn/?Locale=en_US',
'b.barracudacentral.org': 'http://www.barracudacentral.org/rbl/removal-request',
'bl.spamcannibal.org': 'http://www.spamcannibal.org',
'xbl.spamhaus.org': 'http://www.spamhaus.org/xbl/',
'zen.spamhaus.org': 'http://www.spamhaus.org/zen/',
'rbl.suresupport.com': 'http://suresupport.com/postmaster',
'db.wpbl.info': 'http://www.wpbl.info',
'sbl.spamhaus.org': 'http://www.spamhaus.org/sbl/',
'http.dnsbl.sorbs.net': 'http://www.sorbs.net',
'csi.cloudmark.com': 'http://www.cloudmark.com/en/products/cloudmark-sender-intelligence/index',
'rbl.interserver.net': 'http://rbl.interserver.net',
'ubl.unsubscore.com': 'http://www.lashback.com/blacklist/',
'dnsbl.sorbs.net': 'http://www.sorbs.net',
'virbl.bit.nl': 'http://virbl.bit.nl',
'pbl.spamhaus.org': 'http://www.spamhaus.org/pbl/',
'socks.dnsbl.sorbs.net': 'http://www.sorbs.net',
'short.rbl.jp': 'http://www.rbl.jp',
'dnsbl.dronebl.org': 'http://www.dronebl.org',
'blackholes.mail-abuse.org': 'http://www.mail-abuse.com/lookup.html',
'truncate.gbudb.net': 'http://www.gbudb.com/truncate/index.jsp',
'dyna.spamrats.com': 'http://www.spamrats.com',
'spamrbl.imp.ch': 'http://antispam.imp.ch',
'spam.dnsbl.sorbs.net': 'http://www.sorbs.net',
'wormrbl.imp.ch': 'http://antispam.imp.ch',
'query.senderbase.org': 'http://www.senderbase.org/about',
'opm.tornevall.org': 'http://dnsbl.tornevall.org',
'netblock.pedantic.org': 'http://pedantic.org',
'access.redhawk.org': 'http://www.redhawk.org/index.php?option=com_wrapper&Itemid=33',
'cdl.anti-spam.org.cn': 'http://www.anti-spam.org.cn/?Locale=en_US',
'multi.surbl.org': 'http://www.surbl.org',
'noptr.spamrats.com': 'http://www.spamrats.com',
'dnsbl.inps.de': 'http://dnsbl.inps.de/index.cgi?lang=en',
'bl.spamcop.net': 'http://bl.spamcop.net',
'cbl.abuseat.org': 'http://cbl.abuseat.org',
'dsn.rfc-ignorant.org': 'http://www.rfc-ignorant.org/policy-dsn.php',
'zombie.dnsbl.sorbs.net': 'http://www.sorbs.net',
'dnsbl.njabl.org': 'http://dnsbl.njabl.org',
'relays.mail-abuse.org': 'http://www.mail-abuse.com/lookup.html',
'rbl.spamlab.com': 'http://tools.appriver.com/index.aspx?tool=rbl',
'all.bl.blocklist.de': 'http://www.blocklist.de/en/rbldns.html'
}
def handler(q=False):
if q is False:
return False
@ -89,11 +90,11 @@ def handler(q=False):
return misperrors
listed = []
info = []
ipRev = '.'.join(ip.split('.')[::-1])
ipRev = '.'.join(ip.split('.')[::-1])
for rbl in rbls:
query = '{}.{}'.format(ipRev, rbl)
try:
txt = resolver.query(query,'TXT')
txt = resolver.query(query, 'TXT')
listed.append(query)
info.append([str(t) for t in txt])
except Exception:
@ -101,9 +102,11 @@ def handler(q=False):
result = "\n".join(["{}: {}".format(l, " - ".join(i)) for l, i in zip(listed, info)])
return {'results': [{'types': mispattributes.get('output'), 'values': result}]}
def introspection():
return mispattributes
def version():
moduleinfo['config'] = moduleconfig
return moduleinfo

View File

@ -1,5 +1,5 @@
import json
import dns.reversename, dns.resolver
from dns import reversename, resolver, exception
misperrors = {'error': 'Error'}
mispattributes = {'input': ['ip-src', 'ip-dst', 'domain|ip'], 'output': ['hostname']}
@ -12,6 +12,7 @@ moduleinfo = {'version': '0.1', 'author': 'Andreas Muehlemann',
# config fields that your code expects from the site admin
moduleconfig = ['nameserver']
def handler(q=False):
if q is False:
return False
@ -26,9 +27,9 @@ def handler(q=False):
return False
# reverse lookup for ip
revname = dns.reversename.from_address(toquery)
revname = reversename.from_address(toquery)
r = dns.resolver.Resolver()
r = resolver.Resolver()
r.timeout = 2
r.lifetime = 2
@ -42,13 +43,13 @@ def handler(q=False):
try:
answer = r.query(revname, 'PTR')
except dns.resolver.NXDOMAIN:
except resolver.NXDOMAIN:
misperrors['error'] = "NXDOMAIN"
return misperrors
except dns.exception.Timeout:
except exception.Timeout:
misperrors['error'] = "Timeout"
return misperrors
except:
except Exception:
misperrors['error'] = "DNS resolving error"
return misperrors
@ -56,9 +57,11 @@ def handler(q=False):
'values':[str(answer[0])]}]}
return r
def introspection():
return mispattributes
def version():
moduleinfo['config'] = moduleconfig
return moduleinfo
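The import change at the top of this file addresses E401 (multiple imports on one line) and lets the rest of the module refer to the dnspython submodules directly. A condensed sketch of that pattern (reverse_lookup is an illustrative name, not a function in the module):

    from dns import exception, resolver, reversename

    def reverse_lookup(ip):
        rev = reversename.from_address(ip)  # e.g. 8.8.8.8 -> 8.8.8.8.in-addr.arpa.
        res = resolver.Resolver()
        res.timeout = 2
        res.lifetime = 2
        try:
            return [str(answer) for answer in res.query(rev, 'PTR')]
        except (resolver.NXDOMAIN, exception.Timeout):
            return []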

View File

@ -114,8 +114,7 @@ def handle_domain(api, domain, misperrors):
if r:
result_filtered['results'].extend(r)
else:
misperrors['error'] = misperrors[
'error'] + ' Error in expand History DNS'
misperrors['error'] = misperrors['error'] + ' Error in expand History DNS'
return misperrors
r, status_ok = expand_history_whois(api, domain)
@ -124,8 +123,7 @@ def handle_domain(api, domain, misperrors):
if r:
result_filtered['results'].extend(r)
else:
misperrors['error'] = misperrors['error'] + \
' Error in expand History Whois'
misperrors['error'] = misperrors['error'] + ' Error in expand History Whois'
return misperrors
return result_filtered

View File

@ -1,4 +1,6 @@
import sys, os, io, json
import sys
import io
import json
try:
from sigma.parser import SigmaCollectionParser
from sigma.config import SigmaConfiguration
@ -13,6 +15,7 @@ moduleinfo = {'version': '0.1', 'author': 'Christian Studer', 'module-type': ['e
moduleconfig = []
sigma_targets = ('es-dsl', 'es-qs', 'graylog', 'kibana', 'xpack-watcher', 'logpoint', 'splunk', 'grep', 'wdatp', 'splunkxml', 'arcsight', 'qualys')
def handler(q=False):
if q is False:
return False
@ -35,16 +38,18 @@ def handler(q=False):
backend.finalize()
print("#NEXT")
targets.append(t)
except:
except Exception:
continue
sys.stdout = old_stdout
results = result.getvalue()[:-5].split('#NEXT')
d_result = {t: r.strip() for t,r in zip(targets, results)}
d_result = {t: r.strip() for t, r in zip(targets, results)}
return {'results': [{'types': mispattributes['output'], 'values': d_result}]}
def introspection():
return mispattributes
def version():
moduleinfo['config'] = moduleconfig
return moduleinfo

View File

@ -12,6 +12,7 @@ moduleinfo = {'version': '0.1', 'author': 'Christian Studer', 'module-type': ['e
'description': 'An expansion hover module to perform a syntax check on sigma rules'}
moduleconfig = []
def handler(q=False):
if q is False:
return False
@ -27,9 +28,11 @@ def handler(q=False):
result = ("Syntax error: {}".format(str(e)))
return {'results': [{'types': mispattributes['output'], 'values': result}]}
def introspection():
return mispattributes
def version():
moduleinfo['config'] = moduleconfig
return moduleinfo

View File

@ -10,6 +10,7 @@ moduleinfo = {'version': '0.1', 'author': 'Christian Studer', 'module-type': ['e
'description': 'An expansion hover module to perform a syntax check on stix2 patterns.'}
moduleconfig = []
def handler(q=False):
if q is False:
return False
@ -27,16 +28,18 @@ def handler(q=False):
if syntax_errors:
s = 's' if len(syntax_errors) > 1 else ''
s_errors = ""
for error in syntax_errors:
for error in syntax_errors:
s_errors += "{}\n".format(error[6:])
result = "Syntax error{}: \n{}".format(s, s_errors[:-1])
else:
result = "Syntax valid"
return {'results': [{'types': mispattributes['output'], 'values': result}]}
def introspection():
return mispattributes
def version():
moduleinfo['config'] = moduleconfig
return moduleinfo

View File

@ -17,7 +17,7 @@ moduleconfig = []
# Avoid adding windows update to enrichment etc.
def isBlacklisted(value):
blacklist = ['8.8.8.8', '255.255.255.255', '192.168.56.' , 'time.windows.com']
blacklist = ['8.8.8.8', '255.255.255.255', '192.168.56.', 'time.windows.com']
for b in blacklist:
if value in b:
@ -25,28 +25,31 @@ def isBlacklisted(value):
return False
def valid_ip(ip):
m = re.match(r"^(\d{1,3})\.(\d{1,3})\.(\d{1,3})\.(\d{1,3})$", ip)
return bool(m) and all(map(lambda n: 0 <= int(n) <= 255, m.groups()))
def valid_domain(hostname):
if len(hostname) > 255:
return False
if hostname[-1] == ".":
hostname = hostname[:-1] # strip exactly one dot from the right, if present
allowed = re.compile("(?!-)[A-Z\d-]{1,63}(?<!-)$", re.IGNORECASE)
hostname = hostname[:-1] # strip exactly one dot from the right, if present
allowed = re.compile(r"(?!-)[A-Z\d-]{1,63}(?<!-)$", re.IGNORECASE)
return all(allowed.match(x) for x in hostname.split("."))
def valid_email(email):
return bool(re.search(r"^[\w\.\+\-]+\@[\w]+\.[a-z]{2,3}$", email))
def handler(q=False):
if q is False:
return False
q = json.loads(q)
r = {"results": []}
if "ip-src" in q:
@ -98,7 +101,7 @@ def getHash(hash):
def getIP(ip):
ret = []
req = json.loads( requests.get("https://www.threatcrowd.org/searchApi/v2/ip/report/?ip=" + ip).text )
req = json.loads(requests.get("https://www.threatcrowd.org/searchApi/v2/ip/report/?ip=" + ip).text)
if "resolutions" in req:
for dns in req["resolutions"]:
@ -110,11 +113,9 @@ def getIP(ip):
for hash in req["hashes"]:
ret.append({"types": ["md5"], "values": [hash]})
return ret
def getEmail(email):
ret = []
j = requests.get("https://www.threatcrowd.org/searchApi/v2/email/report/?email=" + email).text
@ -129,11 +130,10 @@ def getEmail(email):
return ret
def getDomain(domain):
ret = []
req = json.loads( requests.get("https://www.threatcrowd.org/searchApi/v2/domain/report/?domain=" + domain).text )
req = json.loads(requests.get("https://www.threatcrowd.org/searchApi/v2/domain/report/?domain=" + domain).text)
if "resolutions" in req:
for dns in req["resolutions"]:
@ -148,9 +148,9 @@ def getDomain(domain):
for hash in req["hashes"]:
ret.append({"types": ["md5"], "values": [hash]})
return ret
def introspection():
return mispattributes

View File

@ -1,7 +1,5 @@
import json
import requests
from requests import HTTPError
import base64
misperrors = {'error': 'Error'}
mispattributes = {'input': ['hostname', 'domain', 'ip-src', 'ip-dst', 'md5', 'sha1', 'sha256', 'sha512'],
@ -61,27 +59,27 @@ def get_domain(q):
continue
for result in results:
if flag == 1: #whois
if flag == 1: # whois
emails = result.get('whois', {}).get('emails')
if not emails:
continue
for em_type, email in emails.items():
ret.append({'types': ['whois-registrant-email'], 'values': [email], 'comment': desc.format(q, 'whois')})
if flag == 2: #pdns
if flag == 2: # pdns
ip = result.get('ip')
if ip:
ret.append({'types': ['ip-src', 'ip-dst'], 'values': [ip], 'comment': desc.format(q, 'pdns')})
if flag == 3: #uri
if flag == 3: # uri
uri = result.get('uri')
if uri:
ret.append({'types': ['url'], 'values': [uri], 'comment': desc.format(q, 'uri')})
if flag == 4: #samples
if flag == 4: # samples
if type(result) is str:
ret.append({'types': ['sha256'], 'values': [result], 'comment': desc.format(q, 'samples')})
if flag == 5: #subdomains
if flag == 5: # subdomains
if type(result) is str:
ret.append({'types': ['domain'], 'values': [result], 'comment': desc.format(q, 'subdomain')})
if flag == 6: #reports
if flag == 6: # reports
link = result.get('URL')
if link:
ret.append({'types': ['url'], 'values': [link], 'comment': desc.format(q, 'report')})
@ -100,27 +98,27 @@ def get_ip(q):
continue
for result in results:
if flag == 1: #whois
if flag == 1: # whois
emails = result.get('whois', {}).get('emails')
if not emails:
continue
for em_type, email in emails.items():
ret.append({'types': ['whois-registrant-email'], 'values': [email], 'comment': desc.format(q, 'whois')})
if flag == 2: #pdns
if flag == 2: # pdns
ip = result.get('ip')
if ip:
ret.append({'types': ['ip-src', 'ip-dst'], 'values': [ip], 'comment': desc.format(q, 'pdns')})
if flag == 3: #uri
if flag == 3: # uri
uri = result.get('uri')
if uri:
ret.append({'types': ['url'], 'values': [uri], 'comment': desc.format(q, 'uri')})
if flag == 4: #samples
if flag == 4: # samples
if type(result) is str:
ret.append({'types': ['sha256'], 'values': [result], 'comment': desc.format(q, 'samples')})
if flag == 5: #ssl
if flag == 5: # ssl
if type(result) is str:
ret.append({'types': ['x509-fingerprint-sha1'], 'values': [result], 'comment': desc.format(q, 'ssl')})
if flag == 6: #reports
if flag == 6: # reports
link = result.get('URL')
if link:
ret.append({'types': ['url'], 'values': [link], 'comment': desc.format(q, 'report')})
@ -139,11 +137,11 @@ def get_hash(q):
continue
for result in results:
if flag == 1: #meta (filename)
if flag == 1: # meta (filename)
name = result.get('file_name')
if name:
ret.append({'types': ['filename'], 'values': [name], 'comment': desc.format(q, 'file')})
if flag == 3: #network
if flag == 3: # network
domains = result.get('domains')
for dom in domains:
if dom.get('domain'):
@ -153,12 +151,12 @@ def get_hash(q):
for h in hosts:
if type(h) is str:
ret.append({'types': ['ip-src', 'ip-dst'], 'values': [h], 'comment': desc.format(q, 'network')})
if flag == 6: #detections
if flag == 6: # detections
detections = result.get('av_detections')
for d in detections:
if d.get('detection'):
ret.append({'types': ['text'], 'values': [d['detection']], 'comment': desc.format(q, 'detection')})
if flag == 7: #report
if flag == 7: # report
if type(result) is str:
ret.append({'types': ['sha256'], 'values': [result], 'comment': desc.format(q, 'report')})

View File

@ -169,8 +169,8 @@ def lookup_indicator(client, query):
'categories': ['External analysis'],
'values': image_url,
'comment': misp_comment})
### TO DO ###
### Add ability to add an in-line screenshot of the target website into an attribute
# ## TO DO ###
# ## Add ability to add an in-line screenshot of the target website into an attribute
# screenshot = requests.get(image_url).content
# r.append({'types': ['attachment'],
# 'categories': ['External analysis'],

View File

@ -19,6 +19,7 @@ moduleconfig = ["apikey", "event_limit"]
comment = '{}: Enriched via VirusTotal'
hash_types = ["md5", "sha1", "sha256", "sha512"]
class VirusTotalRequest(object):
def __init__(self, config):
self.apikey = config['apikey']
@ -146,6 +147,7 @@ class VirusTotalRequest(object):
self.to_return.append({"types": ["malware-sample"], "categories": ["Payload delivery"],
"values": data["submimssion_names"], "data": str(base64.b64encore(malsample), 'utf-8')})
def handler(q=False):
if q is False:
return False
@ -161,9 +163,11 @@ def handler(q=False):
return misperrors
return {'results': r}
def introspection():
return mispattributes
def version():
moduleinfo['config'] = moduleconfig
return moduleinfo

View File

@ -40,12 +40,12 @@ def handler(q=False):
data = request.get("data")
if 'malware-sample' in request:
# malicious samples are encrypted with zip (password infected) and then base64 encoded
sample_filename = request.get("malware-sample").split("|",1)[0]
sample_filename = request.get("malware-sample").split("|", 1)[0]
data = base64.b64decode(data)
fl = io.BytesIO(data)
zf = zipfile.ZipFile(fl)
sample_hashname = zf.namelist()[0]
data = zf.read(sample_hashname,b"infected")
data = zf.read(sample_hashname, b"infected")
zf.close()
elif 'attachment' in request:
# All attachments get base64 encoded
@ -55,7 +55,7 @@ def handler(q=False):
else:
misperrors['error'] = "No malware sample or attachment supplied"
return misperrors
except:
except Exception:
misperrors['error'] = "Unable to process submited sample data"
return misperrors
@ -102,7 +102,7 @@ def handler(q=False):
return misperrors
else:
return vmrayProcess(vmraydata)
except:
except Exception:
misperrors['error'] = "Problem when calling API."
return misperrors
else:
@ -148,7 +148,7 @@ def vmrayProcess(vmraydata):
else:
misperrors['error'] = "No valid results returned."
return misperrors
except:
except Exception:
misperrors['error'] = "No valid submission data returned."
return misperrors
else:

View File

@ -24,8 +24,8 @@ log.addHandler(ch)
misperrors = {'error': 'Error'}
mispattributes = {
'input': ['vulnerability'],
'output': ['text', 'link', 'cpe']}
'input': ['vulnerability'],
'output': ['text', 'link', 'cpe']}
moduleinfo = {'version': '0.1', 'author': 'Koen Van Impe',
'description': 'Query VulnDB - RiskBasedSecurity.com',
'module-type': ['expansion', 'hover']}
@ -61,7 +61,7 @@ def handler(q=False):
add_dates = True
add_ext_references = True
if request["config"].get("discard_dates") is not None and request["config"].get("discard_dates").lower() == "true":
if request["config"].get("discard_dates") is not None and request["config"].get("discard_dates").lower() == "true":
add_dates = False
if request["config"].get("discard_external_references") is not None and request["config"].get("discard_external_references").lower() == "true":
add_ext_references = False
@ -260,15 +260,15 @@ def handler(q=False):
values_text.append(vulnerability_classification)
# Finished processing the VulnDB reply; set the result for MISP
output['results'] += [{'types': 'text', 'values': values_text }]
output['results'] += [{'types': 'link', 'values': values_links }]
output['results'] += [{'types': 'text', 'values': values_text}]
output['results'] += [{'types': 'link', 'values': values_links}]
if add_cpe:
output['results'] += [{'types': 'cpe', 'values': values_cpe }]
output['results'] += [{'types': 'cpe', 'values': values_cpe}]
return output
else:
misperrors["error"] = "No information retrieved from VulnDB."
return misperrors
except:
except Exception:
misperrors["error"] = "Error while fetching information from VulnDB, wrong API keys?"
return misperrors

View File

@ -1,5 +1,4 @@
import json
import requests
import vulners
misperrors = {'error': 'Error'}
@ -48,7 +47,7 @@ def handler(q=False):
exploit_summary += " || " + str(len(vulners_exploits[0])) + " Public exploits available:\n "
for exploit in vulners_exploits[0]:
exploit_summary += exploit['title'] + " " + exploit['href'] + "\n "
exploit_summary += "|| Vulnerability Description: " + vuln_summary
exploit_summary += "|| Vulnerability Description: " + vuln_summary
summary = ai_summary + exploit_summary + vuln_summary

View File

@ -1,5 +1,4 @@
import json
import requests
from SPARQLWrapper import SPARQLWrapper, JSON
misperrors = {'error': 'Error'}
@ -20,10 +19,10 @@ def handler(q=False):
sparql = SPARQLWrapper(wiki_api_url)
query_string = \
"SELECT ?item \n" \
"WHERE { \n" \
"?item rdfs:label\"" + request.get('text') + "\" @en \n" \
"}\n";
"SELECT ?item \n" \
"WHERE { \n" \
"?item rdfs:label\"" + request.get('text') + "\" @en \n" \
"}\n"
sparql.setQuery(query_string)
sparql.setReturnFormat(JSON)
results = sparql.query().convert()
@ -47,4 +46,3 @@ def introspection():
def version():
moduleinfo['config'] = moduleconfig
return moduleinfo

View File

@ -5,17 +5,17 @@ import sys
BASEurl = "https://api.xforce.ibmcloud.com/"
extensions = {"ip1": "ipr/%s",
"ip2": "ipr/malware/%s",
"url": "url/%s",
"hash": "malware/%s",
"vuln": "/vulnerabilities/search/%s",
"dns": "resolve/%s"}
"ip2": "ipr/malware/%s",
"url": "url/%s",
"hash": "malware/%s",
"vuln": "/vulnerabilities/search/%s",
"dns": "resolve/%s"}
sys.path.append('./')
misperrors = {'error': 'Error'}
mispattributes = {'input': ['ip-src', 'ip-dst', 'vulnerability', 'md5', 'sha1', 'sha256'],
'output': ['ip-src', 'ip-dst', 'text', 'domain']}
'output': ['ip-src', 'ip-dst', 'text', 'domain']}
# possible module-types: 'expansion', 'hover' or both
moduleinfo = {'version': '1', 'author': 'Joerg Stephan (@johest)',
@ -24,78 +24,80 @@ moduleinfo = {'version': '1', 'author': 'Joerg Stephan (@johest)',
# config fields that your code expects from the site admin
moduleconfig = ["apikey", "event_limit"]
limit = 5000 #Default
limit = 5000 # Default
def MyHeader(key=False):
global limit
if key is False:
return None
global limit
if key is False:
return None
return {"Authorization": "Basic %s " % key,
"Accept": "application/json",
'User-Agent': 'Mozilla 5.0'}
return {"Authorization": "Basic %s " % key,
"Accept": "application/json",
'User-Agent': 'Mozilla 5.0'}
def handler(q=False):
global limit
if q is False:
return False
global limit
if q is False:
return False
q = json.loads(q)
q = json.loads(q)
key = q["config"]["apikey"]
limit = int(q["config"].get("event_limit", 5))
key = q["config"]["apikey"]
limit = int(q["config"].get("event_limit", 5))
r = {"results": []}
r = {"results": []}
if "ip-src" in q:
r["results"] += apicall("dns", q["ip-src"], key)
if "ip-dst" in q:
r["results"] += apicall("dns", q["ip-dst"], key)
if "md5" in q:
r["results"] += apicall("hash", q["md5"], key)
if "sha1" in q:
r["results"] += apicall("hash", q["sha1"], key)
if "sha256" in q:
r["results"] += apicall("hash", q["sha256"], key)
if 'vulnerability' in q:
r["results"] += apicall("vuln", q["vulnerability"], key)
if "domain" in q:
if "ip-src" in q:
r["results"] += apicall("dns", q["ip-src"], key)
if "ip-dst" in q:
r["results"] += apicall("dns", q["ip-dst"], key)
if "md5" in q:
r["results"] += apicall("hash", q["md5"], key)
if "sha1" in q:
r["results"] += apicall("hash", q["sha1"], key)
if "sha256" in q:
r["results"] += apicall("hash", q["sha256"], key)
if 'vulnerability' in q:
r["results"] += apicall("vuln", q["vulnerability"], key)
if "domain" in q:
r["results"] += apicall("dns", q["domain"], key)
uniq = []
for res in r["results"]:
if res not in uniq:
uniq.append(res)
r["results"] = uniq
return r
uniq = []
for res in r["results"]:
if res not in uniq:
uniq.append(res)
r["results"] = uniq
return r
def apicall(indicator_type, indicator, key=False):
try:
myURL = BASEurl + (extensions[str(indicator_type)])%indicator
jsondata = requests.get(myURL, headers=MyHeader(key)).json()
except:
jsondata = None
redata = []
#print(jsondata)
if not jsondata is None:
if indicator_type is "hash":
if "malware" in jsondata:
lopointer = jsondata["malware"]
redata.append({"type": "text", "values": lopointer["risk"]})
if indicator_type is "dns":
if "records" in str(jsondata):
lopointer = jsondata["Passive"]["records"]
for dataset in lopointer:
redata.append({"type":"domain", "values": dataset["value"]})
try:
myURL = BASEurl + (extensions[str(indicator_type)]) % indicator
jsondata = requests.get(myURL, headers=MyHeader(key)).json()
except Exception:
jsondata = None
redata = []
# print(jsondata)
if jsondata is not None:
if indicator_type is "hash":
if "malware" in jsondata:
lopointer = jsondata["malware"]
redata.append({"type": "text", "values": lopointer["risk"]})
if indicator_type is "dns":
if "records" in str(jsondata):
lopointer = jsondata["Passive"]["records"]
for dataset in lopointer:
redata.append({"type": "domain", "values": dataset["value"]})
return redata
return redata
def introspection():
return mispattributes
return mispattributes
def version():
moduleinfo['config'] = moduleconfig
return moduleinfo
moduleinfo['config'] = moduleconfig
return moduleinfo
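One thing the reindented apicall() above still does is compare strings with "is" (indicator_type is "hash"). That tests object identity rather than equality and only happens to work because of string interning; "==" is the robust comparison, and newer pyflakes releases flag the "is" form (F632). A one-line illustration:

    indicator_type = "hash"
    if indicator_type == "hash":  # "==" compares values; "is" compares identity
        print("hash lookup")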

View File

@ -13,11 +13,13 @@ moduleinfo = {'version': '1', 'author': 'Christian STUDER',
moduleconfig = []
mispattributes = {'input': ['md5', 'sha1', 'sha256', 'filename|md5', 'filename|sha1', 'filename|sha256', 'imphash'], 'output': ['yara']}
def get_hash_condition(hashtype, hashvalue):
hashvalue = hashvalue.lower()
required_module, params = ('pe', '()') if hashtype == 'imphash' else ('hash', '(0, filesize)')
return '{}.{}{} == "{}"'.format(required_module, hashtype, params, hashvalue), required_module
def handler(q=False):
if q is False:
return False
@ -42,9 +44,11 @@ def handler(q=False):
return misperrors
return {'results': [{'types': mispattributes['output'], 'values': rule}]}
def introspection():
return mispattributes
def version():
moduleinfo['config'] = moduleconfig
return moduleinfo

View File

@ -1,5 +1,4 @@
import json
import requests
try:
import yara
except (OSError, ImportError):
@ -20,7 +19,7 @@ def handler(q=False):
return misperrors
try:
rules = yara.compile(source=request.get('yara'))
yara.compile(source=request.get('yara'))
summary = ("Syntax valid")
except Exception as e:
summary = ("Syntax error: " + str(e))

View File

@ -1 +1,2 @@
__all__ = ['cef_export','liteexport','goamlexport','threat_connect_export','pdfexport','threatStream_misp_export', 'osqueryexport']
__all__ = ['cef_export', 'liteexport', 'goamlexport', 'threat_connect_export', 'pdfexport',
'threatStream_misp_export', 'osqueryexport']

View File

@ -12,30 +12,32 @@ moduleinfo = {'version': '1', 'author': 'Hannah Ward',
# config fields that your code expects from the site admin
moduleconfig = ["Default_Severity", "Device_Vendor", "Device_Product", "Device_Version"]
cefmapping = {"ip-src":"src", "ip-dst":"dst", "hostname":"dhost", "domain":"dhost",
"md5":"fileHash", "sha1":"fileHash", "sha256":"fileHash",
"url":"request"}
cefmapping = {"ip-src": "src", "ip-dst": "dst", "hostname": "dhost", "domain": "dhost",
"md5": "fileHash", "sha1": "fileHash", "sha256": "fileHash",
"url": "request"}
mispattributes = {'input':list(cefmapping.keys())}
mispattributes = {'input': list(cefmapping.keys())}
outputFileExtension = "cef"
responseType = "application/txt"
def handler(q=False):
if q is False:
return False
request = json.loads(q)
if "config" in request:
config = request["config"]
config = request["config"]
else:
config = {"Default_Severity":1, "Device_Vendor":"MISP", "Device_Product":"MISP", "Device_Version":1}
config = {"Default_Severity": 1, "Device_Vendor": "MISP",
"Device_Product": "MISP", "Device_Version": 1}
data = request["data"]
response = ""
for ev in data:
event = ev["Attribute"]
for attr in event:
if attr["type"] in cefmapping:
response += "{} host CEF:0|{}|{}|{}|{}|{}|{}|{}={}\n".format(
event = ev["Attribute"]
for attr in event:
if attr["type"] in cefmapping:
response += "{} host CEF:0|{}|{}|{}|{}|{}|{}|{}={}\n".format(
datetime.datetime.fromtimestamp(int(attr["timestamp"])).strftime("%b %d %H:%M:%S"),
config["Device_Vendor"],
config["Device_Product"],
@ -45,37 +47,37 @@ def handler(q=False):
config["Default_Severity"],
cefmapping[attr["type"]],
attr["value"],
)
)
r = {"response":[], "data":str(base64.b64encode(bytes(response, 'utf-8')), 'utf-8')}
r = {"response": [], "data": str(base64.b64encode(bytes(response, 'utf-8')), 'utf-8')}
return r
def introspection():
modulesetup = {}
try:
modulesetup = {}
try:
responseType
modulesetup['responseType'] = responseType
except NameError:
pass
try:
userConfig
modulesetup['userConfig'] = userConfig
except NameError:
pass
try:
outputFileExtension
modulesetup['outputFileExtension'] = outputFileExtension
except NameError:
pass
try:
inputSource
modulesetup['inputSource'] = inputSource
except NameError:
pass
return modulesetup
except NameError:
pass
try:
userConfig
modulesetup['userConfig'] = userConfig
except NameError:
pass
try:
outputFileExtension
modulesetup['outputFileExtension'] = outputFileExtension
except NameError:
pass
try:
inputSource
modulesetup['inputSource'] = inputSource
except NameError:
pass
return modulesetup
def version():
moduleinfo['config'] = moduleconfig
return moduleinfo
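For reference, how the cefmapping table above is applied to a single attribute; the attribute is a made-up example, and only the mapped key/value pair is shown, not the full CEF header:

attr = {"type": "ip-src", "value": "198.51.100.7", "timestamp": "1544535600"}
if attr["type"] in cefmapping:
    cef_key = cefmapping[attr["type"]]                     # "src"
    extension = "{}={}".format(cef_key, attr["value"])     # "src=198.51.100.7"
    # handler() places this pair in the last fields of the "CEF:0|...|" line built above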

View File

@ -27,7 +27,7 @@ goAMLmapping = {'bank-account': {'bank-account': 't_account', 'institution-name'
'person': {'person': 't_person', 'text': 'comments', 'first-name': 'first_name',
'middle-name': 'middle_name', 'last-name': 'last_name', 'title': 'title',
'mothers-name': 'mothers_name', 'alias': 'alias', 'date-of-birth': 'birthdate',
'place-of-birth': 'birth_place', 'gender': 'gender','nationality': 'nationality1',
'place-of-birth': 'birth_place', 'gender': 'gender', 'nationality': 'nationality1',
'passport-number': 'passport_number', 'passport-country': 'passport_country',
'social-security-number': 'ssn', 'identity-card-number': 'id_number'},
'geolocation': {'geolocation': 'location', 'city': 'city', 'region': 'state',
@ -48,6 +48,7 @@ referencesMapping = {'bank-account': {'aml_type': '{}_account', 'bracket': 't_{}
'legal-entity': {'transaction': {'aml_type': '{}_entity', 'bracket': 't_{}'}, 'bank-account': {'aml_type': 't_entity'}},
'geolocation': {'aml_type': 'address', 'bracket': 'addresses'}}
class GoAmlGeneration(object):
def __init__(self, config):
self.config = config
@ -186,6 +187,7 @@ class GoAmlGeneration(object):
self.itterate(next_object_type, next_aml_type, uuid, xml_part)
self.xml[xml_part] += "</{}>".format(bracket)
def handler(q=False):
if q is False:
return False
@ -212,6 +214,7 @@ def handler(q=False):
exp_doc = "{}{}".format(export_doc.xml.get('header'), export_doc.xml.get('data'))
return {'response': [], 'data': str(base64.b64encode(bytes(exp_doc, 'utf-8')), 'utf-8')}
def introspection():
modulesetup = {}
try:
@ -236,6 +239,7 @@ def introspection():
pass
return modulesetup
def version():
moduleinfo['config'] = moduleconfig
return moduleinfo

View File

@ -4,9 +4,9 @@ import base64
misperrors = {'error': 'Error'}
moduleinfo = {'version': '1',
'author': 'TM',
'description': 'export lite',
'module-type': ['export']}
'author': 'TM',
'description': 'export lite',
'module-type': ['export']}
moduleconfig = ["indent_json_export"]
@ -14,76 +14,75 @@ mispattributes = {}
outputFileExtension = "json"
responseType = "application/json"
def handler(q=False):
if q is False:
return False
if q is False:
return False
request = json.loads(q)
request = json.loads(q)
config = {}
if "config" in request:
config = request["config"]
else:
config = {"indent_json_export" : None}
config = {}
if "config" in request:
config = request["config"]
else:
config = {"indent_json_export": None}
if config['indent_json_export'] is not None:
try:
config['indent_json_export'] = int(config['indent_json_export'])
except:
config['indent_json_export'] = None
if config['indent_json_export'] is not None:
try:
config['indent_json_export'] = int(config['indent_json_export'])
except Exception:
config['indent_json_export'] = None
if 'data' not in request:
return False
if 'data' not in request:
return False
#~ Misp json structur
liteEvent = {'Event':{}}
# ~ Misp json structure
liteEvent = {'Event': {}}
for evt in request['data']:
rawEvent = evt['Event']
liteEvent['Event']['info'] = rawEvent['info']
liteEvent['Event']['Attribute'] = []
for evt in request['data']:
rawEvent = evt['Event']
liteEvent['Event']['info'] = rawEvent['info']
liteEvent['Event']['Attribute'] = []
attrs = evt['Attribute']
for attr in attrs:
if 'Internal reference' not in attr['category']:
liteAttr = {}
liteAttr['category'] = attr['category']
liteAttr['type'] = attr['type']
liteAttr['value'] = attr['value']
liteEvent['Event']['Attribute'].append(liteAttr)
attrs = evt['Attribute']
for attr in attrs:
if 'Internal reference' not in attr['category']:
liteAttr = {}
liteAttr['category'] = attr['category']
liteAttr['type'] = attr['type']
liteAttr['value'] = attr['value']
liteEvent['Event']['Attribute'].append(liteAttr)
return {'response': [],
'data': str(base64.b64encode(bytes(
json.dumps(liteEvent, indent=config['indent_json_export']), 'utf-8')), 'utf-8')}
return {'response' : [],
'data' : str(base64.b64encode(
bytes(
json.dumps(liteEvent, indent=config['indent_json_export']),
'utf-8')),
'utf-8')
}
def introspection():
modulesetup = {}
try:
responseType
modulesetup['responseType'] = responseType
except NameError:
pass
try:
userConfig
modulesetup['userConfig'] = userConfig
except NameError:
pass
try:
outputFileExtension
modulesetup['outputFileExtension'] = outputFileExtension
except NameError:
pass
try:
inputSource
modulesetup['inputSource'] = inputSource
except NameError:
pass
return modulesetup
modulesetup = {}
try:
responseType
modulesetup['responseType'] = responseType
except NameError:
pass
try:
userConfig
modulesetup['userConfig'] = userConfig
except NameError:
pass
try:
outputFileExtension
modulesetup['outputFileExtension'] = outputFileExtension
except NameError:
pass
try:
inputSource
modulesetup['inputSource'] = inputSource
except NameError:
pass
return modulesetup
def version():
moduleinfo['config'] = moduleconfig
return moduleinfo
moduleinfo['config'] = moduleconfig
return moduleinfo
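For reference, the 'lite' structure the handler assembles before JSON-encoding and base64-encoding it into the data field of the response; the event content is hypothetical:

liteEvent = {
    'Event': {
        'info': 'Example event',
        'Attribute': [
            {'category': 'Network activity', 'type': 'ip-dst', 'value': '198.51.100.7'}
        ]
    }
}
# handler() returns {'response': [], 'data': base64(json.dumps(liteEvent, indent=config['indent_json_export']))}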

View File

@ -13,7 +13,7 @@ types_to_use = ['regkey', 'regkey|value', 'mutex', 'windows-service-displayname'
userConfig = {
};
}
moduleconfig = []
inputSource = ['event']
@ -26,6 +26,7 @@ moduleinfo = {'version': '1.0', 'author': 'Julien Bachmann, Hacknowledge',
'description': 'OSQuery query export module',
'module-type': ['export']}
def handle_regkey(value):
rep = {'HKCU': 'HKEY_USERS\\%', 'HKLM': 'HKEY_LOCAL_MACHINE'}
rep = dict((re.escape(k), v) for k, v in rep.items())
@ -33,6 +34,7 @@ def handle_regkey(value):
value = pattern.sub(lambda m: rep[re.escape(m.group(0))], value)
return 'SELECT * FROM registry WHERE path LIKE \'%s\';' % value
def handle_regkeyvalue(value):
key, value = value.split('|')
rep = {'HKCU': 'HKEY_USERS\\%', 'HKLM': 'HKEY_LOCAL_MACHINE'}
@ -41,27 +43,33 @@ def handle_regkeyvalue(value):
key = pattern.sub(lambda m: rep[re.escape(m.group(0))], key)
return 'SELECT * FROM registry WHERE path LIKE \'%s\' AND data LIKE \'%s\';' % (key, value)
def handle_mutex(value):
return 'SELECT * FROM winbaseobj WHERE object_name LIKE \'%s\';' % value
def handle_service(value):
return 'SELECT * FROM services WHERE display_name LIKE \'%s\' OR name like \'%s\';' % (value, value)
def handle_yara(value):
return 'not implemented yet, not sure it\'s easily feasible w/o dropping the sig on the hosts first'
def handle_scheduledtask(value):
return 'SELECT * FROM scheduled_tasks WHERE name LIKE \'%s\';' % value
handlers = {
'regkey' : handle_regkey,
'regkey|value' : handle_regkeyvalue,
'mutex' : handle_mutex,
'windows-service-displayname' : handle_service,
'windows-scheduled-task' : handle_scheduledtask,
'yara' : handle_yara
'regkey': handle_regkey,
'regkey|value': handle_regkeyvalue,
'mutex': handle_mutex,
'windows-service-displayname': handle_service,
'windows-scheduled-task': handle_scheduledtask,
'yara': handle_yara
}
def handler(q=False):
if q is False:
return False
@ -73,7 +81,7 @@ def handler(q=False):
for attribute in event["Attribute"]:
if attribute['type'] in types_to_use:
output = output + handlers[attribute['type']](attribute['value']) + '\n'
r = {"response":[], "data":str(base64.b64encode(bytes(output, 'utf-8')), 'utf-8')}
r = {"response": [], "data": str(base64.b64encode(bytes(output, 'utf-8')), 'utf-8')}
return r
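For reference, the kind of statements the handlers above produce; the attribute values are invented:

handle_regkey('HKLM\\SOFTWARE\\Example\\Run')
# SELECT * FROM registry WHERE path LIKE 'HKEY_LOCAL_MACHINE\SOFTWARE\Example\Run';
handle_mutex('Global\\ExampleMutex')
# SELECT * FROM winbaseobj WHERE object_name LIKE 'Global\ExampleMutex';
handle_service('ExampleSvc')
# SELECT * FROM services WHERE display_name LIKE 'ExampleSvc' OR name like 'ExampleSvc';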

View File

@ -126,7 +126,7 @@ class ReportGenerator():
summary = a.value
return title.format(internal_id=internal_id, title=self.misp_event.info,
summary=summary)
summary=summary)
def asciidoc(self, lang='en'):
self.report += self.title()

View File

@ -1,13 +1,12 @@
import json
import base64
import csv
misperrors = {'error': 'Error'}
userConfig = {
};
}
moduleconfig = []
@ -28,9 +27,9 @@ def handler(q=False):
if q is False:
return False
r = {'results': []}
result = json.loads(q)
output = ''; # Insert your magic here!
r = {"data":base64.b64encode(output.encode('utf-8')).decode('utf-8')}
result = json.loads(q) # noqa
output = '' # Insert your magic here!
r = {"data": base64.b64encode(output.encode('utf-8')).decode('utf-8')}
return r

View File

@ -49,10 +49,8 @@ def handler(q=False):
if q is False or not q:
return False
request = json.loads(q)
response = io.StringIO()
writer = csv.DictWriter(response, fieldnames=["value", "itype", "tags"])
writer.writeheader()

View File

@ -1,3 +1,3 @@
from . import _vmray
from . import _vmray # noqa
__all__ = ['vmray_import', 'ocr', 'cuckooimport', 'goamlimport', 'email_import', 'mispjson', 'openiocimport', 'threatanalyzer_import', 'csvimport']

View File

@ -1,5 +1,7 @@
# -*- coding: utf-8 -*-
import json, os, base64
import json
import os
import base64
import pymisp
misperrors = {'error': 'Error'}
@ -9,18 +11,19 @@ moduleinfo = {'version': '0.1', 'author': 'Christian Studer',
moduleconfig = []
inputSource = ['file']
userConfig = {'header': {
'type': 'String',
'message': 'Define the header of the csv file, with types (included in MISP attribute types or attribute fields) separated by commas.\nFor fields that do not match these types, please use space or simply nothing between commas.\nFor instance: ip-src,domain, ,timestamp'},
'has_header':{
'type': 'Boolean',
'message': 'Tick this box ONLY if there is a header line, NOT COMMENTED, in the file (which will be skipped atm).'
}}
'type': 'String',
'message': 'Define the header of the csv file, with types (included in MISP attribute types or attribute fields) separated by commas.\nFor fields that do not match these types, please use space or simply nothing between commas.\nFor instance: ip-src,domain, ,timestamp'},
'has_header': {
'type': 'Boolean',
'message': 'Tick this box ONLY if there is a header line, NOT COMMENTED, in the file (which will be skipped atm).'
}}
duplicatedFields = {'mispType': {'mispComment': 'comment'},
'attrField': {'attrComment': 'comment'}}
attributesFields = ['type', 'value', 'category', 'to_ids', 'comment', 'distribution']
delimiters = [',', ';', '|', '/', '\t', ' ']
class CsvParser():
def __init__(self, header, has_header):
self.header = header
@ -32,17 +35,17 @@ class CsvParser():
return_data = []
if self.fields_number == 1:
for line in data:
l = line.split('#')[0].strip()
if l:
return_data.append(l)
line = line.split('#')[0].strip()
if line:
return_data.append(line)
self.delimiter = None
else:
self.delimiter_count = dict([(d, 0) for d in delimiters])
for line in data:
l = line.split('#')[0].strip()
if l:
self.parse_delimiter(l)
return_data.append(l)
line = line.split('#')[0].strip()
if line:
self.parse_delimiter(line)
return_data.append(line)
# find which delimiter is used
self.delimiter = self.find_delimiter()
self.data = return_data[1:] if self.has_header else return_data
@ -115,6 +118,7 @@ class CsvParser():
# return list of indexes of the misp types, list of the misp types, remaining fields that will be attribute fields
return list2pop, misp, list(reversed(head))
def handler(q=False):
if q is False:
return False
@ -138,6 +142,7 @@ def handler(q=False):
r = {'results': csv_parser.attributes}
return r
def introspection():
modulesetup = {}
try:
@ -152,6 +157,7 @@ def introspection():
pass
return modulesetup
def version():
moduleinfo['config'] = moduleconfig
return moduleinfo
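A compact sketch of the comment-stripping and delimiter-detection idea visible above; the actual parse_delimiter/find_delimiter methods are only partly shown in this hunk, so this is an approximation of the idea rather than the module's exact code:

delimiters = [',', ';', '|', '/', '\t', ' ']

def guess_delimiter(lines):
    # Strip inline '#' comments, skip empty lines, count candidate delimiters
    counts = dict((d, 0) for d in delimiters)
    cleaned = []
    for line in lines:
        line = line.split('#')[0].strip()
        if line:
            for d in delimiters:
                counts[d] += line.count(d)
            cleaned.append(line)
    return max(counts, key=counts.get), cleaned

print(guess_delimiter(['# comment line', '8.8.8.8,example.com', '203.0.113.7,example.org']))
# (',', ['8.8.8.8,example.com', '203.0.113.7,example.org'])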

View File

@ -1,6 +1,4 @@
import json
import logging
import sys
import base64
misperrors = {'error': 'Error'}
@ -13,6 +11,7 @@ moduleinfo = {'version': '0.1', 'author': 'Victor van der Stoep',
moduleconfig = []
def handler(q=False):
# Just in case we have no data
if q is False:
@ -35,12 +34,12 @@ def handler(q=False):
targetFile = data['target']['file']
# Process the inital binary
processBinary(r, targetFile, initial = True)
processBinary(r, targetFile, initial=True)
# Get binary information for dropped files
if(data.get('dropped')):
for droppedFile in data['dropped']:
processBinary(r, droppedFile, dropped = True)
processBinary(r, droppedFile, dropped=True)
# Add malscore to results
r["results"].append({
@ -63,6 +62,7 @@ def handler(q=False):
# Return
return r
def processSummary(r, summary):
r["results"].append({
"values": summary['mutexes'],
@ -71,6 +71,7 @@ def processSummary(r, summary):
"comment": "Cuckoo analysis: Observed mutexes"
})
def processVT(r, virustotal):
category = "Antivirus detection"
comment = "VirusTotal analysis"
@ -114,7 +115,7 @@ def processNetwork(r, network):
})
def processBinary(r, target, initial = False, dropped = False):
def processBinary(r, target, initial=False, dropped=False):
if(initial):
comment = "Cuckoo analysis: Initial file"
category = "Payload delivery"
@ -187,6 +188,7 @@ def version():
moduleinfo['config'] = moduleconfig
return moduleinfo
if __name__ == '__main__':
x = open('test.json', 'r')
q = []

View File

@ -115,7 +115,7 @@ def handler(q=False):
email_targets = set()
for rec in received:
try:
email_check = re.search("for\s(.*@.*);", rec).group(1)
email_check = re.search(r"for\s(.*@.*);", rec).group(1)
email_check = email_check.strip(' <>')
email_targets.add(parseaddr(email_check)[1])
except (AttributeError):
@ -166,7 +166,7 @@ def handler(q=False):
for ext in zipped_files:
if filename.endswith(ext) is True:
zipped_filetype = True
if zipped_filetype == False:
if not zipped_filetype:
try:
attachment_files += get_zipped_contents(filename, attachment_data)
except RuntimeError: # File is encrypted with a password
@ -294,7 +294,7 @@ def get_zip_passwords(message):
# Grab any strings that are marked off by special chars
marking_chars = [["\'", "\'"], ['"', '"'], ['[', ']'], ['(', ')']]
for char_set in marking_chars:
regex = re.compile("""\{0}([^\{1}]*)\{1}""".format(char_set[0], char_set[1]))
regex = re.compile(r"""\{0}([^\{1}]*)\{1}""".format(char_set[0], char_set[1]))
marked_off = re.findall(regex, raw_text)
possible_passwords += marked_off
@ -397,6 +397,7 @@ def version():
moduleinfo['config'] = moduleconfig
return moduleinfo
if __name__ == '__main__':
with open('tests/test_no_attach.eml', 'r') as email_file:
handler(q=email_file.read())
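For reference, the raw-string regex above applied to a made-up Received header:

import re
from email.utils import parseaddr

rec = "from mx.example.com (mx.example.com [203.0.113.5]) by mail.example.org for <alice@example.org>; Tue, 11 Dec 2018 15:29:09 +0100"
email_check = re.search(r"for\s(.*@.*);", rec).group(1)   # '<alice@example.org>'
email_check = email_check.strip(' <>')                    # 'alice@example.org'
print(parseaddr(email_check)[1])                          # 'alice@example.org'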

View File

@ -1,6 +1,7 @@
import json, datetime, time, base64
import json
import time
import base64
import xml.etree.ElementTree as ET
from collections import defaultdict
from pymisp import MISPEvent, MISPObject
misperrors = {'error': 'Error'}
@ -11,12 +12,12 @@ moduleconfig = []
mispattributes = {'inputSource': ['file'], 'output': ['MISP objects']}
t_from_objects = {'nodes': ['from_person', 'from_account', 'from_entity'],
'leaves': ['from_funds_code', 'from_country']}
'leaves': ['from_funds_code', 'from_country']}
t_to_objects = {'nodes': ['to_person', 'to_account', 'to_entity'],
'leaves': ['to_funds_code', 'to_country']}
'leaves': ['to_funds_code', 'to_country']}
t_person_objects = {'nodes': ['addresses'],
'leaves': ['first_name', 'middle_name', 'last_name', 'gender', 'title', 'mothers_name', 'birthdate',
'passport_number', 'passport_country', 'id_number', 'birth_place', 'alias', 'nationality1']}
'leaves': ['first_name', 'middle_name', 'last_name', 'gender', 'title', 'mothers_name', 'birthdate',
'passport_number', 'passport_country', 'id_number', 'birth_place', 'alias', 'nationality1']}
t_account_objects = {'nodes': ['signatory'],
'leaves': ['institution_name', 'institution_code', 'swift', 'branch', 'non_banking_insitution',
'account', 'currency_code', 'account_name', 'iban', 'client_number', 'opened', 'closed',
@ -51,7 +52,7 @@ t_account_mapping = {'misp_name': 'bank-account', 'institution_name': 'instituti
t_person_mapping = {'misp_name': 'person', 'comments': 'text', 'first_name': 'first-name', 'middle_name': 'middle-name',
'last_name': 'last-name', 'title': 'title', 'mothers_name': 'mothers-name', 'alias': 'alias',
'birthdate': 'date-of-birth', 'birth_place': 'place-of-birth', 'gender': 'gender','nationality1': 'nationality',
'birthdate': 'date-of-birth', 'birth_place': 'place-of-birth', 'gender': 'gender', 'nationality1': 'nationality',
'passport_number': 'passport-number', 'passport_country': 'passport-country', 'ssn': 'social-security-number',
'id_number': 'identity-card-number'}
@ -73,6 +74,7 @@ goAMLmapping = {'from_account': t_account_mapping, 'to_account': t_account_mappi
nodes_to_ignore = ['addresses', 'signatory']
relationship_to_keep = ['signatory', 't_from', 't_from_my_client', 't_to', 't_to_my_client', 'address']
class GoAmlParser():
def __init__(self):
self.misp_event = MISPEvent()
@ -145,6 +147,7 @@ class GoAmlParser():
to_country_attribute = {'object_relation': 'to-country', 'value': to_country}
misp_object.add_attribute(**to_country_attribute)
def handler(q=False):
if q is False:
return False
@ -157,16 +160,18 @@ def handler(q=False):
aml_parser = GoAmlParser()
try:
aml_parser.read_xml(data)
except:
except Exception:
misperrors['error'] = "Impossible to read XML data"
return misperrors
aml_parser.parse_xml()
r = {'results': [obj.to_json() for obj in aml_parser.misp_event.objects]}
return r
def introspection():
return mispattributes
def version():
moduleinfo['config'] = moduleconfig
return moduleinfo

View File

@ -2,7 +2,7 @@ import json
import base64
misperrors = {'error': 'Error'}
userConfig = { };
userConfig = {}
inputSource = ['file']
@ -19,23 +19,24 @@ def handler(q=False):
r = {'results': []}
request = json.loads(q)
try:
mfile = base64.b64decode(request["data"]).decode('utf-8')
misp = json.loads(mfile)
event = misp['response'][0]['Event']
for a in event["Attribute"]:
tmp = {}
tmp["values"] = a["value"]
tmp["categories"] = a["category"]
tmp["types"] = a["type"]
tmp["to_ids"] = a["to_ids"]
tmp["comment"] = a["comment"]
if a.get("data"):
tmp["data"] = a["data"]
r['results'].append(tmp)
except:
pass
mfile = base64.b64decode(request["data"]).decode('utf-8')
misp = json.loads(mfile)
event = misp['response'][0]['Event']
for a in event["Attribute"]:
tmp = {}
tmp["values"] = a["value"]
tmp["categories"] = a["category"]
tmp["types"] = a["type"]
tmp["to_ids"] = a["to_ids"]
tmp["comment"] = a["comment"]
if a.get("data"):
tmp["data"] = a["data"]
r['results'].append(tmp)
except Exception:
pass
return r
def introspection():
modulesetup = {}
try:
@ -55,6 +56,7 @@ def version():
moduleinfo['config'] = moduleconfig
return moduleinfo
if __name__ == '__main__':
x = open('test.json', 'r')
r = handler(q=x.read())

View File

@ -14,7 +14,7 @@ ch.setFormatter(formatter)
log.addHandler(ch)
misperrors = {'error': 'Error'}
userConfig = {};
userConfig = {}
inputSource = ['file']
@ -55,17 +55,17 @@ def handler(q=False):
if document.format == 'PDF':
with document as pdf:
# Get number of pages
pages=len(pdf.sequence)
pages = len(pdf.sequence)
log.debug("PDF with {} page(s) detected".format(pages))
# Create new image object where the height will be the number of pages. With huge PDFs this will overflow, break, consume silly memory etc…
img = WImage(width=pdf.width, height=pdf.height * pages)
# Cycle through pages and stitch it together to one big file
for p in range(pages):
log.debug("Stitching page {}".format(p+1))
log.debug("Stitching page {}".format(p + 1))
image = img.composite(pdf.sequence[p], top=pdf.height * p, left=0)
# Create a png blob
image = img.make_blob('png')
log.debug("Final image size is {}x{}".format(pdf.width, pdf.height*(p+1)))
log.debug("Final image size is {}x{}".format(pdf.width, pdf.height * (p + 1)))
else:
image = document
@ -78,7 +78,6 @@ def handler(q=False):
misperrors['error'] = "Corrupt or not an image file."
return misperrors
ocrized = image_to_string(im)
freetext = {}
@ -107,6 +106,7 @@ def version():
moduleinfo['config'] = moduleconfig
return moduleinfo
if __name__ == '__main__':
x = open('test.json', 'r')
handler(q=x.read())
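A condensed sketch of the page-stitching idea used above, outside the module; it assumes Wand/ImageMagick are installed, and the filename and resolution below are added for illustration only:

from wand.image import Image as WImage

with WImage(filename='sample.pdf', resolution=150) as pdf:
    pages = len(pdf.sequence)
    # One tall canvas, pages stacked one under the other
    with WImage(width=pdf.width, height=pdf.height * pages) as img:
        for p in range(pages):
            img.composite(pdf.sequence[p], left=0, top=pdf.height * p)
        png_blob = img.make_blob('png')   # the module then feeds this blob to PIL / pytesseract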

View File

@ -1,28 +1,27 @@
import json
import base64
import csv
misperrors = {'error': 'Error'}
userConfig = {
'number1': {
'type': 'Integer',
'regex': '/^[0-4]$/i',
'errorMessage': 'Expected a number in range [0-4]',
'message': 'Column number used for value'
},
'some_string': {
'type': 'String',
'message': 'A text field'
},
'boolean_field': {
'type': 'Boolean',
'message': 'Boolean field test'
},
'comment': {
'type': 'Integer',
'message': 'Column number used for comment'
}
};
'number1': {
'type': 'Integer',
'regex': '/^[0-4]$/i',
'errorMessage': 'Expected a number in range [0-4]',
'message': 'Column number used for value'
},
'some_string': {
'type': 'String',
'message': 'A text field'
},
'boolean_field': {
'type': 'Boolean',
'message': 'Boolean field test'
},
'comment': {
'type': 'Integer',
'message': 'Column number used for comment'
}
}
inputSource = ['file', 'paste']
@ -39,8 +38,8 @@ def handler(q=False):
r = {'results': []}
request = json.loads(q)
request["data"] = base64.b64decode(request["data"])
fields = ["value", "category", "type", "comment"]
r = {"results":[{"values":["192.168.56.1"], "types":["ip-src"], "categories":["Network activity"]}]}
# fields = ["value", "category", "type", "comment"]
r = {"results": [{"values": ["192.168.56.1"], "types":["ip-src"], "categories": ["Network activity"]}]}
return r

View File

@ -90,7 +90,7 @@ def handler(q=False):
'values': sample_filename,
'data': base64.b64encode(file_data).decode(),
'type': 'malware-sample', 'categories': ['Payload delivery', 'Artifacts dropped'], 'to_ids': True, 'comment': ''})
except Exception as e:
except Exception:
# no 'sample' in archive, might be an url analysis, just ignore
pass
@ -171,7 +171,7 @@ def process_analysis_json(analysis_json):
if ':' in val:
try:
val_port = int(val.split(':')[1])
except ValueError as e:
except ValueError:
val_port = False
val_hostname = cleanup_hostname(val.split(':')[0])
val_ip = cleanup_ip(val.split(':')[0])

View File

@ -105,8 +105,8 @@ def handler(q=False):
url1 = "https://cloud.vmray.com/user/analysis/view?from_sample_id=%u" % sample_id
url2 = "&id=%u" % analysis_id
url3 = "&sub=%2Freport%2Foverview.html"
a_id["results"].append({ "values": url1 + url2 + url3, "types": "link" })
vmray_results = {'results': vmray_results["results"] + a_id["results"] }
a_id["results"].append({"values": url1 + url2 + url3, "types": "link"})
vmray_results = {'results': vmray_results["results"] + a_id["results"]}
# Clean up (remove doubles)
if vti_patterns_found:
vmray_results = vmrayCleanup(vmray_results)
@ -117,7 +117,7 @@ def handler(q=False):
else:
misperrors['error'] = "Unable to fetch sample id %u" % (sample_id)
return misperrors
except:
except Exception:
misperrors['error'] = "Unable to access VMRay API"
return misperrors
else:
@ -267,7 +267,7 @@ def vmrayGeneric(el, attr="", attrpos=1):
if content:
if attr:
# Some elements are put between \"\" ; replace them to single
content = content.replace("\"\"","\"")
content = content.replace("\"\"", "\"")
content_split = content.split("\"")
# Attributes are between open " and close "; so use >
if len(content_split) > attrpos: