import json
import requests
from requests import HTTPError
import base64
from collections import defaultdict

misperrors = {'error': 'Error'}
mispattributes = {'input': ['hostname', 'domain', "ip-src", "ip-dst", "md5", "sha1", "sha256", "sha512"],
                  'output': ['domain', "ip-src", "ip-dst", "text", "md5", "sha1", "sha256", "sha512", "ssdeep",
                             "authentihash", "filename"]}

# possible module-types: 'expansion', 'hover' or both
moduleinfo = {'version': '3', 'author': 'Hannah Ward',
              'description': 'Get information from virustotal',
              'module-type': ['expansion']}

# config fields that your code expects from the site admin
moduleconfig = ["apikey", "event_limit"]
comment = '{}: Enriched via VirusTotal'
hash_types = ["md5", "sha1", "sha256", "sha512"]


class VirusTotalRequest(object):
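    """Wrapper around the VirusTotal v2 API used to enrich MISP attributes.

    Results are accumulated in self.results as sets keyed either by a MISP type
    or by a (types, comment) tuple, then flattened into the MISP module response
    format by parse_request.
    """
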
    def __init__(self, config):
        self.apikey = config['apikey']
        self.limit = int(config.get('event_limit', 5))
        self.base_url = "https://www.virustotal.com/vtapi/v2/{}/report"
        self.results = defaultdict(set)
        self.to_return = []
        self.input_types_mapping = {'ip-src': self.get_ip, 'ip-dst': self.get_ip,
                                    'domain': self.get_domain, 'hostname': self.get_domain,
                                    'md5': self.get_hash, 'sha1': self.get_hash,
                                    'sha256': self.get_hash, 'sha512': self.get_hash}
        self.output_types_mapping = {'submission_names': 'filename', 'ssdeep': 'ssdeep',
                                     'authentihash': 'authentihash', 'ITW_urls': 'url'}

    def parse_request(self, q):
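        """Dispatch each attribute to the matching VirusTotal lookup and return the results for MISP."""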
        req_values = set()
        for attribute_type, attribute_value in q.items():
            req_values.add(attribute_value)
            try:
                error = self.input_types_mapping[attribute_type](attribute_value)
            except KeyError:
                continue
            if error is not None:
                return error
        for key, values in self.results.items():
            values = values.difference(req_values)
            if values:
                if isinstance(key, tuple):
                    types, comment = key
                    self.to_return.append({'types': list(types), 'values': list(values), 'comment': comment})
                else:
                    self.to_return.append({'types': key, 'values': list(values)})
        return self.to_return

    def get_domain(self, domain, do_not_recurse=False):
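        """Query the domain report and pivot on resolved IP addresses."""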
        req = requests.get(self.base_url.format('domain'), params={'domain': domain, 'apikey': self.apikey})
        try:
            req.raise_for_status()
            req = req.json()
        except HTTPError as e:
            return str(e)
        if req["response_code"] == 0:
            # Nothing found
            return []
        if "resolutions" in req:
            for res in req["resolutions"][:self.limit]:
                ip_address = res["ip_address"]
                self.results[(("ip-dst", "ip-src"), comment.format(domain))].add(ip_address)
                # Pivot from here to find all domain info
                if not do_not_recurse:
                    error = self.get_ip(ip_address, True)
                    if error is not None:
                        return error
        self.get_more_info(req)

    def get_hash(self, _hash):
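        """Query the file report for a hash and extract additional indicators."""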
        req = requests.get(self.base_url.format('file'), params={'resource': _hash, 'apikey': self.apikey, 'allinfo': 1})
        try:
            req.raise_for_status()
            req = req.json()
        except HTTPError as e:
            return str(e)
        if req["response_code"] == 0:
            # Nothing found
            return []
        self.get_more_info(req)

    def get_ip(self, ip, do_not_recurse=False):
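        """Query the IP address report and pivot on resolved hostnames."""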
        req = requests.get(self.base_url.format('ip-address'), params={'ip': ip, 'apikey': self.apikey})
        try:
            req.raise_for_status()
            req = req.json()
        except HTTPError as e:
            return str(e)
        if req["response_code"] == 0:
            # Nothing found
            return []
        if "resolutions" in req:
            for res in req["resolutions"][:self.limit]:
                hostname = res["hostname"]
                self.results[(("domain",), comment.format(ip))].add(hostname)
                # Pivot from here to find all domain info
                if not do_not_recurse:
                    error = self.get_domain(hostname, True)
                    if error is not None:
                        return error
        self.get_more_info(req)

    def find_all(self, data):
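        """Recursively collect every hash found in a nested VirusTotal response."""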
        hashes = []
        if isinstance(data, dict):
            for key, value in data.items():
                if key in hash_types:
                    self.results[key].add(value)
                    hashes.append(value)
                elif isinstance(value, (dict, list)):
                    hashes.extend(self.find_all(value))
        elif isinstance(data, list):
            for d in data:
                hashes.extend(self.find_all(d))
        return hashes

    def get_more_info(self, req):
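        """Fetch full file reports for the hashes found in a response and map selected fields to MISP attributes."""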
        # Get all hashes first
        hashes = self.find_all(req)
        for h in hashes[:self.limit]:
            # Search VT for some juicy info
            try:
                data = requests.get(self.base_url.format('file'), params={'resource': h, 'apikey': self.apikey, 'allinfo': 1}).json()
            except Exception:
                continue
            # Go through each key and check if it exists
            for VT_type, MISP_type in self.output_types_mapping.items():
                if VT_type in data:
                    try:
                        self.results[((MISP_type,), comment.format(h))].add(data[VT_type])
                    except TypeError:
                        self.results[((MISP_type,), comment.format(h))].update(data[VT_type])
            # Get the malware sample
            sample = requests.get(self.base_url[:-6].format('file/download'), params={'hash': h, 'apikey': self.apikey})
            malsample = sample.content
            # It is possible for VT to not give us any submission names
            if "submission_names" in data:
                self.to_return.append({"types": ["malware-sample"], "categories": ["Payload delivery"],
                                       "values": data["submission_names"], "data": str(base64.b64encode(malsample), 'utf-8')})


def handler(q=False):
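    """MISP module entry point: validate the config, run the enrichment and return results or an error."""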
    if q is False:
        return False
    q = json.loads(q)
    if not q.get('config') or not q['config'].get('apikey'):
        misperrors['error'] = "A VirusTotal api key is required for this module."
        return misperrors
    del q['module']
    query = VirusTotalRequest(q.pop('config'))
    r = query.parse_request(q)
    if isinstance(r, str):
        misperrors['error'] = r
        return misperrors
    return {'results': r}


def introspection():
    return mispattributes


def version():
    moduleinfo['config'] = moduleconfig
    return moduleinfo
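

# A minimal local-test sketch (not part of the MISP module contract): it builds a
# query of the same shape MISP sends to handler(). The hostname and the API key
# below are placeholders; a real VirusTotal key is needed for the lookup to succeed.
if __name__ == '__main__':
    sample_query = {'module': 'virustotal',
                    'hostname': 'example.com',
                    'config': {'apikey': 'YOUR_VT_API_KEY', 'event_limit': '5'}}
    print(handler(json.dumps(sample_query)))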