#!/usr/bin/env python3
|
|
|
|
|
|
|
|
'''
Submit sample to VMRay.

Requires "vmray_rest_api"

The expansion module vmray_submit and import module vmray_import are a two step
process to import data from VMRay.
You can automate this by setting the PyMISP example script 'vmray_automation'
as a cron job
'''
|
|
|
|
|
|
|
|
import json
|
|
|
|
import base64
|
2020-04-23 14:47:48 +02:00
|
|
|
from distutils.util import strtobool
|
2016-11-13 21:43:59 +01:00
|
|
|
|
|
|
|
import io
|
2016-11-18 18:23:52 +01:00
|
|
|
import zipfile
|
2016-11-13 21:43:59 +01:00
|
|
|
|
2016-11-15 16:43:11 +01:00
|
|
|
from ._vmray.vmray_rest_api import VMRayRESTAPI
|
|
|
|
|
2016-11-13 21:43:59 +01:00
|
|
|
# Default error payload returned to MISP; 'error' is overwritten per failure.
misperrors = {'error': 'Error'}

# Attribute types this module accepts and the types it can emit.
mispattributes = {
    'input': ['attachment', 'malware-sample'],
    'output': ['text', 'sha1', 'sha256', 'md5', 'link'],
}

# Module metadata exposed to MISP via version().
moduleinfo = {
    'version': '0.3',
    'author': 'Koen Van Impe',
    'description': 'Submit a sample to VMRay',
    'module-type': ['expansion'],
}

# User-configurable settings.
moduleconfig = [
    'apikey',
    'url',
    'shareable',
    'do_not_reanalyze',
    'do_not_include_vmrayjobids',
]

# Whether VMRay job IDs are added to the results; handler() overwrites this
# from the request configuration.
include_vmrayjobids = False
|
|
|
|
|
|
|
|
|
|
|
|
def handler(q=False):
|
|
|
|
global include_vmrayjobids
|
|
|
|
|
|
|
|
if q is False:
|
|
|
|
return False
|
|
|
|
request = json.loads(q)
|
|
|
|
|
|
|
|
try:
|
|
|
|
data = request.get("data")
|
2016-11-18 18:23:52 +01:00
|
|
|
if 'malware-sample' in request:
|
|
|
|
# malicious samples are encrypted with zip (password infected) and then base64 encoded
|
2018-12-11 15:29:09 +01:00
|
|
|
sample_filename = request.get("malware-sample").split("|", 1)[0]
|
2016-11-18 18:23:52 +01:00
|
|
|
data = base64.b64decode(data)
|
|
|
|
fl = io.BytesIO(data)
|
|
|
|
zf = zipfile.ZipFile(fl)
|
|
|
|
sample_hashname = zf.namelist()[0]
|
2018-12-11 15:29:09 +01:00
|
|
|
data = zf.read(sample_hashname, b"infected")
|
2016-11-18 18:23:52 +01:00
|
|
|
zf.close()
|
|
|
|
elif 'attachment' in request:
|
|
|
|
# All attachments get base64 encoded
|
|
|
|
sample_filename = request.get("attachment")
|
|
|
|
data = base64.b64decode(data)
|
|
|
|
|
|
|
|
else:
|
|
|
|
misperrors['error'] = "No malware sample or attachment supplied"
|
|
|
|
return misperrors
|
2018-12-11 15:29:09 +01:00
|
|
|
except Exception:
|
2016-11-13 21:43:59 +01:00
|
|
|
misperrors['error'] = "Unable to process submited sample data"
|
|
|
|
return misperrors
|
|
|
|
|
|
|
|
if (request["config"].get("apikey") is None) or (request["config"].get("url") is None):
|
|
|
|
misperrors["error"] = "Missing API key or server URL (hint: try cloud.vmray.com)"
|
|
|
|
return misperrors
|
|
|
|
|
|
|
|
api = VMRayRESTAPI(request["config"].get("url"), request["config"].get("apikey"), False)
|
|
|
|
|
|
|
|
shareable = request["config"].get("shareable")
|
|
|
|
do_not_reanalyze = request["config"].get("do_not_reanalyze")
|
|
|
|
do_not_include_vmrayjobids = request["config"].get("do_not_include_vmrayjobids")
|
|
|
|
|
2020-04-23 14:47:48 +02:00
|
|
|
try:
|
2020-05-01 05:12:33 +02:00
|
|
|
shareable = bool(strtobool(shareable)) # Do we want the sample to be shared?
|
|
|
|
reanalyze = not bool(strtobool(do_not_reanalyze)) # Always reanalyze the sample?
|
|
|
|
include_vmrayjobids = not bool(strtobool(do_not_include_vmrayjobids)) # Include the references to VMRay job IDs
|
2020-04-23 14:47:48 +02:00
|
|
|
except ValueError:
|
|
|
|
misperrors["error"] = "Error while processing settings. Please double-check your values."
|
|
|
|
return misperrors
|
2016-11-13 21:43:59 +01:00
|
|
|
|
2016-11-18 18:23:52 +01:00
|
|
|
if data and sample_filename:
|
2016-11-13 21:43:59 +01:00
|
|
|
args = {}
|
|
|
|
args["shareable"] = shareable
|
2016-11-18 18:23:52 +01:00
|
|
|
args["sample_file"] = {'data': io.BytesIO(data), 'filename': sample_filename}
|
2016-11-13 21:43:59 +01:00
|
|
|
args["reanalyze"] = reanalyze
|
|
|
|
|
|
|
|
try:
|
|
|
|
vmraydata = vmraySubmit(api, args)
|
2020-04-23 14:47:48 +02:00
|
|
|
if vmraydata["errors"] and "Submission not stored" not in vmraydata["errors"][0]["error_msg"]:
|
2016-11-13 21:43:59 +01:00
|
|
|
misperrors['error'] = "VMRay: %s" % vmraydata["errors"][0]["error_msg"]
|
|
|
|
return misperrors
|
|
|
|
else:
|
|
|
|
return vmrayProcess(vmraydata)
|
2018-12-11 15:29:09 +01:00
|
|
|
except Exception:
|
2016-11-13 21:43:59 +01:00
|
|
|
misperrors['error'] = "Problem when calling API."
|
|
|
|
return misperrors
|
|
|
|
else:
|
|
|
|
misperrors['error'] = "No sample data or filename."
|
|
|
|
return misperrors
|
|
|
|
|
|
|
|
|
|
|
|
def introspection():
    '''Tell MISP which attribute types this module consumes and produces.'''
    return mispattributes
|
|
|
|
|
|
|
|
|
|
|
|
def version():
    '''Return the module metadata, with the config option list attached.'''
    moduleinfo.update(config=moduleconfig)
    return moduleinfo
|
|
|
|
|
|
|
|
|
|
|
|
def vmrayProcess(vmraydata):
    '''Turn the JSON returned by the VMRay submit call into MISP results.

    Emits the sample hashes (md5/sha1/sha256), a text attribute with the
    VMRay sample ID (tagged workflow:state="incomplete"), a link to the
    VMRay web interface and, when `include_vmrayjobids` is set, one text
    attribute per submitted job. Returns a `misperrors` dict on failure.
    '''
    if vmraydata:
        try:
            sample = vmraydata["samples"][0]
            jobs = vmraydata["jobs"]

            # Result received?
            if sample:
                r = {'results': []}
                r['results'].append({'types': 'md5', 'values': sample['sample_md5hash']})
                r['results'].append({'types': 'sha1', 'values': sample['sample_sha1hash']})
                r['results'].append({'types': 'sha256', 'values': sample['sample_sha256hash']})
                r['results'].append({'types': 'text', 'values': 'VMRay Sample ID: %s' % sample['sample_id'], 'tags': 'workflow:state="incomplete"'})
                r['results'].append({'types': 'link', 'values': sample['sample_webif_url']})

                # Include data from the different analysis jobs.
                if include_vmrayjobids and jobs:
                    for job in jobs:
                        job_id = job["job_id"]
                        job_vm_name = job["job_vm_name"]
                        job_configuration_name = job["job_configuration_name"]
                        r["results"].append({
                            "types": "text",
                            "values": f"VMRay Job ID {job_id} ({job_vm_name} - {job_configuration_name})",
                        })
                return r
            else:
                misperrors['error'] = "No valid results returned."
                return misperrors
        except Exception:
            # Missing/unexpected keys in the VMRay response land here.
            misperrors['error'] = "No valid submission data returned."
            return misperrors
    else:
        misperrors['error'] = "Unable to parse results."
        return misperrors
|
|
|
|
|
|
|
|
|
|
|
|
def vmraySubmit(api, args):
    '''Send the sample to VMRay via its REST API and return the raw response.'''
    return api.call("POST", "/rest/sample/submit", args)
|