mirror of https://github.com/MISP/misp-modules
Merged with current master
commit
08261366b7
|
@ -1,7 +1,5 @@
|
|||
language: python
|
||||
|
||||
cache: pip
|
||||
|
||||
services:
|
||||
- redis-server
|
||||
|
||||
|
|
13
README.md
13
README.md
|
@ -22,7 +22,9 @@ For more information: [Extending MISP with Python modules](https://www.circl.lu/
|
|||
* [CIRCL Passive DNS](misp_modules/modules/expansion/circl_passivedns.py) - a hover and expansion module to expand hostname and IP addresses with passive DNS information.
|
||||
* [CVE](misp_modules/modules/expansion/cve.py) - a hover module to give more information about a vulnerability (CVE).
|
||||
* [DNS](misp_modules/modules/expansion/dns.py) - a simple module to resolve MISP attributes like hostname and domain to expand IP addresses attributes.
|
||||
* [DomainTools](misp_modules/modules/expansion/domaintools.py) - a hover and expansion module to get information from [DomainTools](http://www.domaintools.com/) whois.
|
||||
* [EUPI](misp_modules/modules/expansion/eupi.py) - a hover and expansion module to get information about a URL from the [Phishing Initiative project](https://phishing-initiative.eu/?lang=en).
|
||||
* [GeoIP](misp_modules/modules/expansion/geoip_country.py) - a hover and expansion module to get GeoIP information from geolite/maxmind.
|
||||
* [IPASN](misp_modules/modules/expansion/ipasn.py) - a hover and expansion to get the BGP ASN of an IP address.
|
||||
* [passivetotal](misp_modules/modules/expansion/passivetotal.py) - a [passivetotal](https://www.passivetotal.org/) module that queries a number of different PassiveTotal datasets.
|
||||
* [sourcecache](misp_modules/modules/expansion/sourcecache.py) - a module to cache a specific link from a MISP instance.
|
||||
|
@ -38,17 +40,19 @@ For more information: [Extending MISP with Python modules](https://www.circl.lu/
|
|||
* [OCR](misp_modules/modules/import_mod/ocr.py) Optical Character Recognition (OCR) module for MISP to import attributes from images, scans or faxes.
|
||||
* [stiximport](misp_modules/modules/import_mod/stiximport.py) - An import module to process STIX xml/json
|
||||
* [Email Import](misp_modules/modules/import_mod/email_import.py) Email import module for MISP to import basic metadata.
|
||||
* [VMRay](misp_modules/modules/import_mod/vmray_import.py) - An import module to process VMRay export
|
||||
|
||||
## How to install and start MISP modules?
|
||||
|
||||
~~~~bash
|
||||
sudo apt-get install python3-dev python3-pip libpq5
|
||||
sudo apt-get install python3-dev python3-pip libpq5 libjpeg-dev
|
||||
cd /usr/local/src/
|
||||
sudo git clone https://github.com/MISP/misp-modules.git
|
||||
cd misp-modules
|
||||
sudo pip3 install --upgrade -r REQUIREMENTS
|
||||
sudo pip3 install --upgrade .
|
||||
sudo pip3 install -I -r REQUIREMENTS
|
||||
sudo pip3 install -I .
|
||||
sudo vi /etc/rc.local, add this line: `sudo -u www-data misp-modules -s &`
|
||||
/usr/local/bin/misp-modules #to start the modules
|
||||
~~~~
|
||||
|
||||
## How to add your own MISP modules?
|
||||
|
@ -178,11 +182,12 @@ If the binary file is malware you can use 'malware-sample' as the type. If you d
|
|||
|
||||
### Module type
|
||||
|
||||
A MISP module can be of three types:
|
||||
A MISP module can be of four types:
|
||||
|
||||
- **expansion** - service related to an attribute that can be used to extend and update an existing event.
|
||||
- **hover** - service related to an attribute to provide additional information to the users without updating the event.
|
||||
- **import** - service related to importing and parsing an external object that can be used to extend an existing event.
|
||||
- **export** - service related to exporting an object, event, or data.
|
||||
|
||||
module-type is an array where the list of supported types can be added.
|
||||
|
||||
|
|
|
@ -12,6 +12,10 @@ pyeupi
|
|||
ipasn-redis
|
||||
asnhistory
|
||||
git+https://github.com/Rafiot/uwhoisd.git@testing#egg=uwhois&subdirectory=client
|
||||
git+https://github.com/MISP/MISP-STIX-Converter.git#egg=misp_stix_converter
|
||||
git+https://github.com/CIRCL/PyMISP.git#egg=pymisp
|
||||
pillow
|
||||
pytesseract
|
||||
SPARQLWrapper
|
||||
domaintools_api
|
||||
pygeoip
|
||||
|
|
|
@ -123,6 +123,8 @@ def load_modules(mod_dir):
|
|||
if os.path.basename(root).startswith("."):
|
||||
continue
|
||||
for filename in fnmatch.filter(filenames, '*.py'):
|
||||
if root.split('/')[-1].startswith('_'):
|
||||
continue
|
||||
if filename == '__init__.py':
|
||||
continue
|
||||
modulename = filename.split(".")[0]
|
||||
|
@ -145,7 +147,7 @@ def load_package_modules():
|
|||
mhandlers = {}
|
||||
modules = []
|
||||
for path, module in sys.modules.items():
|
||||
r = re.findall("misp_modules[.]modules[.](\w+)[.](\w+)", path)
|
||||
r = re.findall("misp_modules[.]modules[.](\w+)[.]([^_]\w+)", path)
|
||||
if r and len(r[0]) == 2:
|
||||
moduletype, modulename = r[0]
|
||||
mhandlers[modulename] = module
|
||||
|
|
|
@ -1,2 +1,5 @@
|
|||
__all__ = ['asn_history', 'circl_passivedns', 'circl_passivessl', 'countrycode', 'cve', 'dns',
|
||||
'eupi', 'ipasn', 'passivetotal', 'sourcecache', 'virustotal', 'whois', 'shodan', 'reversedns', 'wiki']
|
||||
from . import _vmray
|
||||
|
||||
__all__ = ['vmray_submit', 'asn_history', 'circl_passivedns', 'circl_passivessl',
|
||||
'countrycode', 'cve', 'dns', 'domaintools', 'eupi', 'ipasn', 'passivetotal', 'sourcecache',
|
||||
'virustotal', 'whois', 'shodan', 'reversedns', 'geoip_country', 'wiki']
|
||||
|
|
|
@ -0,0 +1,148 @@
|
|||
#!/usr/bin/env python3
|
||||
"""Python client library for VMRay REST API"""
|
||||
|
||||
import base64
|
||||
import datetime
|
||||
import os.path
|
||||
import requests
|
||||
import urllib.parse
|
||||
|
||||
# disable nasty certification warning
|
||||
# pylint: disable=no-member
|
||||
try:
|
||||
requests.packages.urllib3.disable_warnings()
|
||||
except AttributeError:
|
||||
try:
|
||||
import urllib3
|
||||
try:
|
||||
urllib3.disable_warnings()
|
||||
except AttributeError:
|
||||
pass
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
# pylint: disable=
|
||||
|
||||
|
||||
class VMRayRESTAPIError(Exception):
    """Raised when the VMRay REST API reports an error.

    The HTTP status code of the failing response (if known) is exposed
    via the ``status_code`` attribute.
    """

    def __init__(self, *args, **kwargs):
        # Pop the keyword first so the base Exception never sees it.
        self.status_code = kwargs.pop("status_code", None)
        super().__init__(*args, **kwargs)
|
||||
|
||||
|
||||
def handle_rest_api_result(result):
    """Raise VMRayRESTAPIError unless *result* is a 2xx HTTP response.

    The error message is taken from the JSON body's ``error_msg`` field
    when the body parses, otherwise from the raw response text.
    """
    if 200 <= result.status_code <= 299:
        return

    try:
        json_result = result.json()
    except ValueError:
        raise VMRayRESTAPIError("API returned error %u: %s" % (result.status_code, result.text),
                                status_code=result.status_code)

    raise VMRayRESTAPIError(json_result.get("error_msg", "Unknown error"),
                            status_code=result.status_code)
|
||||
|
||||
|
||||
class VMRayRESTAPI(object):
    """Minimal client for the VMRay REST API.

    Handles the authorization header, parameter/file encoding, and
    transparently fetches paginated ("continuation") results.
    """

    def __init__(self, server, api_key, verify_cert=True):
        """Store connection settings.

        server      -- base URL of the VMRay server; "https://" is assumed
                       when no scheme is given.
        api_key     -- API key sent in the Authorization header.
        verify_cert -- whether to verify the server TLS certificate.
        """
        # split server URL into components
        url_desc = urllib.parse.urlsplit(server)

        # assume HTTPS if no scheme is specified
        if url_desc.scheme == "":
            server = "https://" + server

        # save variables
        self.server = server
        self.api_key = api_key
        self.verify_cert = verify_cert

    def call(self, http_method, api_path, params=None, raw_data=False):
        """Call the VMRay REST API and return the decoded "data" payload.

        http_method -- HTTP verb ("GET", "POST", ...).
        api_path    -- path below the server root, e.g. "/rest/sample/submit".
        params      -- dict of parameters; dicts with "filename"/"data" keys
                       and file-like objects become multipart file uploads.
        raw_data    -- if True, return the raw response stream instead.

        Raises VMRayRESTAPIError on API errors, ValueError on invalid JSON.
        """
        # get function of requests package
        requests_func = getattr(requests, http_method.lower())

        # parse parameters
        req_params = {}
        file_params = {}

        if params is not None:
            for key, value in params.items():
                if isinstance(value, (datetime.date,
                                      datetime.datetime,
                                      float,
                                      int)):
                    req_params[key] = str(value)
                elif isinstance(value, str):
                    req_params[key] = str(value)
                elif isinstance(value, dict):
                    filename = value["filename"]
                    sample = value["data"]
                    file_params[key] = (filename, sample, "application/octet-stream")
                elif hasattr(value, "read"):
                    filename = os.path.split(value.name)[1]
                    # Non-ASCII filenames are passed base64-encoded in a
                    # side-channel parameter. For the following block refer
                    # to DEV-1820.
                    try:
                        # BUGFIX: was filename.decode("ASCII"), which raises
                        # AttributeError on Python 3 str objects and escaped
                        # the handler below; encode() performs the intended
                        # ASCII-encodability check.
                        filename.encode("ASCII")
                    except (UnicodeDecodeError, UnicodeEncodeError):
                        b64_key = key + "name_b64enc"
                        byte_value = filename.encode("utf-8")
                        b64_value = base64.b64encode(byte_value)

                        filename = "@param=%s" % b64_key
                        req_params[b64_key] = b64_value
                    file_params[key] = (filename, value, "application/octet-stream")
                else:
                    raise VMRayRESTAPIError("Parameter \"%s\" has unknown type \"%s\"" % (key, type(value)))

        # construct request
        if file_params:
            files = file_params
        else:
            files = None

        # we need to adjust some stuff for POST requests
        if http_method.lower() == "post":
            req_data = req_params
            req_params = None
        else:
            req_data = None

        # do request
        result = requests_func(self.server + api_path, data=req_data, params=req_params, headers={"Authorization": "api_key " + self.api_key}, files=files, verify=self.verify_cert, stream=raw_data)
        handle_rest_api_result(result)

        if raw_data:
            return result.raw

        # parse result
        try:
            json_result = result.json()
        except ValueError:
            raise ValueError("API returned invalid JSON: %s" % (result.text))

        # if there are no cached elements then return the data
        if "continuation_id" not in json_result:
            return json_result.get("data", None)

        data = json_result["data"]

        # get cached results
        while "continuation_id" in json_result:
            # send request to server
            result = requests.get("%s/rest/continuation/%u" % (self.server, json_result["continuation_id"]), headers={"Authorization": "api_key " + self.api_key}, verify=self.verify_cert)
            handle_rest_api_result(result)

            # parse result
            try:
                json_result = result.json()
            except ValueError:
                raise ValueError("API returned invalid JSON: %s" % (result.text))

            data.extend(json_result["data"])

        return data
|
|
@ -0,0 +1,279 @@
|
|||
import json
|
||||
import logging
|
||||
import sys
|
||||
|
||||
from domaintools import API
|
||||
|
||||
|
||||
log = logging.getLogger('domaintools')
|
||||
log.setLevel(logging.DEBUG)
|
||||
ch = logging.StreamHandler(sys.stdout)
|
||||
ch.setLevel(logging.DEBUG)
|
||||
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
|
||||
ch.setFormatter(formatter)
|
||||
log.addHandler(ch)
|
||||
|
||||
misperrors = {'error': 'Error'}
|
||||
mispattributes = {
|
||||
'input': ['domain', 'email-src', 'email-dst', 'target-email', 'whois-registrant-email',
|
||||
'whois-registrant-name', 'whois-registrant-phone', 'ip-src', 'ip-dst'],
|
||||
'output': ['whois-registrant-email', 'whois-registrant-phone', 'whois-registrant-name',
|
||||
'whois-registrar', 'whois-creation-date', 'freetext', 'domain']
|
||||
}
|
||||
|
||||
moduleinfo = {
|
||||
'version': '0.1',
|
||||
'author': 'Raphaël Vinot',
|
||||
'description': 'DomainTools MISP expansion module.',
|
||||
'module-type': ['expansion', 'hover']
|
||||
}
|
||||
|
||||
moduleconfig = ['username', 'api_key']
|
||||
|
||||
query_profiles = [
|
||||
{'inputs': ['domain'], 'services': ['parsed_whois', 'domain_profile', 'reputation', 'reverse_ip']},
|
||||
{'inputs': ['email-src', 'email-dst', 'target-email', 'whois-registrant-email', 'whois-registrant-name', 'whois-registrant-phone'], 'services': ['reverse_whois']},
|
||||
{'inputs': ['ip-src', 'ip-dst'], 'services': ['host_domains']}
|
||||
]
|
||||
|
||||
|
||||
class DomainTools(object):
    """Accumulator for DomainTools lookup results.

    Each category is a dict mapping a value to its free-form comment;
    dump() flattens everything into MISP attribute dicts.
    """

    def __init__(self):
        self.reg_mail = {}
        self.reg_phone = {}
        self.reg_name = {}
        self.registrar = {}
        self.creation_date = {}
        self.domain_ip = {}
        self.domain = {}
        self.risk = ()
        self.freetext = ''

    def _add_value(self, value_type, value, comment):
        """Merge (value, comment) into the bucket, appending new comments
        to an already-known value with ' - ' as separator."""
        existing = value_type.get(value)
        if existing:
            if comment and comment not in existing:
                value_type[value] = '{} - {}'.format(existing, comment)
        else:
            value_type[value] = comment or ''
        return value_type

    def add_mail(self, mail, comment=None):
        """Record a registrant/contact e-mail address."""
        self.reg_mail = self._add_value(self.reg_mail, mail, comment)

    def add_phone(self, phone, comment=None):
        """Record a registrant phone number."""
        self.reg_phone = self._add_value(self.reg_phone, phone, comment)

    def add_name(self, name, comment=None):
        """Record a registrant name."""
        self.reg_name = self._add_value(self.reg_name, name, comment)

    def add_registrar(self, reg, comment=None):
        """Record a registrar name/URL/IANA id."""
        self.registrar = self._add_value(self.registrar, reg, comment)

    def add_creation_date(self, date, comment=None):
        """Record a creation/update timestamp."""
        self.creation_date = self._add_value(self.creation_date, date, comment)

    def add_ip(self, ip, comment=None):
        """Record an IP address hosting the domain."""
        self.domain_ip = self._add_value(self.domain_ip, ip, comment)

    def add_domain(self, domain, comment=None):
        """Record a related domain name."""
        self.domain = self._add_value(self.domain, domain, comment)

    def dump(self):
        """Serialize everything collected so far as MISP result attributes,
        in a stable category order."""
        results = []
        single_typed = [
            ('whois-registrant-email', self.reg_mail),
            ('whois-registrant-phone', self.reg_phone),
            ('whois-registrant-name', self.reg_name),
            ('whois-registrar', self.registrar),
            ('whois-creation-date', self.creation_date),
        ]
        for attr_type, bucket in single_typed:
            for value, comment in bucket.items():
                results.append({'type': attr_type, 'values': [value], 'comment': comment or ''})
        # IPs can be either source or destination, so they carry both types.
        for ip, comment in self.domain_ip.items():
            results.append({'types': ['ip-dst', 'ip-src'], 'values': [ip], 'comment': comment or ''})
        for dom, comment in self.domain.items():
            results.append({'type': 'domain', 'values': [dom], 'comment': comment or ''})
        if self.freetext:
            results.append({'type': 'freetext', 'values': [self.freetext], 'comment': 'Freetext import'})
        if self.risk:
            results.append({'type': 'text', 'values': [self.risk[0]], 'comment': self.risk[1]})
        return results
|
||||
|
||||
|
||||
def parsed_whois(domtools, to_query, values):
    """Query DomainTools parsed whois for *to_query* and fold the fields
    (registrant, dates, registrar, contacts, e-mail addresses) into *values*.

    domtools -- authenticated domaintools API client
    to_query -- domain name to look up
    values   -- DomainTools accumulator instance; returned on success
    Returns the module-level misperrors dict when the API reports an error.
    """
    whois_entry = domtools.parsed_whois(to_query)
    if whois_entry.get('error'):
        misperrors['error'] = whois_entry['error']['message']
        return misperrors

    if whois_entry.get('registrant'):
        values.add_name(whois_entry['registrant'], 'Parsed registrant')

    if whois_entry.get('registration'):
        values.add_creation_date(whois_entry['registration']['created'], 'timestamp')

    if whois_entry.get('whois'):
        # Raw whois record is exported as freetext so MISP can re-parse it.
        values.freetext = whois_entry['whois']['record']
    if whois_entry.get('parsed_whois'):
        # NOTE(review): assumes 'registrar' and 'contacts' keys are always
        # present when 'parsed_whois' is — confirm against the API schema.
        if whois_entry['parsed_whois']['created_date']:
            values.add_creation_date(whois_entry['parsed_whois']['created_date'], 'created')
        if whois_entry['parsed_whois']['registrar']['name']:
            values.add_registrar(whois_entry['parsed_whois']['registrar']['name'], 'name')
        if whois_entry['parsed_whois']['registrar']['url']:
            values.add_registrar(whois_entry['parsed_whois']['registrar']['url'], 'url')
        if whois_entry['parsed_whois']['registrar']['iana_id']:
            values.add_registrar(whois_entry['parsed_whois']['registrar']['iana_id'], 'iana_id')
        for key, entry in whois_entry['parsed_whois']['contacts'].items():
            if entry['email']:
                values.add_mail(entry['email'], key)
            if entry['phone']:
                values.add_phone(entry['phone'], key)
            if entry['name']:
                values.add_name(entry['name'], key)
    if whois_entry.emails():
        # emails() aggregates every address seen in the record; anything not
        # already attributed to a contact above is presumably the registrar's.
        for mail in whois_entry.emails():
            if mail not in values.reg_mail.keys():
                values.add_mail(mail, 'Maybe registrar')
    return values
|
||||
|
||||
|
||||
def domain_profile(domtools, to_query, values):
    """Fetch the DomainTools domain profile for *to_query* and merge the
    registrant name, hosting IP and registration dates into *values*.

    Returns *values* on success, or the module misperrors dict on error.
    """
    profile = domtools.domain_profile(to_query)
    # NOTE: profile['website_data']['response_code'] could be used to see if the host is still up. Maybe set a tag.
    if profile.get('error'):
        misperrors['error'] = profile['error']['message']
        return misperrors

    registrant = profile.get('registrant')
    if registrant:
        values.add_name(registrant['name'], 'Profile registrant')

    server = profile.get('server')
    if server:
        other_domains = server['other_domains']
        values.add_ip(server['ip_address'], 'IP of {} (via DomainTools). Has {} other domains.'.format(to_query, other_domains))

    registration = profile.get('registration')
    if registration:
        if registration.get('created'):
            values.add_creation_date(registration['created'], 'created')
        if registration.get('updated'):
            values.add_creation_date(registration['updated'], 'updated')
        if registration.get('registrar'):
            values.add_registrar(registration['registrar'], 'name')
    return values
|
||||
|
||||
|
||||
def reputation(domtools, to_query, values):
    """Attach the DomainTools risk score for *to_query* to *values*.

    Stored as [score, comment] on values.risk instead of a regular
    attribute bucket; errors are silently ignored.
    """
    rep = domtools.reputation(to_query, include_reasons=True)
    # NOTE: use that value in a tag when we will have attribute level tagging
    if rep and not rep.get('error'):
        reason_text = ', '.join(rep['reasons'])
        values.risk = [rep['risk_score'], 'Risk value of {} (via Domain Tools), Reasons: {}'.format(to_query, reason_text)]
    return values
|
||||
|
||||
|
||||
def reverse_ip(domtools, to_query, values):
    """Resolve *to_query* (a domain) to its IP via DomainTools and record
    the co-hosted domains known on that address."""
    rev_ip = domtools.reverse_ip(to_query)
    if not rev_ip or rev_ip.get('error'):
        return values
    info = rev_ip['ip_addresses']
    address = info['ip_address']
    values.add_ip(address, 'IP of {} (via DomainTools). Has {} other domains.'.format(to_query, info['domain_count']))
    for name in info['domain_names']:
        values.add_domain(name, 'Other domain on {}.'.format(address))
    return values
|
||||
|
||||
|
||||
def reverse_whois(domtools, to_query, values):
    """Find domains whose whois record matches *to_query* (an e-mail
    address, name or phone number) and add them to *values*.

    Returns misperrors when the API reports an error.
    """
    rev_whois = domtools.reverse_whois(to_query, mode='purchase')
    if rev_whois.get('error'):
        misperrors['error'] = rev_whois['error']['message']
        return misperrors
    for name in rev_whois['domains']:
        values.add_domain(name, 'Reverse domain related to {}.'.format(to_query))
    return values
|
||||
|
||||
|
||||
def host_domains(domtools, to_query, values):
    """List the domains hosted on IP address *to_query* and add them to
    *values*. Returns misperrors when the API reports an error."""
    hostdom = domtools.host_domains(to_query)
    if hostdom.get('error'):
        misperrors['error'] = hostdom['error']['message']
        return misperrors
    info = hostdom['ip_addresses']
    address = info['ip_address']
    # Only record the IP itself when the API canonicalised it to something
    # different from what was queried.
    if to_query != address:
        values.add_ip(address, 'IP of {} (via DomainTools). Has {} other domains.'.format(to_query, info['domain_count']))
    for name in info['domain_names']:
        values.add_domain(name, 'Other domain on {}.'.format(address))
    return values
|
||||
|
||||
|
||||
def reverse_ip_whois(domtools, to_query, values):
    """Reverse whois lookup on IP *to_query*.

    Disabled for now: the API call dies with
    domaintools.exceptions.NotAuthorizedException on accounts without the
    feature, so the result is currently not folded into *values*.
    """
    rev_whois = domtools.reverse_ip_whois(ip=to_query)
    # Was a bare print() left over from debugging; route through the module
    # logger instead so output honours the logging configuration.
    log.debug(rev_whois)
    if rev_whois.get('error'):
        misperrors['error'] = rev_whois['error']['message']
        return misperrors
    # for d in rev_whois['domains']:
    #     values.add_domain(d, 'Reverse domain related to {}.'.format(to_query))
    return values
|
||||
|
||||
|
||||
def get_services(request):
    """Return the list of DomainTools services to run for the first
    supported attribute type present in *request*, or None when no query
    profile matches."""
    for attr_type in mispattributes['input']:
        if not request.get(attr_type):
            continue
        for profile in query_profiles:
            if attr_type in profile['inputs']:
                return profile['services']
|
||||
|
||||
|
||||
def handler(q=False):
    """MISP expansion handler for the DomainTools module.

    q -- JSON string holding the attribute and the module config
         (username, api_key); falsy when called without work.
    Returns {'results': [...]} on success or the misperrors dict on failure.
    """
    if not q:
        return q

    request = json.loads(q)
    to_query = None
    # Pick the first supported attribute type present in the request.
    for t in mispattributes['input']:
        to_query = request.get(t)
        if to_query:
            break
    if not to_query:
        misperrors['error'] = "Unsupported attributes type"
        return misperrors

    if request.get('config'):
        if (request['config'].get('username') is None) or (request['config'].get('api_key') is None):
            misperrors['error'] = 'DomainTools authentication is incomplete'
            return misperrors
        else:
            domtools = API(request['config'].get('username'), request['config'].get('api_key'))
    else:
        misperrors['error'] = 'DomainTools authentication is missing'
        return misperrors

    values = DomainTools()
    services = get_services(request)
    if services:
        try:
            # Each service name maps to a module-level function of the same
            # name (parsed_whois, domain_profile, reputation, ...).
            for s in services:
                globals()[s](domtools, to_query, values)
        except Exception as e:
            # NOTE(review): failures are only printed, not returned as
            # misperrors — partial results collected so far are still dumped.
            print(to_query, type(e), e)

    return {'results': values.dump()}
|
||||
|
||||
|
||||
def introspection():
    """Return the supported input/output attribute types."""
    return mispattributes
|
||||
|
||||
|
||||
def version():
    """Return module metadata, including the expected config keys."""
    moduleinfo['config'] = moduleconfig
    return moduleinfo
|
|
@ -0,0 +1,3 @@
|
|||
[GEOIP]
|
||||
database = /opt/misp-modules/var/GeoIP.dat
|
||||
|
|
@ -0,0 +1,67 @@
|
|||
import json
|
||||
import pygeoip
|
||||
import sys
|
||||
import os
|
||||
import logging
|
||||
import configparser
|
||||
|
||||
log = logging.getLogger('geoip_country')
|
||||
log.setLevel(logging.DEBUG)
|
||||
ch = logging.StreamHandler(sys.stdout)
|
||||
ch.setLevel(logging.DEBUG)
|
||||
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
|
||||
ch.setFormatter(formatter)
|
||||
log.addHandler(ch)
|
||||
|
||||
misperrors = {'error': 'Error'}
|
||||
mispattributes = {'input': ['ip-src', 'ip-dst', 'domain|ip'], 'output': ['freetext']}
|
||||
|
||||
# possible module-types: 'expansion', 'hover' or both
|
||||
moduleinfo = {'version': '0.1', 'author': 'Andreas Muehlemann',
|
||||
'description': 'Query a local copy of Maxminds Geolite database',
|
||||
'module-type': ['expansion', 'hover']}
|
||||
|
||||
try:
|
||||
# get current db from http://geolite.maxmind.com/download/geoip/database/GeoLiteCountry/GeoIP.dat.gz
|
||||
config = configparser.ConfigParser()
|
||||
config.read(os.path.join(os.path.dirname(os.path.abspath(__file__)), 'geoip_country.cfg'))
|
||||
gi = pygeoip.GeoIP(config.get('GEOIP', 'database'))
|
||||
enabled = True
|
||||
except:
|
||||
enabled = False
|
||||
|
||||
|
||||
def handler(q=False):
    """MISP handler: resolve an IP attribute to a country code using the
    local GeoIP database.

    q -- JSON string with the attribute; False when called without work.
    Returns {'results': [...]}, misperrors on lookup failure, or False for
    unsupported input.
    """
    if q is False:
        return False
    request = json.loads(q)

    # Accept either IP attribute, or the IP half of a 'domain|ip' pair.
    if request.get('ip-dst'):
        toquery = request['ip-dst']
    elif request.get('ip-src'):
        toquery = request['ip-src']
    elif request.get('domain|ip'):
        toquery = request['domain|ip'].split('|')[1]
    else:
        return False

    log.debug(toquery)

    # The module-level 'enabled' flag records whether the GeoIP database
    # could be opened at import time; without it 'gi' does not exist, and
    # the old code misreported the resulting NameError as a resolving error.
    if not enabled:
        misperrors['error'] = "GeoIP resolving error"
        return misperrors

    try:
        answer = gi.country_code_by_addr(toquery)
    except Exception:
        # Narrowed from a bare 'except:' so KeyboardInterrupt/SystemExit
        # are no longer swallowed.
        misperrors['error'] = "GeoIP resolving error"
        return misperrors

    return {'results': [{'types': mispattributes['output'], 'values': [str(answer)]}]}
|
||||
|
||||
|
||||
def introspection():
    """Return the supported input/output attribute types."""
    return mispattributes
|
||||
|
||||
|
||||
def version():
    """Return module metadata.

    This module has no user-facing config, hence the commented-out line.
    """
    # moduleinfo['config'] = moduleconfig
    return moduleinfo
|
|
@ -0,0 +1,162 @@
|
|||
#!/usr/bin/env python3
|
||||
|
||||
'''
|
||||
Submit sample to VMRay.
|
||||
|
||||
Submit a sample to VMRay
|
||||
|
||||
TODO:
|
||||
# Deal with archive submissions
|
||||
|
||||
'''
|
||||
|
||||
import json
|
||||
import base64
|
||||
|
||||
import io
|
||||
import zipfile
|
||||
|
||||
from ._vmray.vmray_rest_api import VMRayRESTAPI
|
||||
|
||||
misperrors = {'error': 'Error'}
|
||||
mispattributes = {'input': ['attachment', 'malware-sample'], 'output': ['text', 'sha1', 'sha256', 'md5', 'link']}
|
||||
moduleinfo = {'version': '0.2', 'author': 'Koen Van Impe',
|
||||
'description': 'Submit a sample to VMRay',
|
||||
'module-type': ['expansion']}
|
||||
moduleconfig = ['apikey', 'url', 'shareable', 'do_not_reanalyze', 'do_not_include_vmrayjobids']
|
||||
|
||||
|
||||
include_vmrayjobids = False
|
||||
|
||||
|
||||
def handler(q=False):
    """MISP expansion handler: submit an attachment or malware sample to VMRay.

    q -- JSON string holding the attribute data and module config (apikey,
         url, shareable, do_not_reanalyze, do_not_include_vmrayjobids).
    Returns the processed VMRay result dict or misperrors on failure.
    """
    global include_vmrayjobids

    if q is False:
        return False
    request = json.loads(q)

    try:
        data = request.get("data")
        if 'malware-sample' in request:
            # malicious samples are encrypted with zip (password "infected")
            # and then base64 encoded; the attribute value is "filename|hash"
            sample_filename = request.get("malware-sample").split("|", 1)[0]
            data = base64.b64decode(data)
            zf = zipfile.ZipFile(io.BytesIO(data))
            sample_hashname = zf.namelist()[0]
            data = zf.read(sample_hashname, b"infected")
            zf.close()
        elif 'attachment' in request:
            # All attachments get base64 encoded
            sample_filename = request.get("attachment")
            data = base64.b64decode(data)
        else:
            misperrors['error'] = "No malware sample or attachment supplied"
            return misperrors
    except Exception:
        # Narrowed from a bare 'except:'; also fixes the "submited" typo.
        misperrors['error'] = "Unable to process submitted sample data"
        return misperrors

    # Was request["config"], which raised an uncaught KeyError when the
    # config section was absent; fall back to the same error message.
    config = request.get("config") or {}
    if (config.get("apikey") is None) or (config.get("url") is None):
        misperrors["error"] = "Missing API key or server URL (hint: try cloud.vmray.com)"
        return misperrors

    api = VMRayRESTAPI(config.get("url"), config.get("apikey"), False)

    # All config values arrive as strings; only the literal "True" enables.
    shareable = config.get("shareable") == "True"
    reanalyze = not (config.get("do_not_reanalyze") == "True")
    include_vmrayjobids = not (config.get("do_not_include_vmrayjobids") == "True")

    if data and sample_filename:
        args = {
            "shareable": shareable,
            "sample_file": {'data': io.BytesIO(data), 'filename': sample_filename},
            "reanalyze": reanalyze,
        }

        try:
            vmraydata = vmraySubmit(api, args)
            if vmraydata["errors"]:
                misperrors['error'] = "VMRay: %s" % vmraydata["errors"][0]["error_msg"]
                return misperrors
            return vmrayProcess(vmraydata)
        except Exception:
            misperrors['error'] = "Problem when calling API."
            return misperrors

    misperrors['error'] = "No sample data or filename."
    return misperrors
|
||||
|
||||
|
||||
def introspection():
    """Return the supported input/output attribute types."""
    return mispattributes
|
||||
|
||||
|
||||
def version():
    """Return module metadata, including the expected config keys."""
    moduleinfo['config'] = moduleconfig
    return moduleinfo
|
||||
|
||||
|
||||
def vmrayProcess(vmraydata):
    """Turn the JSON returned by VMRay into MISP result attributes.

    Extracts the sample hashes, submission IDs and the web UI link; when the
    module-level include_vmrayjobids flag is set, one text attribute is added
    per analysis job. Returns misperrors for empty or malformed payloads.
    """
    if not vmraydata:
        misperrors['error'] = "Unable to parse results."
        return misperrors

    try:
        submissions = vmraydata["submissions"][0]
        jobs = vmraydata["jobs"]

        # Result received?
        if submissions and jobs:
            r = {'results': []}
            r["results"].append({"types": "md5", "values": submissions["submission_sample_md5"]})
            r["results"].append({"types": "sha1", "values": submissions["submission_sample_sha1"]})
            r["results"].append({"types": "sha256", "values": submissions["submission_sample_sha256"]})
            r["results"].append({"types": "text", "values": "VMRay Sample ID: %s" % submissions["submission_sample_id"]})
            r["results"].append({"types": "text", "values": "VMRay Submission ID: %s" % submissions["submission_id"]})
            r["results"].append({"types": "text", "values": "VMRay Submission Sample IP: %s" % submissions["submission_ip_ip"]})
            r["results"].append({"types": "link", "values": submissions["submission_webif_url"]})

            # Include data from different jobs
            if include_vmrayjobids:
                for job in jobs:
                    job_id = job["job_id"]
                    job_vm_name = job["job_vm_name"]
                    job_configuration_name = job["job_configuration_name"]
                    r["results"].append({"types": "text", "values": "VMRay Job ID %s (%s - %s)" % (job_id, job_vm_name, job_configuration_name)})
            return r
        else:
            misperrors['error'] = "No valid results returned."
            return misperrors
    except Exception:
        # Narrowed from a bare 'except:'; a malformed payload (missing keys,
        # empty submission list) lands here.
        misperrors['error'] = "No valid submission data returned."
        return misperrors
|
||||
|
||||
|
||||
def vmraySubmit(api, args):
    ''' Submit the sample to VMRay'''
    # POST to the submission endpoint; 'args' carries the shareable/reanalyze
    # flags and the sample_file payload built by handler().
    vmraydata = api.call("POST", "/rest/sample/submit", args)
    return vmraydata
|
|
@ -3,10 +3,10 @@ import requests
|
|||
from SPARQLWrapper import SPARQLWrapper, JSON
|
||||
|
||||
misperrors = {'error': 'Error'}
|
||||
mispattributes = {'input': ['label'], 'output': ['text']}
|
||||
moduleinfo = {'version': '0.1', 'author': 'Roman Graf', 'description': 'An expansion hover module to extract information from Wikidata to have additional information about particular term for analysis.', 'module-type': ['hover']}
|
||||
mispattributes = {'input': ['text'], 'output': ['text']}
|
||||
moduleinfo = {'version': '0.2', 'author': 'Roman Graf', 'description': 'An expansion hover module to extract information from Wikidata to have additional information about particular term for analysis.', 'module-type': ['hover']}
|
||||
moduleconfig = []
|
||||
# sample query label 'Microsoft' should provide Wikidata link https://www.wikidata.org/wiki/Q2283 in response
|
||||
# sample query text 'Microsoft' should provide Wikidata link https://www.wikidata.org/wiki/Q2283 in response
|
||||
wiki_api_url = 'https://query.wikidata.org/bigdata/namespace/wdq/sparql'
|
||||
|
||||
|
||||
|
@ -14,15 +14,15 @@ def handler(q=False):
|
|||
if q is False:
|
||||
return False
|
||||
request = json.loads(q)
|
||||
if not request.get('label'):
|
||||
misperrors['error'] = 'Query label missing'
|
||||
if not request.get('text'):
|
||||
misperrors['error'] = 'Query text missing'
|
||||
return misperrors
|
||||
|
||||
sparql = SPARQLWrapper(wiki_api_url)
|
||||
query_string = \
|
||||
"SELECT ?item \n" \
|
||||
"WHERE { \n" \
|
||||
"?item rdfs:label\"" + request.get('label') + "\" @en \n" \
|
||||
"?item rdfs:label\"" + request.get('text') + "\" @en \n" \
|
||||
"}\n";
|
||||
sparql.setQuery(query_string)
|
||||
sparql.setReturnFormat(JSON)
|
||||
|
|
|
@ -1 +1,3 @@
|
|||
__all__ = ['testimport', 'ocr', 'stiximport']
|
||||
from . import _vmray
|
||||
|
||||
__all__ = ['vmray_import', 'testimport', 'ocr', 'stiximport', 'cuckooimport']
|
||||
|
|
|
@ -0,0 +1,148 @@
|
|||
#!/usr/bin/python3
|
||||
"""Python client library for VMRay REST API"""
|
||||
|
||||
import base64
|
||||
import datetime
|
||||
import os.path
|
||||
import requests
|
||||
import urllib.parse
|
||||
|
||||
# Disable urllib3's InsecureRequestWarning noise, needed because this client
# allows verify_cert=False. urllib3 may be vendored inside requests or
# installed standalone, so try both locations.
# pylint: disable=no-member
try:
    # requests 2.x vendors urllib3 under requests.packages
    requests.packages.urllib3.disable_warnings()
except AttributeError:
    try:
        # fall back to a standalone urllib3 installation
        import urllib3
        try:
            urllib3.disable_warnings()
        except AttributeError:
            # very old urllib3 without disable_warnings(); nothing to do
            pass
    except ImportError:
        # urllib3 not importable at all; warnings cannot be silenced
        pass

# pylint: disable=
||||
|
||||
class VMRayRESTAPIError(Exception):
    """Raised when the VMRay REST API reports an error.

    The optional ``status_code`` keyword argument carries the HTTP status
    of the failed request and is exposed as an attribute of the same name.
    """

    def __init__(self, *args, **kwargs):
        # Pull out our extra keyword before delegating to Exception.
        status = kwargs.pop("status_code", None)
        self.status_code = status
        super().__init__(*args, **kwargs)
||||
|
||||
def handle_rest_api_result(result):
    """Raise VMRayRESTAPIError unless *result* is a 2xx HTTP response.

    :param result: a requests.Response-like object (status_code, json, text)
    :raises VMRayRESTAPIError: carrying the server's error_msg when the body
        is JSON, or the raw response text otherwise
    """
    status = result.status_code
    if 200 <= status <= 299:
        # Success range: nothing to do.
        return

    try:
        body = result.json()
    except ValueError:
        # Body was not JSON; report the raw text instead.
        raise VMRayRESTAPIError("API returned error %u: %s" % (status, result.text), status_code=status)

    raise VMRayRESTAPIError(body.get("error_msg", "Unknown error"), status_code=status)
||||
|
||||
class VMRayRESTAPI(object):
    """Minimal client for the VMRay REST API.

    Handles authentication via the ``Authorization: api_key`` header,
    parameter/file-upload encoding, and transparent fetching of
    continuation (paged) results.
    """

    def __init__(self, server, api_key, verify_cert=True):
        """Store connection settings.

        :param server: base URL of the VMRay server (scheme optional)
        :param api_key: API key placed in the Authorization header
        :param verify_cert: whether to verify the server's TLS certificate
        """
        # split server URL into components
        url_desc = urllib.parse.urlsplit(server)

        # assume HTTPS if no scheme is specified
        if url_desc.scheme == "":
            server = "https://" + server

        # save variables
        self.server = server
        self.api_key = api_key
        self.verify_cert = verify_cert

    def call(self, http_method, api_path, params=None, raw_data=False):
        """Call VMRay REST API.

        :param http_method: HTTP verb, e.g. "GET" or "POST"
        :param api_path: path below the server URL, e.g. "/rest/analysis/..."
        :param params: dict of request parameters; values may be scalars,
            open file objects, or {"filename": ..., "data": ...} dicts
        :param raw_data: if True, return the raw response stream instead of
            parsed JSON
        :raises VMRayRESTAPIError: on non-2xx responses or unsupported
            parameter types
        """

        # get matching function of the requests package (requests.get, ...)
        requests_func = getattr(requests, http_method.lower())

        # split params into plain form fields and file uploads
        req_params = {}
        file_params = {}

        if params is not None:
            for key, value in params.items():
                if isinstance(value, (datetime.date,
                                      datetime.datetime,
                                      float,
                                      int)):
                    req_params[key] = str(value)
                elif isinstance(value, str):
                    req_params[key] = str(value)
                elif isinstance(value, dict):
                    # in-memory upload: {"filename": ..., "data": ...}
                    filename = value["filename"]
                    sample = value["data"]
                    file_params[key] = (filename, sample, "application/octet-stream")
                elif hasattr(value, "read"):
                    # file-like object: upload under its basename
                    filename = os.path.split(value.name)[1]
                    # For the following block refer to DEV-1820
                    # NOTE(review): on Python 3, str has no .decode(), so this
                    # raises AttributeError (not caught below) for any
                    # filename; looks like a Python 2 leftover -- confirm.
                    try:
                        filename.decode("ASCII")
                    except (UnicodeDecodeError, UnicodeEncodeError):
                        # non-ASCII filename: pass it base64-encoded instead
                        b64_key = key + "name_b64enc"
                        byte_value = filename.encode("utf-8")
                        b64_value = base64.b64encode(byte_value)

                        filename = "@param=%s" % b64_key
                        req_params[b64_key] = b64_value
                    file_params[key] = (filename, value, "application/octet-stream")
                else:
                    raise VMRayRESTAPIError("Parameter \"%s\" has unknown type \"%s\"" % (key, type(value)))

        # construct request
        if file_params:
            files = file_params
        else:
            files = None

        # POST sends the fields in the body, not in the query string
        if http_method.lower() == "post":
            req_data = req_params
            req_params = None
        else:
            req_data = None

        # do request
        result = requests_func(self.server + api_path, data=req_data, params=req_params, headers={"Authorization": "api_key " + self.api_key}, files=files, verify=self.verify_cert, stream=raw_data)
        handle_rest_api_result(result)

        if raw_data:
            # caller wants the undecoded stream
            return result.raw

        # parse result
        try:
            json_result = result.json()
        except ValueError:
            raise ValueError("API returned invalid JSON: %s" % (result.text))

        # if there are no cached elements then return the data
        if "continuation_id" not in json_result:
            return json_result.get("data", None)

        data = json_result["data"]

        # fetch remaining pages until no continuation_id is returned
        while "continuation_id" in json_result:
            # send request to server
            result = requests.get("%s/rest/continuation/%u" % (self.server, json_result["continuation_id"]), headers={"Authorization": "api_key " + self.api_key}, verify=self.verify_cert)
            handle_rest_api_result(result)

            # parse result
            try:
                json_result = result.json()
            except ValueError:
                raise ValueError("API returned invalid JSON: %s" % (result.text))

            data.extend(json_result["data"])

        return data
|
|
@ -0,0 +1,196 @@
|
|||
import json
|
||||
import logging
|
||||
import sys
|
||||
import base64
|
||||
|
||||
# Default error payload returned to MISP when the import fails.
misperrors = {'error': 'Error'}
# No extra user-facing options; the module consumes one uploaded file.
userConfig = {}
inputSource = ['file']

moduleinfo = {'version': '0.1', 'author': 'Victor van der Stoep',
              'description': 'Cuckoo JSON import',
              'module-type': ['import']}

# No server-side configuration keys required.
moduleconfig = []
||||
def handler(q=False):
    """MISP import handler: convert a Cuckoo JSON report into MISP attributes.

    :param q: JSON string whose "data" field holds the base64-encoded report
    :return: {'results': [...]} on success, False when no input is given,
        or a JSON error string for an empty report
    """
    # Just in case we have no data
    if q is False:
        return False

    # The return value
    r = {'results': []}

    # Load up that JSON
    q = json.loads(q)
    data = base64.b64decode(q.get("data")).decode('utf-8')

    # If something really weird happened
    if not data:
        return json.dumps({"success": 0})

    data = json.loads(data)

    # Get characteristics of file
    targetFile = data['target']['file']

    # Process the inital binary
    processBinary(r, targetFile, initial = True)

    # Get binary information for dropped files
    if(data.get('dropped')):
        for droppedFile in data['dropped']:
            processBinary(r, droppedFile, dropped = True)

    # Add malscore to results
    r["results"].append({
        "values": "Malscore: {} ".format(data['malscore']),
        "types": "comment",
        "categories": "Payload delivery",
        "comment": "Cuckoo analysis: MalScore"
    })

    # Add virustotal data, if exists
    if(data.get('virustotal')):
        processVT(r, data['virustotal'])

    # Add network information, should be improved
    processNetwork(r, data['network'])

    # Add behavioral information
    processSummary(r, data['behavior']['summary'])

    # Return
    return r
||||
def processSummary(r, summary):
    """Append the mutexes observed in the behavioural summary to *r*."""
    attribute = {
        "values": summary['mutexes'],
        "types": "mutex",
        "categories": "Artifacts dropped",
        "comment": "Cuckoo analysis: Observed mutexes"
    }
    r["results"].append(attribute)
||||
def processVT(r, virustotal):
    """Append VirusTotal information from a Cuckoo report to *r*.

    Emits a link attribute for the permalink when present, plus a comment
    attribute with either the detection ratio or a "not detected" note.
    """
    category = "Antivirus detection"
    comment = "VirusTotal analysis"

    if virustotal.get('permalink'):
        r["results"].append({
            "values": virustotal['permalink'],
            "types": "link",
            "categories": category,
            # BUG FIX: key was "comments" here while every other attribute in
            # this module uses "comment"; the misspelt key was ignored.
            "comment": comment + " - Permalink"
        })

    if virustotal.get('total'):
        r["results"].append({
            "values": "VirusTotal detection rate {}/{}".format(
                virustotal['positives'],
                virustotal['total']
            ),
            "types": "comment",
            "categories": category,
            "comment": comment
        })
    else:
        # No 'total' field means the sample was never scanned / not found.
        r["results"].append({
            "values": "Sample not detected on VirusTotal",
            "types": "comment",
            "categories": category,
            "comment": comment
        })
||||
def processNetwork(r, network):
    """Append one ip-dst attribute per host contacted during analysis."""
    r["results"].extend({
        "values": host['ip'],
        "types": "ip-dst",
        "categories": "Network activity",
        "comment": "Cuckoo analysis: Observed network traffic"
    } for host in network['hosts'])
||||
def processBinary(r, target, initial=False, dropped=False):
    """Append filename and hash attributes for one analysed binary to *r*.

    :param r: result dict with a "results" list (mutated in place)
    :param target: Cuckoo file description (name, md5, sha1, sha256, sha512,
        optionally guest_paths)
    :param initial: True for the submitted sample
    :param dropped: True for a file dropped during execution
    """
    if initial:
        comment = "Cuckoo analysis: Initial file"
        category = "Payload delivery"
    elif dropped:
        category = "Artifacts dropped"
        comment = "Cuckoo analysis: Dropped file"
    else:
        # BUG FIX: previously neither variable was assigned when both flags
        # were False, so the appends below crashed with NameError. Fall back
        # to a neutral label.
        category = "Artifacts dropped"
        comment = "Cuckoo analysis"

    r["results"].append({
        "values": target['name'],
        "types": "filename",
        "categories": category,
        "comment": comment
    })

    # One attribute per supported hash type (was five copy-pasted blocks).
    for hash_type in ("md5", "sha1", "sha256", "sha512"):
        r["results"].append({
            "values": target[hash_type],
            "types": hash_type,
            "categories": category,
            "comment": comment
        })

    # todo : add file size?

    if target.get('guest_paths'):
        r["results"].append({
            "values": target['guest_paths'],
            "types": "filename",
            "categories": "Payload installation",
            "comment": comment + " - Path"
        })
||||
def introspection():
    """Report the module's optional settings (userConfig, inputSource) to MISP.

    Each setting is included only if the corresponding module-level name
    exists, mirroring the original try/NameError probing.
    """
    modulesetup = {}
    for setting in ("userConfig", "inputSource"):
        try:
            modulesetup[setting] = globals()[setting]
        except KeyError:
            # setting not defined at module level; skip it
            pass
    return modulesetup
||||
def version():
    """Return module metadata with the expected server-side config keys."""
    moduleinfo['config'] = moduleconfig
    return moduleinfo
||||
if __name__ == '__main__':
    # Ad-hoc manual test: feed a local Cuckoo report through the handler.
    # BUG FIX: the original built a list and indexed it with a string
    # (TypeError), called the non-existent base64.base64encode(), and passed
    # raw bytes instead of the JSON envelope that handler() expects.
    with open('test.json', 'rb') as sample:
        encoded = base64.b64encode(sample.read()).decode('utf-8')
    q = json.dumps({'data': encoded})
    handler(q)
|
|
@ -1,19 +1,17 @@
|
|||
import json
|
||||
from stix.core import STIXPackage
|
||||
import re
|
||||
import base64
|
||||
import hashlib
|
||||
import tempfile
|
||||
|
||||
from pymisp.tools import stix
|
||||
|
||||
misperrors = {'error': 'Error'}
|
||||
userConfig = {}
|
||||
inputSource = ['file']
|
||||
|
||||
moduleinfo = {'version': '0.1', 'author': 'Hannah Ward',
|
||||
moduleinfo = {'version': '0.2', 'author': 'Hannah Ward',
|
||||
'description': 'Import some stix stuff',
|
||||
'module-type': ['import']}
|
||||
|
||||
moduleconfig = ["max_size"]
|
||||
moduleconfig = []
|
||||
|
||||
|
||||
def handler(q=False):
|
||||
|
@ -28,214 +26,19 @@ def handler(q=False):
|
|||
q = json.loads(q)
|
||||
|
||||
# It's b64 encoded, so decode that stuff
|
||||
package = str(base64.b64decode(q.get("data", None)), 'utf-8')
|
||||
package = base64.b64decode(q.get("data")).decode('utf-8')
|
||||
|
||||
# If something really weird happened
|
||||
if not package:
|
||||
return json.dumps({"success": 0})
|
||||
|
||||
# Get the maxsize from the config
|
||||
# Default to 10MB
|
||||
# (I believe the max_size arg is given in bytes)
|
||||
# Check if we were given a configuration
|
||||
memsize = q.get("config", None)
|
||||
pkg = stix.load_stix(package)
|
||||
|
||||
# If we were, find out if there's a memsize field
|
||||
if memsize:
|
||||
memsize = memsize.get("max_size", 10 * 1024)
|
||||
else:
|
||||
memsize = 10 * 1024
|
||||
|
||||
# Load up the package into STIX
|
||||
package = loadPackage(package, memsize)
|
||||
|
||||
# Build all the observables
|
||||
if package.observables:
|
||||
for obs in package.observables:
|
||||
r["results"].append(buildObservable(obs))
|
||||
|
||||
# And now the threat actors
|
||||
if package.threat_actors:
|
||||
for ta in package.threat_actors:
|
||||
r["results"].append(buildActor(ta))
|
||||
|
||||
# Aaaand the indicators
|
||||
if package.indicators:
|
||||
for ind in package.indicators:
|
||||
r["results"] += buildIndicator(ind)
|
||||
|
||||
# Are you seeing a pattern?
|
||||
if package.exploit_targets:
|
||||
for et in package.exploit_targets:
|
||||
r["results"].append(buildExploitTarget(et))
|
||||
|
||||
# LOADING STUFF
|
||||
if package.campaigns:
|
||||
for cpn in package.campaigns:
|
||||
r["results"].append(buildCampaign(cpn))
|
||||
|
||||
# Clean up results
|
||||
# Don't send on anything that didn't have a value
|
||||
r["results"] = [x for x in r["results"] if isinstance(x, dict) and len(x["values"]) != 0]
|
||||
return r
|
||||
|
||||
# Quick and dirty regex for IP addresses
|
||||
ipre = re.compile("([0-9]{1,3}.){3}[0-9]{1,3}")
|
||||
|
||||
|
||||
def buildCampaign(cpn):
    """Map a STIX campaign to a MISP campaign-name attribute record."""
    return {
        "values": [cpn.title],
        "types": ["campaign-name"],
    }
||||
def buildExploitTarget(et):
    """Collect CVE identifiers from a STIX exploit target.

    :param et: STIX ExploitTarget with an optional ``vulnerabilities`` list
    :return: {"values": [cve ids...], "types": ["vulnerability"]}
    """
    cves = []
    vulnerabilities = et.vulnerabilities
    if vulnerabilities:
        # Keep only entries that actually carry a CVE id.
        cves = [vuln.cve_id for vuln in vulnerabilities if vuln.cve_id]
    return {"values": cves, "types": ["vulnerability"]}
||||
def identifyHash(hsh):
    """Guess which hash algorithms *hsh* could be, by hex-digest length.

    Returns both the bare type (e.g. "md5") and its "filename|<type>"
    composite, since the STIX observable does not say which form applies.
    """
    possible_hashes = []
    target_len = len(str(hsh))
    # sorted() makes the output order deterministic (algorithms_guaranteed
    # is a set).
    for algo in sorted(hashlib.algorithms_guaranteed):
        try:
            digest_len = len(hashlib.new(algo).hexdigest())
        except TypeError:
            # BUG FIX: shake_128/shake_256 are in algorithms_guaranteed on
            # Python 3 but require an explicit digest length; calling
            # hexdigest() without one raised TypeError and aborted the
            # whole lookup.
            continue
        if target_len == digest_len:
            possible_hashes.append(algo)
            possible_hashes.append("filename|{}".format(algo))
    return possible_hashes
||||
def buildIndicator(ind):
    """Collect MISP attribute records from a STIX indicator's observables.

    Composite observables are flattened: every member of an
    observable_composition is converted individually, and the container
    itself is also passed to buildObservable (which returns an empty record
    for containers that the caller later filters out).
    """
    r = []
    # Try to get hashes. I hate stix
    if ind.observables:
        for i in ind.observables:
            if i.observable_composition:
                for j in i.observable_composition.observables:
                    r.append(buildObservable(j))
            r.append(buildObservable(i))
    return r
|
||||
def buildActor(ta):
    """Map a STIX threat actor to a MISP threat-actor attribute record.

    :param ta: STIX ThreatActor with a ``title``
    :return: {"values": [title], "types": ["threat-actor"]}
    """
    # BUG FIX: a bad merge left a loop here iterating an undefined global
    # ``pkg`` and appending to a non-existent r["results"] key, which raised
    # NameError on every call. This function only reports the actor title.
    return {"values": [ta.title], "types": ["threat-actor"]}
||||
def buildObservable(o):
    """Convert a single STIX observable into a MISP attribute record.

    :param o: a STIX observable object (or an already-converted dict)
    :return: dict with a "values" list and, when a type could be derived,
        a "types" list; an empty record for composition containers
    """
    # Life is easier with json
    if not isinstance(o, dict):
        o = json.loads(o.to_json())
    # Make a new record to store values in
    r = {"values": []}

    # Get the object properties. This contains all the
    # fun stuff like values
    if "observable_composition" in o:
        # Containers carry no value of their own; their members are
        # handled separately by buildIndicator. May as well be useless
        return r

    props = o["object"]["properties"]

    # If it has an address_value field, it's gonna be an address
    # Kinda obvious really
    if "address_value" in props:

        # We've got ourselves a nice little address
        value = props["address_value"]

        if isinstance(value, dict):
            # Sometimes it's embedded in a dictionary
            value = value["value"]

        # Is it an IP?
        # NOTE(review): the regex dot in ipre is unescaped, so some non-IP
        # digit strings could also match -- confirm this looseness is OK.
        if ipre.match(str(value)):
            # Yes!
            r["values"].append(value)
            r["types"] = ["ip-src", "ip-dst"]
        else:
            # Probably a domain yo
            r["values"].append(value)
            r["types"] = ["domain", "hostname"]

    if "hashes" in props:
        for hsh in props["hashes"]:
            r["values"].append(hsh["simple_hash_value"]["value"])
            r["types"] = identifyHash(hsh["simple_hash_value"]["value"])

    elif "xsi:type" in props:
        # Cybox. Ew.
        try:
            type_ = props["xsi:type"]
            val = props["value"]

            if type_ == "LinkObjectType":
                r["types"] = ["link"]
                r["values"].append(val)
            else:
                # Unsupported Cybox object type; skip with a trace message.
                print("Ignoring {}".format(type_))
        except:
            pass
    return r
||||
def loadPackage(data, memsize=1024):
    """Parse a STIX package from *data*, trying XML first and then JSON.

    :param data: raw package text
    :param memsize: max bytes kept in memory before spooling to disk
    :raises ValueError: if neither parser accepts the data
    """
    # Write the stix package to a tmp file

    temp = tempfile.SpooledTemporaryFile(max_size=int(memsize), mode="w+")

    temp.write(data)

    # Back to the beginning so we can read it again
    temp.seek(0)
    try:
        # Try loading it into every format we know of
        try:
            package = STIXPackage().from_xml(temp)
        except:
            # We have to seek back again
            temp.seek(0)
            package = STIXPackage().from_json(temp)
    except Exception:
        print("Failed to load package")
        raise ValueError("COULD NOT LOAD STIX PACKAGE!")
    temp.close()
    return package
|
||||
|
||||
def introspection():
|
||||
modulesetup = {}
|
||||
try:
|
||||
|
|
|
@ -0,0 +1,319 @@
|
|||
#!/usr/bin/env python3
|
||||
|
||||
'''
|
||||
Import VMRay results.
|
||||
|
||||
This version supports import from different analyze jobs, starting from one sample
|
||||
(the supplied sample_id).
|
||||
|
||||
Requires "vmray_rest_api"
|
||||
|
||||
TODO:
|
||||
# Import one job (analyze_id)
|
||||
# Import STIX package (XML version)
|
||||
|
||||
'''
|
||||
|
||||
import json
|
||||
import re
|
||||
|
||||
from ._vmray.vmray_rest_api import VMRayRESTAPI
|
||||
|
||||
# Default error payload returned to MISP when the import fails.
misperrors = {'error': 'Error'}
# No file/event input: this module is driven purely by its user config.
inputSource = []
moduleinfo = {'version': '0.1', 'author': 'Koen Van Impe',
              'description': 'Import VMRay (VTI) results',
              'module-type': ['import']}
# Options shown to the user in the MISP import UI.
userConfig = {'include_textdescr': {'type': 'Boolean',
                                    'message': 'Include textual description'
                                    },
              'include_analysisid': {'type': 'Boolean',
                                     'message': 'Include VMRay analysis_id text'
                                     },
              'only_network_info': {'type': 'Boolean',
                                    'message': 'Only include network (src-ip, hostname, domain, ...) information'
                                    },
              'sample_id': {'type': 'Integer',
                            'errorMessage': 'Expected a sample ID',
                            'message': 'The VMRay sample_id'
                            }
              }

# Server-side configuration keys required by this module.
moduleconfig = ['apikey', 'url']

# Module-level flags; overwritten from the request config in handler().
include_textdescr = False
include_analysisid = False
only_network_info = False
|
||||
def handler(q=False):
    """MISP import handler: fetch VMRay VTI results for a configured sample_id.

    :param q: JSON request string; config must carry apikey, url, sample_id
        and the three "0"/"1" option flags
    :return: {'results': [...]} on success, False when no input is given,
        or the module-level misperrors dict on failure
    """
    global include_textdescr
    global include_analysisid
    global only_network_info

    if q is False:
        return False
    request = json.loads(q)
    config = request["config"]

    # MISP passes boolean options as the strings "0"/"1".
    include_textdescr = config.get("include_textdescr") == "1"
    include_analysisid = config.get("include_analysisid") == "1"
    only_network_info = config.get("only_network_info") == "1"

    # Check credentials before touching sample_id, so a missing apikey/url
    # yields a clean error instead of int(None) raising TypeError.
    if (config.get("apikey") is None) or (config.get("url") is None):
        misperrors["error"] = "Missing API key or server URL (hint: try cloud.vmray.com)"
        return misperrors

    sample_id = int(config.get("sample_id"))
    if sample_id <= 0:
        misperrors['error'] = "Not a valid sample id"
        return misperrors

    try:
        api = VMRayRESTAPI(config.get("url"), config.get("apikey"), False)
        # All finished analysis jobs for this sample.
        data = vmrayGetInfoAnalysis(api, sample_id)
        if not data:
            misperrors['error'] = "Unable to fetch sample id %u" % (sample_id)
            return misperrors
        if not data["data"]:
            misperrors['error'] = "No vti_results returned or jobs not finished"
            return misperrors

        vmray_results = {'results': []}
        vti_patterns_found = False
        for analysis in data["data"]:
            analysis_id = analysis["analysis_id"]
            if analysis_id <= 0:
                continue
            # Details of one analysis job.
            analysis_data = vmrayDownloadAnalysis(api, analysis_id)
            if not analysis_data:
                continue
            p = vmrayVtiPatterns(analysis_data["vti_patterns"])
            if p and len(p["results"]) > 0:
                vti_patterns_found = True
                vmray_results["results"].extend(p["results"])
                if include_analysisid:
                    # Link back to the per-analysis report on VMRay cloud.
                    link = ("https://cloud.vmray.com/user/analysis/view?from_sample_id=%u" % sample_id
                            + "&id=%u" % analysis_id
                            + "&sub=%2Freport%2Foverview.html")
                    vmray_results["results"].append({"values": link, "types": "link"})

        # Clean up (remove doubles across analysis jobs)
        if vti_patterns_found:
            vmray_results = vmrayCleanup(vmray_results)
        return vmray_results
    except Exception:
        # NOTE(review): was a bare except; narrowed to Exception so
        # KeyboardInterrupt/SystemExit still propagate.
        misperrors['error'] = "Unable to access VMRay API"
        return misperrors
||||
def introspection():
    """Report optional module-level settings (userConfig, inputSource) to MISP."""
    modulesetup = {}
    try:
        userConfig
        modulesetup['userConfig'] = userConfig
    except NameError:
        # no user-facing configuration defined
        pass
    try:
        inputSource
        modulesetup['inputSource'] = inputSource
    except NameError:
        # no input source declared
        pass
    return modulesetup
|
||||
def version():
    """Return module metadata with the expected server-side config keys."""
    moduleinfo['config'] = moduleconfig
    return moduleinfo
|
||||
def vmrayGetInfoAnalysis(api, sample_id):
    """Return the set of finished analysis jobs for *sample_id*.

    :param api: VMRayRESTAPI-like client
    :param sample_id: numeric sample id; falsy values yield False
    """
    if not sample_id:
        return False
    raw = api.call("GET", "/rest/analysis/sample/%u" % (sample_id), raw_data=True)
    return json.loads(raw.read().decode())
|
||||
def vmrayDownloadAnalysis(api, analysis_id):
    """Return the vti_result.json content of one analysis job.

    :param api: VMRayRESTAPI-like client
    :param analysis_id: numeric analysis id; falsy values yield False
    """
    if not analysis_id:
        return False
    raw = api.call("GET", "/rest/analysis/%u/archive/additional/vti_result.json" % (analysis_id), raw_data=True)
    return json.loads(raw.read().decode())
||||
def vmrayVtiPatterns(vti_patterns):
    '''Convert VMRay VTI patterns into MISP attribute records.

    Network patterns are always converted; all other categories are only
    converted while the module-level only_network_info flag is False.
    Returns a de-duplicated {"results": [...]} dict, or False when no
    patterns were supplied.
    '''

    if vti_patterns:
        r = {'results': []}
        y = {'results': []}

        for pattern in vti_patterns:
            content = False
            # Network patterns: always included.
            if pattern["category"] == "_network" and pattern["operation"] == "_download_data":
                content = vmrayGeneric(pattern, "url", 1)
            elif pattern["category"] == "_network" and pattern["operation"] == "_connect":
                content = vmrayConnect(pattern)
            elif pattern["category"] == "_network" and pattern["operation"] == "_install_server":
                content = vmrayGeneric(pattern)

            # Process behaviour (skipped when only_network_info is set).
            elif only_network_info is False and pattern["category"] == "_process" and pattern["operation"] == "_alloc_wx_page":
                content = vmrayGeneric(pattern)
            elif only_network_info is False and pattern["category"] == "_process" and pattern["operation"] == "_install_ipc_endpoint":
                content = vmrayGeneric(pattern, "mutex", 1)
            elif only_network_info is False and pattern["category"] == "_process" and pattern["operation"] == "_crashed_process":
                content = vmrayGeneric(pattern)
            elif only_network_info is False and pattern["category"] == "_process" and pattern["operation"] == "_read_from_remote_process":
                content = vmrayGeneric(pattern)
            elif only_network_info is False and pattern["category"] == "_process" and pattern["operation"] == "_create_process_with_hidden_window":
                content = vmrayGeneric(pattern)

            # Anti-analysis tricks.
            elif only_network_info is False and pattern["category"] == "_anti_analysis" and pattern["operation"] == "_delay_execution":
                content = vmrayGeneric(pattern)
            elif only_network_info is False and pattern["category"] == "_anti_analysis" and pattern["operation"] == "_dynamic_api_usage":
                content = vmrayGeneric(pattern)

            # Static indicators (dropped/executed PE files).
            elif only_network_info is False and pattern["category"] == "_static" and pattern["operation"] == "_drop_pe_file":
                content = vmrayGeneric(pattern, "filename", 1)
            elif only_network_info is False and pattern["category"] == "_static" and pattern["operation"] == "_execute_dropped_pe_file":
                content = vmrayGeneric(pattern, "filename", 1)

            # Code/memory injection.
            elif only_network_info is False and pattern["category"] == "_injection" and pattern["operation"] == "_modify_memory":
                content = vmrayGeneric(pattern)
            elif only_network_info is False and pattern["category"] == "_injection" and pattern["operation"] == "_modify_memory_system":
                content = vmrayGeneric(pattern)
            elif only_network_info is False and pattern["category"] == "_injection" and pattern["operation"] == "_modify_memory_non_system":
                content = vmrayGeneric(pattern)
            elif only_network_info is False and pattern["category"] == "_injection" and pattern["operation"] == "_modify_control_flow":
                content = vmrayGeneric(pattern)
            elif only_network_info is False and pattern["category"] == "_injection" and pattern["operation"] == "_modify_control_flow_non_system":
                content = vmrayGeneric(pattern)
            elif only_network_info is False and pattern["category"] == "_file_system" and pattern["operation"] == "_create_many_files":
                content = vmrayGeneric(pattern)

            # Registry / persistence / privileges.
            elif only_network_info is False and pattern["category"] == "_hide_tracks" and pattern["operation"] == "_hide_data_in_registry":
                content = vmrayGeneric(pattern, "regkey", 1)

            elif only_network_info is False and pattern["category"] == "_persistence" and pattern["operation"] == "_install_startup_script":
                content = vmrayGeneric(pattern, "regkey", 1)
            elif only_network_info is False and pattern["category"] == "_os" and pattern["operation"] == "_enable_process_privileges":
                content = vmrayGeneric(pattern)

            if content:
                r["results"].append(content["attributes"])
                r["results"].append(content["text"])

        # Remove empty results
        r["results"] = [x for x in r["results"] if isinstance(x, dict) and len(x["values"]) != 0]
        # De-duplicate while preserving order.
        for el in r["results"]:
            if el not in y["results"]:
                y["results"].append(el)
        return y
    else:
        return False
||||
def vmrayCleanup(x):
    """Return a copy of *x* with duplicate result entries removed.

    Order of first occurrence is preserved.
    """
    deduped = {'results': []}
    kept = deduped['results']
    for entry in x["results"]:
        if entry not in kept:
            kept.append(entry)
    return deduped
|
||||
def vmraySanitizeInput(s):
    """Strip double quotes and normalise backslashes for MISP import.

    Returns False for empty/falsy input, matching the callers' checks.
    """
    if not s:
        return False
    cleaned = s.replace('"', '')
    return re.sub('\\\\', r'\\', cleaned)
||||
def vmrayGeneric(el, attr="", attrpos=1):
    '''Convert a 'generic' VTI pattern to MISP data.

    :param el: one VTI pattern dict carrying a "technique_desc" text
    :param attr: MISP attribute type to extract from the quoted part of the
        description (e.g. "url", "filename"); empty means text-only
    :param attrpos: index of the wanted quoted segment after splitting the
        description on double quotes
    :return: {"text": ..., "attributes": ...} pair of records, or False when
        there is nothing usable
    '''

    # typed attribute extracted from the description
    r = {"values": []}
    # free-text companion attribute
    f = {"values": []}

    if el:
        content = el["technique_desc"]
        if content:
            if attr:
                # Some elements are put between \"\" ; replace them to single
                content = content.replace("\"\"","\"")
                content_split = content.split("\"")
                # Attributes are between open " and close "; so use >
                if len(content_split) > attrpos:
                    content_split[attrpos] = vmraySanitizeInput(content_split[attrpos])
                    r["values"].append(content_split[attrpos])
                    r["types"] = [attr]

            # Adding the value also as text to get the extra description,
            # but this is pretty useless for "url"
            if include_textdescr and attr != "url":
                f["values"].append(vmraySanitizeInput(content))
                f["types"] = ["text"]

            return {"text": f, "attributes": r}
        else:
            return False
    else:
        return False
||||
def vmrayConnect(el):
    '''Extension of vmrayGeneric: parse network connect data.

    Extracts the contacted host from the quoted "<host>:<port>" fragment of
    the technique description and classifies it as ip-dst or
    domain/hostname.
    '''
    # NOTE(review): the dot is unescaped, so this is a loose IPv4 check
    # (digit strings with other separators can match) -- confirm acceptable.
    ipre = re.compile("([0-9]{1,3}.){3}[0-9]{1,3}")

    # typed attribute (ip-dst or domain/hostname)
    r = {"values": []}
    # free-text companion attribute
    f = {"values": []}

    if el:
        content = el["technique_desc"]
        if content:
            target = content.split("\"")
            # port = (target[1].split(":"))[1] ## FIXME: not used
            host = (target[1].split(":"))[0]
            if ipre.match(str(host)):
                r["values"].append(host)
                r["types"] = ["ip-dst"]
            else:
                r["values"].append(host)
                r["types"] = ["domain", "hostname"]

            # Always keep the raw host:port as text for context.
            f["values"].append(vmraySanitizeInput(target[1]))
            f["types"] = ["text"]

            if include_textdescr:
                f["values"].append(vmraySanitizeInput(content))
                f["types"] = ["text"]

            return {"text": f, "attributes": r}
        else:
            return False
    else:
        return False
|
@ -38,15 +38,19 @@ class TestModules(unittest.TestCase):
|
|||
response.connection.close()
|
||||
|
||||
def test_stix(self):
|
||||
with open("tests/stix.xml", "r") as f:
|
||||
data = json.dumps({"module":"stiximport",
|
||||
"data":str(base64.b64encode(bytes(f.read(), 'utf-8'))),
|
||||
"config": {"max_size": "15000"},
|
||||
})
|
||||
with open("tests/stix.xml", "rb") as f:
|
||||
content = base64.b64encode(f.read())
|
||||
data = json.dumps({"module": "stiximport",
|
||||
"data": content.decode('utf-8'),
|
||||
})
|
||||
response = requests.post(self.url + "query", data=data).json()
|
||||
|
||||
response = requests.post(self.url + "query", data=data)
|
||||
response.connection.close()
|
||||
print(response.json())
|
||||
print("STIX :: {}".format(response))
|
||||
values = [x["values"][0] for x in response["results"]]
|
||||
|
||||
assert("209.239.79.47" in values)
|
||||
assert("41.213.121.180" in values)
|
||||
assert("eu-society.com" in values)
|
||||
|
||||
def test_email_headers(self):
|
||||
with open("tests/test_no_attach.eml", "r") as f:
|
||||
|
@ -184,10 +188,10 @@ class TestModules(unittest.TestCase):
|
|||
# and pass if it can't find one
|
||||
|
||||
if not os.path.exists("tests/bodyvirustotal.json"):
|
||||
return
|
||||
return
|
||||
|
||||
with open("tests/bodyvirustotal.json", "r") as f:
|
||||
response = requests.post(self.url + "query", data=f.read()).json()
|
||||
response = requests.post(self.url + "query", data=f.read()).json()
|
||||
assert(response)
|
||||
response.connection.close()
|
||||
|
||||
|
@ -208,5 +212,15 @@ def helper_create_email(**conf):
|
|||
|
||||
|
||||
|
||||
#def test_domaintools(self):
|
||||
# query = {'config': {'username': 'test_user', 'api_key': 'test_key'}, 'module': 'domaintools', 'domain': 'domaintools.com'}
|
||||
# try:
|
||||
# response = requests.post(self.url + "query", data=json.dumps(query)).json()
|
||||
# except:
|
||||
# pass
|
||||
# response = requests.post(self.url + "query", data=json.dumps(query)).json()
|
||||
# print(response)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.main()
|
||||
|
|
Loading…
Reference in New Issue