@@ -0,0 +1,289 @@
# -*- coding: utf-8; mode: python -*-
##
## Format
##
## ACTION: [AUDIENCE:] COMMIT_MSG [!TAG ...]
##
## Description
##
## ACTION is one of 'chg', 'fix', 'new'
##
## Is WHAT the change is about.
##
## 'chg' is for refactors, small improvements, cosmetic changes...
## 'fix' is for bug fixes
## 'new' is for new features, big improvements
##
## AUDIENCE is optional and one of 'dev', 'usr', 'pkg', 'test', 'doc'|'docs'
##
## Is WHO is concerned by the change.
##
## 'dev' is for developers (API changes, refactors...)
## 'usr' is for final users (UI changes)
## 'pkg' is for packagers (packaging changes)
## 'test' is for testers (test-only related changes)
## 'doc' is for doc guys (doc-only changes)
##
## COMMIT_MSG is ... well ... the commit message itself.
##
## TAGs are additional adjectives such as 'refactor', 'minor', 'cosmetic'.
##
## They are preceded with a '!' or a '@' (prefer the former, as the
## latter is wrongly interpreted in github). Commonly used tags are:
##
## 'refactor' is obviously for refactoring code only
## 'minor' is for a very meaningless change (a typo, adding a comment)
## 'cosmetic' is for cosmetic-driven changes (re-indentation, 80-col...)
## 'wip' is for partial functionality but complete subfunctionality.
##
## Example:
##
## new: usr: support of bazaar implemented
## chg: re-indented some lines !cosmetic
## new: dev: updated code to be compatible with last version of killer lib.
## fix: pkg: updated year of licence coverage.
## new: test: added a bunch of tests around user usability of feature X.
## fix: typo in spelling my name in comment. !minor
##
## Please note that multi-line commit messages are supported, and only the
## first line will be considered as the "summary" of the commit message. So
## tags and other rules only apply to the summary. The body of the commit
## message will be displayed in the changelog without reformatting.


##
## ``ignore_regexps`` is a list of regexps
##
## Any commit having its full commit message matching any regexp listed here
## will be ignored and won't be reported in the changelog.
##
ignore_regexps = [
    r'@minor', r'!minor',
    r'@cosmetic', r'!cosmetic',
    r'@refactor', r'!refactor',
    r'@wip', r'!wip',
    r'^([cC]hg|[fF]ix|[nN]ew)\s*:\s*[pP]kg:',
    r'^([cC]hg|[fF]ix|[nN]ew)\s*:\s*[dD]ev:',
    r'^(.{3,3}\s*:)?\s*[fF]irst commit.?\s*$',
]
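## For instance, with the list above a summary such as "chg: dev: refactored
## stuff", any 'pkg'-audience commit, or any summary tagged !minor will be
## dropped from the changelog.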


## ``section_regexps`` is a list of 2-tuples associating a string label and a
## list of regexps
##
## Commit messages will be classified in sections thanks to this. Section
## titles are the labels, and a commit is classified under a section if any
## of the regexps associated with it matches.
##
## Please note that ``section_regexps`` will only classify commits and won't
## make any changes to the contents. So you'll probably want to go check
## ``subject_process`` (or ``body_process``) to do some changes to the subject,
## whenever you are tweaking this variable.
##
section_regexps = [
    ('New', [
        r'^[nN]ew\s*:\s*((dev|use?r|pkg|test|doc|docs)\s*:\s*)?([^\n]*)$',
    ]),
    ('Changes', [
        r'^[cC]hg\s*:\s*((dev|use?r|pkg|test|doc|docs)\s*:\s*)?([^\n]*)$',
    ]),
    ('Fix', [
        r'^[fF]ix\s*:\s*((dev|use?r|pkg|test|doc|docs)\s*:\s*)?([^\n]*)$',
    ]),

    ('Other', None),  ## Match all lines
]


## ``body_process`` is a callable
##
## This callable will be given the original body and its result will
## be used in the changelog.
##
## Available constructs are:
##
## - any python callable that takes one txt argument and returns a txt argument.
##
## - ReSub(pattern, replacement): will apply regexp substitution.
##
## - Indent(chars="  "): will indent the text with the given prefix.
##   Please remember that template engines also get to modify the text and
##   will usually indent the text themselves if needed.
##
## - Wrap(regexp=r"\n\n"): re-wrap text in separate paragraphs to fill 80 columns.
##
## - noop: do nothing
##
## - ucfirst: ensure the first letter is uppercase.
##   (usually used in the ``subject_process`` pipeline)
##
## - final_dot: ensure text finishes with a dot
##   (usually used in the ``subject_process`` pipeline)
##
## - strip: remove any spaces before or after the content of the string
##
## - SetIfEmpty(msg="No commit message."): will set the text to
##   whatever given ``msg`` if the current text is empty.
##
## Additionally, you can `pipe` the provided filters, for instance:
#body_process = Wrap(regexp=r'\n(?=\w+\s*:)') | Indent(chars="  ")
#body_process = Wrap(regexp=r'\n(?=\w+\s*:)')
#body_process = noop
body_process = ReSub(r'((^|\n)[A-Z]\w+(-\w+)*: .*(\n\s+.*)*)+$', r'') | strip


## ``subject_process`` is a callable
##
## This callable will be given the original subject and its result will
## be used in the changelog.
##
## Available constructs are those listed in the ``body_process`` doc.
subject_process = (strip |
    ReSub(r'^([cC]hg|[fF]ix|[nN]ew)\s*:\s*((dev|use?r|pkg|test|doc|docs)\s*:\s*)?([^\n@]*)(@[a-z]+\s+)*$', r'\4') |
    SetIfEmpty("No commit message.") | ucfirst | final_dot)
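## For instance (a worked example of the pipeline above): the raw subject
## "new: usr: support of bazaar implemented" is stripped, ReSub keeps only
## capture group 4 ("support of bazaar implemented"), then ucfirst and
## final_dot turn it into "Support of bazaar implemented." in the changelog.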


## ``tag_filter_regexp`` is a regexp
##
## Tags that will be used for the changelog must match this regexp.
##
tag_filter_regexp = r'^v[0-9]+\.[0-9]+\.[0-9]+$'
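## For instance, with the pattern above a tag like 'v2.4.155' is used for the
## changelog, while '2.4.155' (no leading 'v') or 'v2.4' (no patch part) is not.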


## ``unreleased_version_label`` is a string or a callable that outputs a string
##
## This label will be used as the changelog title of the last set of changes
## between the last valid tag and HEAD, if any.
unreleased_version_label = "%%version%% (unreleased)"


## ``output_engine`` is a callable
##
## This will change the output format of the generated changelog file.
##
## Available choices are:
##
## - rest_py
##
##   Legacy pure python engine, outputs reStructuredText.
##   This is the default.
##
## - mustache(<template_name>)
##
##   Template name could be any of the available templates in
##   ``templates/mustache/*.tpl``.
##   Requires the python package ``pystache``.
##   Examples:
##      - mustache("markdown")
##      - mustache("restructuredtext")
##
## - makotemplate(<template_name>)
##
##   Template name could be any of the available templates in
##   ``templates/mako/*.tpl``.
##   Requires the python package ``mako``.
##   Examples:
##      - makotemplate("restructuredtext")
##
#output_engine = rest_py
#output_engine = mustache("restructuredtext")
output_engine = mustache("markdown")
#output_engine = makotemplate("restructuredtext")


## ``include_merge`` is a boolean
##
## This option tells git-log whether to include merge commits in the log.
## The default is to include them.
include_merge = True


## ``log_encoding`` is a string identifier
##
## This option tells gitchangelog what encoding is output by ``git log``.
## The default is to be clever about it: it checks ``git config`` for
## ``i18n.logOutputEncoding``, and if not found will default to git's own
## default: ``utf-8``.
#log_encoding = 'utf-8'


## ``publish`` is a callable
##
## Sets what ``gitchangelog`` should do with the output generated by
## the output engine. ``publish`` is a callable taking one argument
## that is an iterator on lines from the output engine.
##
## Some helper callables are provided. Available choices are:
##
## - stdout
##
##   Outputs directly to standard output
##   (This is the default)
##
## - FileInsertAtFirstRegexMatch(file, pattern, idx=lambda m: m.start())
##
##   Creates a callable that will parse the given file for the given
##   regex pattern and will insert the output in the file.
##   ``idx`` is a callable that receives the match object and
##   must return an integer index pointing at where to insert the
##   output in the file. The default is to return the position of
##   the start of the matched string.
##
## - FileRegexSubst(file, pattern, replace, flags)
##
##   Apply a replacement in place in the given file. Your regex pattern must
##   take care of everything and might be more complex. Check the README
##   for a complete copy-pastable example.
##
#publish = FileInsertAtFirstRegexMatch(
#    "CHANGELOG.rst",
#    r'(?P<rev>[0-9]+\.[0-9]+(\.[0-9]+)?)\s+\([0-9]+-[0-9]{2}-[0-9]{2}\)\n--+\n',
#    idx=lambda m: m.start(1)
#)
#publish = stdout


## ``revs`` is a list of callables or a list of strings
##
## Callables will be called to resolve as strings and allow dynamic
## computation of these. The result will be used as revisions for
## gitchangelog (as if directly stated on the command line). This allows
## you to filter exactly which commits will be read by gitchangelog.
##
## To get full documentation on the format of these strings, please
## refer to the ``git rev-list`` arguments. There are many examples.
##
## Using callables is especially useful, for instance, if you
## are using gitchangelog to generate your changelog incrementally.
##
## Some helpers are provided, you can use them::
##
## - FileFirstRegexMatch(file, pattern): will return a callable that will
##   return the first string match for the given pattern in the given file.
##   If you use named sub-patterns in your regex pattern, it'll output only
##   the string matching the regex pattern named "rev".
##
## - Caret(rev): will return the rev prefixed by a "^", which is a
##   way to remove the given revision and all its ancestors.
##
## Please note that if you provide a rev-list on the command line, it'll
## replace this value (which will then be ignored).
##
## If empty, then ``gitchangelog`` will act as if it had to generate a full
## changelog.
##
## The default is to use all commits to make the changelog.
#revs = ["^1.0.3", ]
#revs = [
#    Caret(
#        FileFirstRegexMatch(
#            "CHANGELOG.rst",
#            r"(?P<rev>[0-9]+\.[0-9]+(\.[0-9]+)?)\s+\([0-9]+-[0-9]{2}-[0-9]{2}\)\n--+\n")),
#    "HEAD"
#]
revs = []

@@ -0,0 +1,52 @@
name: Python package

on:
  push:
    branches: [ main ]
  pull_request:
    branches: [ main ]

jobs:
  build:

    runs-on: ubuntu-latest
    strategy:
      fail-fast: false
      matrix:
        python-version: ["3.7", "3.8", "3.9"]

    steps:
    - run: |
        sudo apt-get install libpoppler-cpp-dev libzbar0 tesseract-ocr
    - uses: actions/checkout@v2
    - name: Set up Python ${{ matrix.python-version }}
      uses: actions/setup-python@v2
      with:
        python-version: ${{ matrix.python-version }}
    - name: Cache Python dependencies
      uses: actions/cache@v2
      with:
        path: ~/.cache/pip
        key: ${{ runner.os }}-pip-${{ matrix.python-version }}-${{ hashFiles('REQUIREMENTS') }}
    - name: Install dependencies
      run: |
        python -m pip install --upgrade pip
        python -m pip install flake8 pytest
        # pyfaup must be installed manually (?)
        pip install -r REQUIREMENTS pyfaup
        pip install .
    - name: Lint with flake8
      run: |
        # stop the build if there are Python syntax errors or undefined names
        flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics
        # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide
        flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics
    - name: Test with pytest
      run: |
        # Run server in background
        misp-modules -l 127.0.0.1 -s &
        sleep 5
        # Check if modules are running
        curl -sS localhost:6666/modules
        # Run tests
        pytest tests

@@ -0,0 +1,4 @@
[submodule "misp_modules/lib/misp-objects"]
	path = misp_modules/lib/misp-objects
	url = https://github.com/MISP/misp-objects.git
	branch = main

@@ -11,9 +11,6 @@ python:
  - "3.7-dev"
  - "3.8-dev"

before_install:
  - docker build -t misp-modules --build-arg BUILD_DATE=$(date -u +"%Y-%m-%d") docker/

install:
  - sudo apt-get install libzbar0 libzbar-dev libpoppler-cpp-dev tesseract-ocr libfuzzy-dev libcaca-dev liblua5.3-dev
  - pip install pipenv

@@ -52,7 +49,7 @@ script:
  - nosetests --with-coverage --cover-package=misp_modules
  - kill -s KILL $pid
  - pip install flake8
  - flake8 --ignore=E501,W503,E226 misp_modules
  - flake8 --ignore=E501,W503,E226,E126 misp_modules

after_success:
  - coverage combine .coverage*

@@ -0,0 +1,3 @@
mkdocs
pymdown-extensions
mkdocs-material

Pipfile
@@ -17,40 +17,41 @@ passivetotal = "*"
pypdns = "*"
pypssl = "*"
pyeupi = "*"
uwhois = { editable = true, git = "https://github.com/Rafiot/uwhoisd.git", ref = "testing", subdirectory = "client" }
pymisp = { extras = ["fileobjects,openioc,pdfexport,email"], version = "*" }
pyonyphe = { editable = true, git = "https://github.com/sebdraven/pyonyphe" }
pydnstrails = { editable = true, git = "https://github.com/sebdraven/pydnstrails" }
pymisp = { extras = ["fileobjects,openioc,pdfexport,email,url"], version = "*" }
pyonyphe = { git = "https://github.com/sebdraven/pyonyphe" }
pydnstrails = { git = "https://github.com/sebdraven/pydnstrails" }
pytesseract = "*"
pygeoip = "*"
beautifulsoup4 = "*"
oauth2 = "*"
yara-python = "==3.8.1"
sigmatools = "*"
stix2 = "*"
stix2-patterns = "*"
taxii2-client = "*"
maclookup = "*"
vulners = "*"
blockchain = "*"
reportlab = "*"
pyintel471 = { editable = true, git = "https://github.com/MISP/PyIntel471.git" }
pyintel471 = { git = "https://github.com/MISP/PyIntel471.git" }
shodan = "*"
Pillow = "*"
Pillow = ">=8.2.0"
Wand = "*"
SPARQLWrapper = "*"
domaintools_api = "*"
misp-modules = { editable = true, path = "." }
pybgpranking = { editable = true, git = "https://github.com/D4-project/BGP-Ranking.git/", subdirectory = "client" }
pyipasnhistory = { editable = true, git = "https://github.com/D4-project/IPASN-History.git/", subdirectory = "client" }
misp-modules = { path = "." }
pybgpranking = { git = "https://github.com/D4-project/BGP-Ranking.git/", subdirectory = "client", ref = "68de39f6c5196f796055c1ac34504054d688aa59" }
pyipasnhistory = { git = "https://github.com/D4-project/IPASN-History.git/", subdirectory = "client", ref = "a2853c39265cecdd0c0d16850bd34621c0551b87" }
backscatter = "*"
pyzbar = "*"
opencv-python = "*"
np = "*"
ODTReader = { editable = true, git = "https://github.com/cartertemm/ODTReader.git/" }
ODTReader = { git = "https://github.com/cartertemm/ODTReader.git/" }
python-pptx = "*"
python-docx = "*"
ezodf = "*"
pandas = "*"
pandas_ods_reader = "*"
pandas = "==1.3.5"
pandas_ods_reader = "==0.1.2"
pdftotext = "*"
lxml = "*"
xlrd = "*"

@@ -60,11 +61,18 @@ geoip2 = "*"
apiosintDS = "*"
assemblyline_client = "*"
vt-graph-api = "*"
trustar = "*"
trustar = { git = "https://github.com/SteveClement/trustar-python.git" }
markdownify = "==0.5.3"
socialscan = "*"
dnsdb2 = "*"
clamd = "*"
aiohttp = ">=3.7.4"
tau-clients = "*"
vt-py = ">=0.7.1"
crowdstrike-falconpy = "0.9.0"
censys = "2.0.9"
mwdblib = "3.4.1"
ndjson = "0.3.1"

[requires]
python_version = "3"
python_version = "3.7"

@@ -1,7 +1,6 @@
# MISP modules

[![Build Status](https://travis-ci.org/MISP/misp-modules.svg?branch=main)](https://travis-ci.org/MISP/misp-modules)
[![Coverage Status](https://coveralls.io/repos/github/MISP/misp-modules/badge.svg?branch=main)](https://coveralls.io/github/MISP/misp-modules?branch=main)
[![Python package](https://github.com/MISP/misp-modules/actions/workflows/python-package.yml/badge.svg)](https://github.com/MISP/misp-modules/actions/workflows/python-package.yml)[![Coverage Status](https://coveralls.io/repos/github/MISP/misp-modules/badge.svg?branch=main)](https://coveralls.io/github/MISP/misp-modules?branch=main)
[![codecov](https://codecov.io/gh/MISP/misp-modules/branch/main/graph/badge.svg)](https://codecov.io/gh/MISP/misp-modules)

MISP modules are autonomous modules that can be used to extend [MISP](https://github.com/MISP/MISP) for new services such as expansion, import and export.

@@ -58,6 +57,8 @@ For more information: [Extending MISP with Python modules](https://www.misp-proj
* [macaddress.io](misp_modules/modules/expansion/macaddress_io.py) - a hover module to retrieve vendor details and other information regarding a given MAC address or an OUI from [MAC address Vendor Lookup](https://macaddress.io). See [integration tutorial here](https://macaddress.io/integrations/MISP-module).
* [macvendors](misp_modules/modules/expansion/macvendors.py) - a hover module to retrieve MAC vendor information.
* [MALWAREbazaar](misp_modules/modules/expansion/malwarebazaar.py) - an expansion module to query MALWAREbazaar with some payload.
* [McAfee MVISION Insights](misp_modules/modules/expansion/mcafee_insights_enrich.py) - an expansion module to enrich IOCs with McAfee MVISION Insights.
* [Mmdb server lookup](misp_modules/modules/expansion/mmdb_lookup.py) - an expansion module to enrich an IP with geolocation information from an mmdb server such as ip.circl.lu.
* [ocr-enrich](misp_modules/modules/expansion/ocr_enrich.py) - an enrichment module to get OCRized data from images into MISP.
* [ods-enrich](misp_modules/modules/expansion/ods_enrich.py) - an enrichment module to get text out of an OpenOffice spreadsheet document into MISP (using free-text parser).
* [odt-enrich](misp_modules/modules/expansion/odt_enrich.py) - an enrichment module to get text out of an OpenOffice document into MISP (using free-text parser).

@@ -87,6 +88,7 @@ For more information: [Extending MISP with Python modules](https://www.misp-proj
* [virustotal](misp_modules/modules/expansion/virustotal.py) - an expansion module to query the [VirusTotal](https://www.virustotal.com/gui/home) API with a high request rate limit required. (More details about the API: [here](https://developers.virustotal.com/reference))
* [virustotal_public](misp_modules/modules/expansion/virustotal_public.py) - an expansion module to query the [VirusTotal](https://www.virustotal.com/gui/home) API with a public key and a low request rate limit. (More details about the API: [here](https://developers.virustotal.com/reference))
* [VMray](misp_modules/modules/expansion/vmray_submit.py) - a module to submit a sample to VMray.
* [VMware NSX](misp_modules/modules/expansion/vmware_nsx.py) - a module to enrich a file or URL with VMware NSX Defender.
* [VulnDB](misp_modules/modules/expansion/vulndb.py) - a module to query [VulnDB](https://www.riskbasedsecurity.com/).
* [Vulners](misp_modules/modules/expansion/vulners.py) - an expansion module to expand information about CVEs using the Vulners API.
* [whois](misp_modules/modules/expansion/whois.py) - a module to query a local instance of [uwhois](https://github.com/rafiot/uwhoisd).

REQUIREMENTS
@@ -6,141 +6,169 @@
#

-i https://pypi.org/simple
-e .
-e git+https://github.com/D4-project/BGP-Ranking.git/@fd9c0e03af9b61d4bf0b67ac73c7208a55178a54#egg=pybgpranking&subdirectory=client
-e git+https://github.com/D4-project/IPASN-History.git/@fc5e48608afc113e101ca6421bf693b7b9753f9e#egg=pyipasnhistory&subdirectory=client
-e git+https://github.com/MISP/PyIntel471.git@0df8d51f1c1425de66714b3a5a45edb69b8cc2fc#egg=pyintel471
-e git+https://github.com/Rafiot/uwhoisd.git@783bba09b5a6964f25566089826a1be4b13f2a22#egg=uwhois&subdirectory=client
-e git+https://github.com/cartertemm/ODTReader.git/@49d6938693f6faa3ff09998f86dba551ae3a996b#egg=odtreader
-e git+https://github.com/sebdraven/pydnstrails@48c1f740025c51289f43a24863d1845ff12fd21a#egg=pydnstrails
-e git+https://github.com/sebdraven/pyonyphe@1ce15581beebb13e841193a08a2eb6f967855fcb#egg=pyonyphe
aiohttp==3.7.3; python_version >= '3.6'
.
aiohttp==3.8.1
aiosignal==1.2.0; python_version >= '3.6'
antlr4-python3-runtime==4.8; python_version >= '3'
apiosintds==1.8.3
appdirs==1.4.4
argparse==1.4.0
assemblyline-client==4.0.1
async-timeout==3.0.1; python_full_version >= '3.5.3'
attrs==20.3.0; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'
assemblyline-client==4.2.2
async-timeout==4.0.2; python_version >= '3.6'
asynctest==0.13.0; python_version < '3.8'
attrs==21.4.0; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'
backoff==1.11.1; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'
backports.zoneinfo==0.2.1; python_version < '3.9'
backscatter==0.2.4
beautifulsoup4==4.9.3
bidict==0.21.2; python_version >= '3.6'
beautifulsoup4==4.10.0
bidict==0.21.4; python_version >= '3.6'
blockchain==1.4.4
censys==1.1.1
certifi==2020.12.5
cffi==1.14.4
chardet==3.0.4
censys==2.1.2
certifi==2021.10.8
cffi==1.15.0
chardet==4.0.0
charset-normalizer==2.0.12; python_version >= '3'
clamd==1.0.2
click-plugins==1.1.1
click==7.1.2; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'
click==8.0.4; python_version >= '3.6'
colorama==0.4.4; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'
colorclass==2.2.0
colorclass==2.2.2; python_version >= '2.6'
commonmark==0.9.1
compressed-rtf==1.0.6
configparser==5.0.1; python_version >= '3.6'
cryptography==3.3.1
decorator==4.4.2
deprecated==1.2.11; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'
dnsdb2==1.1.2
dnspython3==1.15.0
domaintools-api==0.5.2
easygui==0.98.1
configparser==5.2.0; python_version >= '3.6'
crowdstrike-falconpy==1.0.5
cryptography==36.0.1; python_version >= '3.6'
decorator==5.1.1; python_version >= '3.5'
deprecated==1.2.13; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'
dnsdb2==1.1.4
dnspython==2.2.0
dnspython3
domaintools-api==0.6.1
easygui==0.98.2
ebcdic==1.1.1
enum-compat==0.0.3
extract-msg==0.28.1
ez-setup==0.9
extract-msg==0.30.8
ezodf==0.3.2
filelock==3.6.0; python_version >= '3.7'
frozenlist==1.3.0; python_version >= '3.7'
future==0.18.2; python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3'
futures==3.1.1
geoip2==4.1.0
httplib2==0.18.1
geoip2==4.5.0
git+https://github.com/D4-project/BGP-Ranking.git/@68de39f6c5196f796055c1ac34504054d688aa59#egg=pybgpranking&subdirectory=client
git+https://github.com/D4-project/IPASN-History.git/@a2853c39265cecdd0c0d16850bd34621c0551b87#egg=pyipasnhistory&subdirectory=client
git+https://github.com/MISP/PyIntel471.git@917272fafa8e12102329faca52173e90c5256968#egg=pyintel471
git+https://github.com/SteveClement/trustar-python.git@6954eae38e0c77eaeef26084b6c5fd033925c1c7#egg=trustar
git+https://github.com/cartertemm/ODTReader.git/@49d6938693f6faa3ff09998f86dba551ae3a996b#egg=odtreader
git+https://github.com/sebdraven/pydnstrails@48c1f740025c51289f43a24863d1845ff12fd21a#egg=pydnstrails
git+https://github.com/sebdraven/pyonyphe@aed008ee5a27e3a5e4afbb3e5cbfc47170108452#egg=pyonyphe
httplib2==0.20.4; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'
idna-ssl==1.1.0; python_version < '3.7'
idna==2.10; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'
imapclient==2.1.0
isodate==0.6.0
jbxapi==3.14.0
json-log-formatter==0.3.0
jsonschema==3.2.0
ndjson==0.3.1
lark-parser==0.11.1
lief==0.11.0
lxml==4.6.2
idna==3.3; python_version >= '3'
imapclient==2.2.0
importlib-metadata==4.11.2; python_version < '3.8'
importlib-resources==5.4.0; python_version < '3.9'
isodate==0.6.1
itsdangerous==2.1.0; python_version >= '3.7'
jbxapi==3.17.2
jeepney==0.7.1; sys_platform == 'linux'
json-log-formatter==0.5.1
jsonschema==4.4.0; python_version >= '3.7'
keyring==23.5.0; python_version >= '3.7'
lark-parser==0.12.0
lief==0.11.5
lxml==4.8.0
maclookup==1.0.3
markdownify==0.5.3
maxminddb==2.0.3; python_version >= '3.6'
msoffcrypto-tool==4.11.0
multidict==5.1.0; python_version >= '3.6'
maxminddb==2.2.0; python_version >= '3.6'
more-itertools==8.12.0; python_version >= '3.5'
msoffcrypto-tool==5.0.0; python_version >= '3' and platform_python_implementation != 'PyPy' or (platform_system != 'Windows' and platform_system != 'Darwin')
multidict==6.0.2; python_version >= '3.7'
mwdblib==4.1.0
ndjson==0.3.1
np==1.0.2
numpy==1.19.5; python_version >= '3.6'
numpy==1.21.5; python_version < '3.10' and platform_machine != 'aarch64' and platform_machine != 'arm64'
oauth2==1.9.0.post1
olefile==0.46; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'
oletools==0.56
opencv-python==4.5.1.48
openpyxl
pandas-ods-reader==0.0.7
pandas==1.1.5
passivetotal==1.0.31
oletools==0.60
opencv-python==4.5.5.62
packaging==21.3; python_version >= '3.6'
pandas-ods-reader==0.1.2
pandas==1.3.5
passivetotal==2.5.8
pcodedmp==1.2.6
pdftotext==2.1.5
pillow==8.1.0
progressbar2==3.53.1
psutil==5.8.0; python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3'
pycparser==2.20; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'
pycryptodome==3.9.9; python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3'
pycryptodomex==3.9.9; python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3'
pydeep==0.4
pdftotext==2.2.2
pillow==9.0.1
progressbar2==4.0.0; python_version >= '3.7'
psutil==5.9.0; python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3'
pycparser==2.21
pycryptodome==3.14.1; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'
pycryptodomex==3.14.1; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'
pydeep2==0.5.1
pyeupi==1.1
pyfaup==1.2
pygeoip==0.3.2
pymisp[email,fileobjects,openioc,pdfexport]==2.4.137.1
pyopenssl==20.0.1
pygments==2.11.2; python_version >= '3.5'
pymisp[email,fileobjects,openioc,pdfexport,url]==2.4.155.1
pyparsing==2.4.7; python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3'
pypdns==1.5.1
pypssl==2.1
pyrsistent==0.17.3; python_version >= '3.5'
pytesseract==0.3.7
pypdns==1.5.2
pypssl==2.2
pyrsistent==0.18.1; python_version >= '3.7'
pytesseract==0.3.9
python-baseconv==1.2.2
python-dateutil==2.8.1; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'
python-docx==0.8.10
python-engineio==4.0.0
python-magic==0.4.18
python-pptx==0.6.18
python-socketio[client]==5.0.4
python-utils==2.5.2
python-dateutil==2.8.2; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'
python-docx==0.8.11
python-engineio==4.3.1; python_version >= '3.6'
python-magic==0.4.25
python-pptx==0.6.21
python-socketio[client]==5.5.2; python_version >= '3.6'
python-utils==3.1.0; python_version >= '3.7'
pytz-deprecation-shim==0.1.0.post0; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5'
pytz==2019.3
pyyaml==5.4.1; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5'
pyeti-python3==1.0.0
pyyaml==6.0; python_version >= '3.6'
pyzbar==0.1.8
pyzipper==0.3.4; python_version >= '3.5'
rdflib==5.0.0
redis==3.5.3; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'
reportlab==3.5.59
requests-cache==0.5.2
requests[security]==2.25.1
pyzipper==0.3.5; python_version >= '3.5'
rdflib==6.1.1; python_version >= '3.7'
redis==4.1.4; python_version >= '3.6'
reportlab==3.6.8
requests-cache==0.6.4; python_version >= '3.6'
requests-file==1.5.1
requests==2.27.1
rich==11.2.0; python_version < '4.0' and python_full_version >= '3.6.2'
rtfde==0.0.2
shodan==1.24.0
sigmatools==0.18.1
six==1.15.0; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'
socialscan==1.4.1
secretstorage==3.3.1; sys_platform == 'linux'
setuptools==60.9.3; python_version >= '3.7'
shodan==1.27.0
sigmatools==0.19.1
simplejson==3.17.6; python_version >= '2.5' and python_version not in '3.0, 3.1, 3.2, 3.3'
six==1.16.0; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'
socialscan==1.4.2
socketio-client==0.5.7.4
soupsieve==2.1; python_version >= '3'
soupsieve==2.3.1; python_version >= '3.6'
sparqlwrapper==1.8.5
stix2-patterns==1.3.2
tabulate==0.8.7
stix2==3.0.1
tabulate==0.8.9
tau-clients==0.2.1
taxii2-client==2.3.0
tldextract==3.2.0; python_version >= '3.7'
tornado==6.1; python_version >= '3.5'
tqdm==4.56.0; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'
trustar==0.3.34
typing-extensions==3.7.4.3; python_version < '3.8'
tzlocal==2.1
tqdm==4.63.0; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'
typing-extensions==4.1.1; python_version < '3.8'
tzdata==2021.5; python_version >= '3.6'
tzlocal==4.1; python_version >= '3.6'
unicodecsv==0.14.1
url-normalize==1.4.3; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5'
urlarchiver==0.2
urllib3==1.26.2; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4' and python_version < '4.0'
urllib3==1.26.8; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4' and python_version < '4.0'
validators==0.14.0
vt-graph-api==1.0.1
vulners==1.5.9
wand==0.6.5
websocket-client==0.57.0
wrapt==1.12.1
vt-graph-api==1.1.3
vt-py==0.13.1
vulners==2.0.2
wand==0.6.7
websocket-client==1.3.1; python_version >= '3.6'
wrapt==1.13.3; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'
xlrd==2.0.1
xlsxwriter==1.3.7
xlsxwriter==3.0.3; python_version >= '3.4'
yara-python==3.8.1
yarl==1.6.3; python_version >= '3.6'
yarl==1.7.2; python_version >= '3.6'
zipp==3.7.0; python_version >= '3.7'

docs/index.md
@@ -1,4 +1,120 @@
# Home

- [expansion](./expansion)
- [export](./export_mod)
- [import](./import_mod)
[![Build Status](https://travis-ci.org/MISP/misp-modules.svg?branch=main)](https://travis-ci.org/MISP/misp-modules)
[![Coverage Status](https://coveralls.io/repos/github/MISP/misp-modules/badge.svg?branch=master)](https://coveralls.io/github/MISP/misp-modules?branch=master)
[![codecov](https://codecov.io/gh/MISP/misp-modules/branch/main/graph/badge.svg)](https://codecov.io/gh/MISP/misp-modules)
[![FOSSA Status](https://app.fossa.io/api/projects/git%2Bgithub.com%2FMISP%2Fmisp-modules.svg?type=shield)](https://app.fossa.io/projects/git%2Bgithub.com%2FMISP%2Fmisp-modules?ref=badge_shield)

MISP modules are autonomous modules that can be used for expansion and other services in [MISP](https://github.com/MISP/MISP).

The modules are written in Python 3 following a simple API interface. The objective is to ease the extension of MISP functionalities
without modifying core components. The API is available via a simple REST API which is independent of the MISP installation or configuration.

MISP modules support is included in MISP starting from version `2.4.28`.

For more information: [Extending MISP with Python modules](https://www.circl.lu/assets/files/misp-training/switch2016/2-misp-modules.pdf) slides from MISP training.
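
As an illustration of that REST interface, here is a minimal sketch against a locally running server (the default port is 6666; the module name and payload fields are illustrative and vary per module):

```python
import json

import requests

# Assumes a local server started with: misp-modules -l 127.0.0.1 -s
BASE = "http://127.0.0.1:6666"

# List the modules exposed by the server
modules = requests.get(f"{BASE}/modules", timeout=10).json()
print([m["name"] for m in modules])

# Query a single module (module name and payload are illustrative)
query = {"module": "dns", "hostname": "www.circl.lu"}
result = requests.post(f"{BASE}/query", json=query, timeout=30)
print(json.dumps(result.json(), indent=2))
```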


## Existing MISP modules

### Expansion modules

* [Backscatter.io](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/expansion/backscatter_io.py) - a hover and expansion module to expand an IP address with mass-scanning observations.
* [BGP Ranking](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/expansion/bgpranking.py) - a hover and expansion module to expand an AS number with the ASN description, its history, and position in BGP Ranking.
* [BTC scam check](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/expansion/btc_scam_check.py) - an expansion hover module to instantly check if a BTC address has been abused.
* [BTC transactions](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/expansion/btc_steroids.py) - an expansion hover module to get a blockchain balance and the transactions from a BTC address in MISP.
* [CIRCL Passive DNS](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/expansion/circl_passivedns.py) - a hover and expansion module to expand hostname and IP addresses with passive DNS information.
* [CIRCL Passive SSL](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/expansion/circl_passivessl.py) - a hover and expansion module to expand IP addresses with the X.509 certificates seen.
* [countrycode](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/expansion/countrycode.py) - a hover module to tell you what country a URL belongs to.
* [CrowdStrike Falcon](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/expansion/crowdstrike_falcon.py) - an expansion module to expand using the CrowdStrike Falcon Intel Indicator API.
* [CVE](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/expansion/cve.py) - a hover module to give more information about a vulnerability (CVE).
* [CVE advanced](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/expansion/cve_advanced.py) - an expansion module to query the CIRCL CVE search API for more information about a vulnerability (CVE).
* [Cuckoo submit](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/expansion/cuckoo_submit.py) - a hover module to submit a malware sample, URL, attachment or domain to Cuckoo Sandbox.
* [DBL Spamhaus](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/expansion/dbl_spamhaus.py) - a hover module to check Spamhaus DBL for a domain name.
* [DNS](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/expansion/dns.py) - a simple module to resolve MISP attributes like hostname and domain to expand IP address attributes.
* [docx-enrich](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/expansion/docx-enrich.py) - an enrichment module to get text out of a Word document into MISP (using free-text parser).
* [DomainTools](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/expansion/domaintools.py) - a hover and expansion module to get information from [DomainTools](http://www.domaintools.com/) whois.
* [EUPI](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/expansion/eupi.py) - a hover and expansion module to get information about a URL from the [Phishing Initiative project](https://phishing-initiative.eu/?lang=en).
* [EQL](misp_modules/modules/expansion/eql.py) - an expansion module to generate event query language (EQL) from an attribute. [Event Query Language](https://eql.readthedocs.io/en/latest/)
* [Farsight DNSDB Passive DNS](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/expansion/farsight_passivedns.py) - a hover and expansion module to expand hostname and IP addresses with passive DNS information.
* [GeoIP](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/expansion/geoip_country.py) - a hover and expansion module to get GeoIP information from geolite/maxmind.
* [Greynoise](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/expansion/greynoise.py) - a hover module to get information from greynoise.
* [hashdd](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/expansion/hashdd.py) - a hover module to check file hashes against [hashdd.com](http://www.hashdd.com) including the NSRL dataset.
* [hibp](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/expansion/hibp.py) - a hover module to look up against Have I Been Pwned?
* [intel471](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/expansion/intel471.py) - an expansion module to get info from [Intel471](https://intel471.com).
* [IPASN](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/expansion/ipasn.py) - a hover and expansion module to get the BGP ASN of an IP address.
* [iprep](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/expansion/iprep.py) - an expansion module to get IP reputation from packetmail.net.
* [Joe Sandbox submit](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/expansion/joesandbox_submit.py) - submit files and URLs to Joe Sandbox.
* [Joe Sandbox query](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/expansion/joesandbox_query.py) - query Joe Sandbox with the link of an analysis and get the parsed data.
* [macaddress.io](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/expansion/macaddress_io.py) - a hover module to retrieve vendor details and other information regarding a given MAC address or an OUI from [MAC address Vendor Lookup](https://macaddress.io). See [integration tutorial here](https://macaddress.io/integrations/MISP-module).
* [macvendors](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/expansion/macvendors.py) - a hover module to retrieve MAC vendor information.
* [ocr-enrich](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/expansion/ocr-enrich.py) - an enrichment module to get OCRized data from images into MISP.
* [ods-enrich](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/expansion/ods-enrich.py) - an enrichment module to get text out of an OpenOffice spreadsheet document into MISP (using free-text parser).
* [odt-enrich](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/expansion/odt-enrich.py) - an enrichment module to get text out of an OpenOffice document into MISP (using free-text parser).
* [onyphe](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/expansion/onyphe.py) - a module to process queries on Onyphe.
* [onyphe_full](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/expansion/onyphe_full.py) - a module to process full queries on Onyphe.
* [OTX](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/expansion/otx.py) - an expansion module for [OTX](https://otx.alienvault.com/).
* [passivetotal](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/expansion/passivetotal.py) - a [passivetotal](https://www.passivetotal.org/) module that queries a number of different PassiveTotal datasets.
* [pdf-enrich](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/expansion/pdf-enrich.py) - an enrichment module to extract text from PDF into MISP (using free-text parser).
* [pptx-enrich](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/expansion/pptx-enrich.py) - an enrichment module to get text out of a PowerPoint document into MISP (using free-text parser).
* [qrcode](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/expansion/qrcode.py) - a module to decode QR codes, barcodes and similar codes from an image and enrich with the decoded values.
* [rbl](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/expansion/rbl.py) - a module to get RBL (Real-time Blackhole List) values from an attribute.
* [reversedns](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/expansion/reversedns.py) - simple reverse DNS expansion service to resolve reverse DNS from MISP attributes.
* [securitytrails](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/expansion/securitytrails.py) - an expansion module for [securitytrails](https://securitytrails.com/).
* [shodan](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/expansion/shodan.py) - a minimal [shodan](https://www.shodan.io/) expansion module.
* [Sigma queries](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/expansion/sigma_queries.py) - experimental expansion module querying a sigma rule to convert it into all the available SIEM signatures.
* [Sigma syntax validator](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/expansion/sigma_syntax_validator.py) - Sigma syntax validator.
* [sourcecache](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/expansion/sourcecache.py) - a module to cache a specific link from a MISP instance.
* [STIX2 pattern syntax validator](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/expansion/stix2_pattern_syntax_validator.py) - a module to check a STIX2 pattern syntax.
* [ThreatCrowd](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/expansion/threatcrowd.py) - an expansion module for [ThreatCrowd](https://www.threatcrowd.org/).
* [threatminer](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/expansion/threatminer.py) - an expansion module to expand from [ThreatMiner](https://www.threatminer.org/).
* [urlhaus](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/expansion/urlhaus.py) - query urlhaus to get additional data about a domain, hash, hostname, IP or URL.
* [urlscan](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/expansion/urlscan.py) - an expansion module to query [urlscan.io](https://urlscan.io).
* [virustotal](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/expansion/virustotal.py) - an expansion module to query the [VirusTotal](https://www.virustotal.com/gui/home) API with a high request rate limit required. (More details about the API: [here](https://developers.virustotal.com/reference))
* [virustotal_public](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/expansion/virustotal_public.py) - an expansion module to query the [VirusTotal](https://www.virustotal.com/gui/home) API with a public key and a low request rate limit. (More details about the API: [here](https://developers.virustotal.com/reference))
* [VMray](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/expansion/vmray_submit.py) - a module to submit a sample to VMray.
* [VulnDB](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/expansion/vulndb.py) - a module to query [VulnDB](https://www.riskbasedsecurity.com/).
* [Vulners](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/expansion/vulners.py) - an expansion module to expand information about CVEs using the Vulners API.
* [whois](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/expansion/whois.py) - a module to query a local instance of [uwhois](https://github.com/rafiot/uwhoisd).
* [wikidata](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/expansion/wiki.py) - a [wikidata](https://www.wikidata.org) expansion module.
* [xforce](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/expansion/xforceexchange.py) - an IBM X-Force Exchange expansion module.
* [xlsx-enrich](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/expansion/xlsx-enrich.py) - an enrichment module to get text out of an Excel document into MISP (using free-text parser).
* [YARA query](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/expansion/yara_query.py) - a module to create YARA rules from single hash attributes.
* [YARA syntax validator](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/expansion/yara_syntax_validator.py) - YARA syntax validator.

### Export modules

* [CEF](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/export_mod/cef_export.py) module to export Common Event Format (CEF).
* [Cisco FireSight Manager ACL rule](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/export_mod/cisco_firesight_manager_ACL_rule_export.py) module to export a rule for the Cisco FireSight manager ACL.
* [GoAML export](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/export_mod/goamlexport.py) module to export in [GoAML format](http://goaml.unodc.org/goaml/en/index.html).
* [Lite Export](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/export_mod/liteexport.py) module to export a lite event.
* [Mass EQL Export](misp_modules/modules/export_mod/mass_eql_export.py) module to export applicable attributes from an event to a mass EQL query.
* [PDF export](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/export_mod/pdfexport.py) module to export an event in PDF.
* [Nexthink query format](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/export_mod/nexthinkexport.py) module to export in Nexthink query format.
* [osquery](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/export_mod/osqueryexport.py) module to export in [osquery](https://osquery.io/) query format.
* [ThreatConnect](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/export_mod/threat_connect_export.py) module to export in ThreatConnect CSV format.
* [ThreatStream](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/export_mod/threatStream_misp_export.py) module to export in ThreatStream format.

### Import modules

* [CSV import](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/import_mod/csvimport.py) Customizable CSV import module.
* [Cuckoo JSON](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/import_mod/cuckooimport.py) Cuckoo JSON import.
* [Email Import](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/import_mod/email_import.py) Email import module for MISP to import basic metadata.
* [GoAML import](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/import_mod/goamlimport.py) Module to import [GoAML](http://goaml.unodc.org/goaml/en/index.html) XML format.
* [Joe Sandbox import](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/import_mod/joe_import.py) Parse data from a Joe Sandbox json report.
* [OCR](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/import_mod/ocr.py) Optical Character Recognition (OCR) module for MISP to import attributes from images, scans or faxes.
* [OpenIOC](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/import_mod/openiocimport.py) OpenIOC import based on the PyMISP library.
* [ThreatAnalyzer](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/import_mod/threatanalyzer_import.py) - an import module to process ThreatAnalyzer archive.zip/analysis.json sandbox exports.
* [VMRay](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/import_mod/vmray_import.py) - an import module to process VMRay exports.


## How to contribute your own module?

Fork the project, add your module, test it and make a pull request. Modules can also be private, as you can add a module to your own MISP installation.
For further information please see [Contribute](contribute/).


## Licenses
[![FOSSA Status](https://app.fossa.io/api/projects/git%2Bgithub.com%2FMISP%2Fmisp-modules.svg?type=large)](https://app.fossa.io/projects/git%2Bgithub.com%2FMISP%2Fmisp-modules?ref=badge_large)

For further information, see also the [license file](license/).

@@ -606,18 +606,19 @@ Module to query a local copy of Maxmind's Geolite database.

<img src=logos/greynoise.png height=60>

Module to access GreyNoise.io API
Module to query IP and CVE information from GreyNoise
- **features**:
>The module takes an IP address as input and queries Greynoise for some additional information about it: basically it checks whether a given IP address is “Internet background noise”, or has been observed scanning or attacking devices across the Internet. The result is returned as text.
>This module supports: 1) Query an IP from GreyNoise to see if it is internet background noise or a common business service 2) Query a CVE from GreyNoise to see the total number of internet scanners looking for the CVE in the last 7 days.
- **input**:
>An IP address.
>An IP address or CVE ID
- **output**:
>Additional information about the IP fetched from Greynoise API.
>IP lookup information or CVE scanning profile for the past 7 days
- **references**:
> - https://greynoise.io/
> - https://github.com/GreyNoise-Intelligence/api.greynoise.io
> - https://docs.greynoise.io/
> - https://www.greynoise.io/viz/account/
- **requirements**:
>A Greynoise API key.
>A Greynoise API key. Both Enterprise (Paid) and Community (Free) API keys are supported, however Community API users will only be able to perform IP lookups.

-----

@@ -635,6 +636,25 @@ A hover module to check hashes against hashdd.com including the NSRL dataset.

-----

#### [hashlookup](https://github.com/MISP/misp-modules/tree/main/misp_modules/modules/expansion/hashlookup.py)

<img src=logos/circl.png height=60>

An expansion module to query the CIRCL hashlookup services to find out if a hash is part of a known set such as NSRL.
- **features**:
>The module takes file hashes as input, such as an MD5 or SHA1.
> It queries the public CIRCL.lu hashlookup service and returns all the hits if the hashes are known in an existing dataset. The module can be configured with a custom hashlookup url if required.
> The module can be used as a hover module but also as an expansion module to add related MISP objects.
>
- **input**:
>File hashes (MD5, SHA1)
- **output**:
>Object with the filename and associated hashes if the hash is part of a known set.
- **references**:
>https://www.circl.lu/services/hashlookup/
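
For reference, the public hashlookup instance can be queried directly over HTTP; a minimal sketch (the hash below is the well-known MD5 of an empty file, present in NSRL):

```python
import requests

def hashlookup_md5(md5_hash: str) -> dict:
    """Look up an MD5 hash on the public CIRCL hashlookup instance."""
    resp = requests.get(
        f"https://hashlookup.circl.lu/lookup/md5/{md5_hash}",
        headers={"accept": "application/json"},
        timeout=10,
    )
    resp.raise_for_status()  # a 404 means the hash is not in any known dataset
    return resp.json()

print(hashlookup_md5("d41d8cd98f00b204e9800998ecf8427e"))
```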

-----

#### [hibp](https://github.com/MISP/misp-modules/tree/main/misp_modules/modules/expansion/hibp.py)

<img src=logos/hibp.png height=60>

@@ -734,6 +754,26 @@ Module to query an IP ASN history service (https://github.com/D4-project/IPASN-H

-----

#### [ipqs_fraud_and_risk_scoring](https://github.com/MISP/misp-modules/tree/main/misp_modules/modules/expansion/ipqs_fraud_and_risk_scoring.py)

<img src=logos/ipqualityscore.png height=60>

IPQualityScore MISP Expansion Module for IP reputation, Email Validation, Phone Number Validation, Malicious Domain and Malicious URL Scanner.
- **features**:
>This module takes the IP address, domain, URL, email and phone number MISP attributes as input to query the IPQualityScore API.
> The results of the IPQualityScore API are then returned as an IPQS Fraud and Risk Scoring object.
> The object contains a copy of the enriched attribute with added tags presenting the verdict based on the fraud score, risk score and other attributes from IPQualityScore.
- **input**:
>A MISP attribute of type IP address (ip-src, ip-dst), domain (hostname, domain), URL (url, uri), email address (email, email-src, email-dst, target-email, whois-registrant-email) or phone number (phone-number, whois-registrant-phone).
- **output**:
>IPQualityScore object, resulting from the query on the IPQualityScore API.
- **references**:
>https://www.ipqualityscore.com/
- **requirements**:
>An IPQualityScore API key.
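
As a sketch of the underlying call, the documented IPQualityScore IP reputation endpoint can be queried like this (the API key is a placeholder):

```python
import requests

IPQS_API_KEY = "YOUR-API-KEY"  # placeholder

def ipqs_ip_lookup(ip: str) -> dict:
    """Fetch fraud and risk scoring for an IP from the IPQualityScore JSON API."""
    resp = requests.get(
        f"https://ipqualityscore.com/api/json/ip/{IPQS_API_KEY}/{ip}",
        timeout=10,
    )
    resp.raise_for_status()
    return resp.json()  # includes fields such as "fraud_score", "proxy", "vpn"

print(ipqs_ip_lookup("8.8.8.8").get("fraud_score"))
```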

-----

#### [iprep](https://github.com/MISP/misp-modules/tree/main/misp_modules/modules/expansion/iprep.py)

Module to query IPRep data for IP addresses.

@@ -802,6 +842,8 @@ A module to submit files or URLs to Joe Sandbox for an advanced analysis, and re

<img src=logos/lastline.png height=60>

Deprecation notice: this module will be deprecated by December 2021, please use the vmware_nsx module.

Query Lastline with an analysis link and parse the report into MISP attributes and objects.
The analysis link can also be retrieved from the output of the [lastline_submit](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/expansion/lastline_submit.py) expansion module.
- **features**:

@@ -821,6 +863,8 @@ The analysis link can also be retrieved from the output of the [lastline_submit]

<img src=logos/lastline.png height=60>

Deprecation notice: this module will be deprecated by December 2021, please use the vmware_nsx module.

Module to submit a file or URL to Lastline.
- **features**:
>The module requires a Lastline Analysis `api_token` and `key`.

@@ -892,6 +936,39 @@ Query the MALWAREbazaar API to get additional information about the input hash a

-----

#### [mmdb_lookup](https://github.com/MISP/misp-modules/tree/main/misp_modules/modules/expansion/mmdb_lookup.py)

<img src=logos/circl.png height=60>

A hover and expansion module to enrich an IP with geolocation and ASN information from an mmdb server instance, such as CIRCL's ip.circl.lu.
- **features**:
>The module takes an IP address related attribute as input.
> It queries the public CIRCL.lu mmdb-server instance, available at ip.circl.lu, by default. The module can be configured with a custom mmdb server url if required.
> It is also possible to filter results on a single db_source by configuring db_source_filter.
- **input**:
>An IP address attribute (for example ip-src or ip-src|port).
- **output**:
>Geolocation and ASN objects.
- **references**:
> - https://data.public.lu/fr/datasets/geo-open-ip-address-geolocation-per-country-in-mmdb-format/
> - https://github.com/adulau/mmdb-server
|
||||
|
||||
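For reference, the underlying mmdb-server can be queried directly; the `/geolookup/<ip>` endpoint below is taken from the adulau/mmdb-server documentation, and the response field names are illustrative:

```python
import requests

# Query the public CIRCL mmdb-server directly (same backend the module uses).
# One result entry is returned per db_source.
ip = "188.65.220.25"
response = requests.get(f"https://ip.circl.lu/geolookup/{ip}")
for entry in response.json():
    print(entry.get("meta", {}).get("db_source"), entry.get("country"))
```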
-----
|
||||
|
||||
#### [mwdb](https://github.com/MISP/misp-modules/tree/main/misp_modules/modules/expansion/mwdb.py)
|
||||
|
||||
Module to push malware samples to a MWDB instance
|
||||
- **features**:
|
||||
>An expansion module to push malware samples to a MWDB (https://github.com/CERT-Polska/mwdb-core) instance. This module does not push samples to a sandbox; that can be achieved via Karton (connected to the MWDB). The module: * uploads the attachment or malware sample to MWDB; * adds the tags of events and/or attributes to MWDB; * adds the comment of the MISP attribute to MWDB; * adds a link back to the MISP event to MWDB via the MWDB attribute; * adds a link to the MWDB attribute as an enriched attribute to the MISP event.
|
||||
- **input**:
|
||||
>Attachment or malware sample
|
||||
- **output**:
|
||||
>Link attribute that points to the sample at the MWDB instance
|
||||
- **requirements**:
|
||||
>* mwdblib installed (pip install mwdblib); * (optional) keys.py file to add tags of events/attributes to MWDB; * (optional) MWDB attribute created for the link back to MISP (defined in mwdb_misp_attribute)
|
||||
|
||||
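A minimal sketch of what the module automates with mwdblib, assuming a reachable MWDB instance; the URL, file name and attribute key are placeholders:

```python
from mwdblib import MWDB  # pip install mwdblib

mwdb = MWDB(api_url="https://mwdb.example.org/api/", api_key="YOUR_KEY")
with open("sample.bin", "rb") as f:
    sample = mwdb.upload_file("sample.bin", f.read())
sample.add_tag("misp")                         # tags from the event/attribute
sample.add_comment("Comment of the MISP attribute")
# Link back to the MISP event via a pre-created MWDB attribute:
sample.add_attribute("misp", "https://misp.example.org/events/view/1234")
print(sample.sha256)
```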
-----
|
||||
|
||||
#### [ocr_enrich](https://github.com/MISP/misp-modules/tree/main/misp_modules/modules/expansion/ocr_enrich.py)
|
||||
|
||||
Module to process some optical character recognition on pictures.
|
||||
|
@ -1016,6 +1093,25 @@ Module to get information from AlienVault OTX.
|
|||
|
||||
-----
|
||||
|
||||
#### [passivessh](https://github.com/MISP/misp-modules/tree/main/misp_modules/modules/expansion/passivessh.py)
|
||||
|
||||
<img src=logos/passivessh.png height=60>
|
||||
|
||||
An expansion module to query the CIRCL Passive SSH service.
|
||||
- **features**:
|
||||
>The module queries the Passive SSH service from CIRCL.
|
||||
>
|
||||
> The module can be used as a hover module but also as an expansion module to add related MISP objects.
|
||||
>
|
||||
- **input**:
|
||||
>IP addresses or SSH fingerprints
|
||||
- **output**:
|
||||
>SSH key materials, complementary IP addresses with similar SSH key materials
|
||||
- **references**:
|
||||
>https://github.com/D4-project/passive-ssh
|
||||
|
||||
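As a hedged sketch, the Passive SSH REST API can also be queried directly; the endpoint and basic-auth scheme below follow the D4-project/passive-ssh documentation and may differ per instance:

```python
import requests

# Look up SSH key material seen for an IP on a Passive SSH instance.
response = requests.get(
    "https://passivessh.circl.lu/host/ssh/203.0.113.5",
    auth=("username", "password"),  # instance credentials
)
print(response.json())  # banners, fingerprints, related key material
```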
-----
|
||||
|
||||
#### [passivetotal](https://github.com/MISP/misp-modules/tree/main/misp_modules/modules/expansion/passivetotal.py)
|
||||
|
||||
<img src=logos/passivetotal.png height=60>
|
||||
|
@ -1099,6 +1195,24 @@ Module to extract freetext from a .pptx document.
|
|||
|
||||
-----
|
||||
|
||||
#### [qintel_qsentry](https://github.com/MISP/misp-modules/tree/main/misp_modules/modules/expansion/qintel_qsentry.py)
|
||||
|
||||
<img src=logos/qintel.png height=60>
|
||||
|
||||
A hover and expansion module which queries Qintel QSentry for IP reputation data.
|
||||
- **features**:
|
||||
>This module takes an IP address (ip-src or ip-dst) attribute as input and queries the Qintel QSentry API to retrieve IP reputation data.
|
||||
- **input**:
|
||||
>An IP address attribute (ip-src or ip-dst).
|
||||
- **output**:
|
||||
>Objects containing the enriched IP, threat tags, last seen attributes and associated Autonomous System information
|
||||
- **references**:
|
||||
>https://www.qintel.com/products/qsentry/
|
||||
- **requirements**:
|
||||
>A Qintel API token
|
||||
|
||||
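A rough sketch of the underlying lookup, reusing the `x-api-key` header and the `https://api.qsentry.qintel.com` base URL defined in the bundled qintel_helper further down in this changeset; the query parameter name is an assumption:

```python
import requests

response = requests.get(
    "https://api.qsentry.qintel.com/",
    params={"q": "198.51.100.7"},           # assumed parameter name
    headers={"x-api-key": "YOUR_QINTEL_TOKEN"},
)
print(response.json())  # reputation verdict, threat tags, last seen, ASN
```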
-----
|
||||
|
||||
#### [qrcode](https://github.com/MISP/misp-modules/tree/main/misp_modules/modules/expansion/qrcode.py)
|
||||
|
||||
Module to decode QR codes.
|
||||
|
@ -1567,6 +1681,26 @@ Module to submit a sample to VMRay.
|
|||
|
||||
-----
|
||||
|
||||
#### [vmware_nsx](https://github.com/MISP/misp-modules/tree/main/misp_modules/modules/expansion/vmware_nsx.py)
|
||||
|
||||
<img src=logos/vmware_nsx.png height=60>
|
||||
|
||||
Module to enrich a file or URL with VMware NSX Defender.
|
||||
- **features**:
|
||||
>This module takes an IoC such as file hash, file attachment, malware-sample or url as input to query VMware NSX Defender.
|
||||
>
|
||||
>The IoC is then enriched with data from VMware NSX Defender.
|
||||
- **input**:
|
||||
>File hash, attachment or URL to be enriched with VMware NSX Defender.
|
||||
- **output**:
|
||||
>Objects and tags generated by VMware NSX Defender.
|
||||
- **references**:
|
||||
>https://www.vmware.com
|
||||
- **requirements**:
|
||||
>The module requires a VMware NSX Defender Analysis `api_token` and `key`.
|
||||
|
||||
-----
|
||||
|
||||
#### [vulndb](https://github.com/MISP/misp-modules/tree/main/misp_modules/modules/expansion/vulndb.py)
|
||||
|
||||
<img src=logos/vulndb.png height=60>
|
||||
|
@ -1720,6 +1854,26 @@ An expansion hover module to perform a syntax check on if yara rules are valid o
|
|||
|
||||
-----
|
||||
|
||||
#### [yeti](https://github.com/MISP/misp-modules/tree/main/misp_modules/modules/expansion/yeti.py)
|
||||
|
||||
<img src=logos/yeti.png height=60>
|
||||
|
||||
Module to process a query on Yeti.
|
||||
- **features**:
|
||||
>This module adds context and links between observables using Yeti.
|
||||
- **input**:
|
||||
>A domain, hostname, IP address, SHA256, SHA1, MD5 or URL MISP attribute.
|
||||
- **output**:
|
||||
>MISP attributes and objects fetched from the Yeti instance.
|
||||
- **references**:
|
||||
> - https://github.com/yeti-platform/yeti
|
||||
> - https://github.com/sebdraven/pyeti
|
||||
- **requirements**:
|
||||
> - pyeti
|
||||
> - API key
|
||||
|
||||
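A minimal sketch with pyeti, assuming a reachable Yeti instance; the instance URL and observable value are placeholders, and `observable_search` is used purely for illustration:

```python
from pyeti import YetiApi  # pip install pyeti

api = YetiApi("https://yeti.example.org/api/", api_key="YOUR_API_KEY")
results = api.observable_search(value="malicious-domain.example")
for observable in results:
    print(observable["value"], observable.get("tags", []))
```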
-----
|
||||
|
||||
## Export Modules
|
||||
|
||||
#### [cef_export](https://github.com/MISP/misp-modules/tree/main/misp_modules/modules/export_mod/cef_export.py)
|
||||
|
@ -1930,6 +2084,25 @@ Module to export a structured CSV file for uploading to ThreatConnect.
|
|||
|
||||
-----
|
||||
|
||||
#### [virustotal_collections](https://github.com/MISP/misp-modules/tree/main/misp_modules/modules/export_mod/virustotal_collections.py)
|
||||
|
||||
<img src=logos/virustotal.png height=60>
|
||||
|
||||
Creates a VT Collection from an event's IOCs.
|
||||
- **features**:
|
||||
>This export module takes advantage of a new endpoint in VT APIv3 to create VT Collections from IOCs contained in a MISP event. With this module, users can create a collection just by using the Download as... button.
|
||||
- **input**:
|
||||
>A domain, hash (md5, sha1, sha256 or sha512), hostname, url or IP address attribute.
|
||||
- **output**:
|
||||
>A VirusTotal collection in VT.
|
||||
- **references**:
|
||||
> - https://www.virustotal.com/
|
||||
> - https://blog.virustotal.com/2021/11/introducing-virustotal-collections.html
|
||||
- **requirements**:
|
||||
>An access to the VirusTotal API (apikey).
|
||||
|
||||
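Under the hood this maps to a single call to the VT APIv3 collections endpoint; a hedged sketch, with the relationship payload trimmed to one domain:

```python
import requests

payload = {
    "data": {
        "type": "collection",
        "attributes": {"name": "MISP event 1234"},
        "relationships": {
            "domains": {"data": [{"type": "domain", "id": "example.com"}]}
        },
    }
}
response = requests.post(
    "https://www.virustotal.com/api/v3/collections",
    json=payload,
    headers={"x-apikey": "YOUR_VT_API_KEY"},
)
print(response.json()["data"]["id"])  # id of the new collection
```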
-----
|
||||
|
||||
#### [vt_graph](https://github.com/MISP/misp-modules/tree/main/misp_modules/modules/export_mod/vt_graph.py)
|
||||
|
||||
<img src=logos/virustotal.png height=60>
|
||||
|
@ -1952,6 +2125,22 @@ This module is used to create a VirusTotal Graph from a MISP event.
|
|||
|
||||
## Import Modules
|
||||
|
||||
#### [cof2misp](https://github.com/MISP/misp-modules/tree/main/misp_modules/modules/import_mod/cof2misp.py)
|
||||
|
||||
Passive DNS Common Output Format (COF) MISP importer
|
||||
- **features**:
|
||||
>Takes as input a valid COF file or the output of the dnsdbflex utility and creates MISP objects for the input.
|
||||
- **input**:
|
||||
>Passive DNS output in Common Output Format (COF)
|
||||
- **output**:
|
||||
>MISP objects
|
||||
- **references**:
|
||||
>https://tools.ietf.org/id/draft-dulaunoy-dnsop-passive-dns-cof-08.html
|
||||
- **requirements**:
|
||||
>PyMISP
|
||||
|
||||
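For illustration, one COF record (borrowed from the unit tests further down in this changeset) converted into a MISP object with PyMISP; the standard 'passive-dns' object template is assumed to be available:

```python
from pymisp import MISPObject

cof_record = {
    "count": 1909, "rdata": ["cpa.circl.lu"], "rrname": "www.circl.lu",
    "rrtype": "CNAME", "time_first": "1315586409", "time_last": "1449566799",
}
misp_object = MISPObject("passive-dns")
misp_object.add_attribute("rrname", cof_record["rrname"])
misp_object.add_attribute("rrtype", cof_record["rrtype"])
for rdata in cof_record["rdata"]:
    misp_object.add_attribute("rdata", rdata)
```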
-----
|
||||
|
||||
#### [csvimport](https://github.com/MISP/misp-modules/tree/main/misp_modules/modules/import_mod/csvimport.py)
|
||||
|
||||
Module to import MISP attributes from a csv file.
|
||||
|
@ -2044,6 +2233,8 @@ A module to import data from a Joe Sandbox analysis json report.
|
|||
|
||||
<img src=logos/lastline.png height=60>
|
||||
|
||||
Deprecation notice: this module will be deprecated by December 2021, please use vmware_nsx module.
|
||||
|
||||
Module to import and parse reports from Lastline analysis links.
|
||||
- **features**:
|
||||
>The module requires a Lastline Portal `username` and `password`.
|
||||
|
|
After Width: | Height: | Size: 19 KiB |
Before Width: | Height: | Size: 112 KiB After Width: | Height: | Size: 90 KiB |
After Width: | Height: | Size: 6.6 KiB |
After Width: | Height: | Size: 57 KiB |
After Width: | Height: | Size: 46 KiB |
After Width: | Height: | Size: 52 KiB |
|
@ -14,7 +14,8 @@ sudo apt-get install -y \
|
|||
zbar-tools \
|
||||
libzbar0 \
|
||||
libzbar-dev \
|
||||
libfuzzy-dev
|
||||
libfuzzy-dev \
|
||||
libcaca-dev
|
||||
|
||||
# BEGIN with virtualenv:
|
||||
$SUDO_WWW virtualenv -p python3 /var/www/MISP/venv
|
||||
|
|
|
@ -1,14 +1,15 @@
|
|||
{
|
||||
"description": "Module to access GreyNoise.io API",
|
||||
"description": "Module to query IP and CVE information from GreyNoise",
|
||||
"logo": "greynoise.png",
|
||||
"requirements": [
|
||||
"A Greynoise API key."
|
||||
"A Greynoise API key. Both Enterprise (Paid) and Community (Free) API keys are supported, however Community API users will only be able to perform IP lookups."
|
||||
],
|
||||
"input": "An IP address.",
|
||||
"output": "Additional information about the IP fetched from Greynoise API.",
|
||||
"input": "An IP address or CVE ID",
|
||||
"output": "IP Lookup information or CVE scanning profile for past 7 days",
|
||||
"references": [
|
||||
"https://greynoise.io/",
|
||||
"https://github.com/GreyNoise-Intelligence/api.greynoise.io"
|
||||
"https://docs.greyniose.io/",
|
||||
"https://www.greynoise.io/viz/account/"
|
||||
],
|
||||
"features": "The module takes an IP address as input and queries Greynoise for some additional information about it: basically it checks whether a given IP address is \u201cInternet background noise\u201d, or has been observed scanning or attacking devices across the Internet. The result is returned as text."
|
||||
"features": "This module supports: 1) Query an IP from GreyNoise to see if it is internet background noise or a common business service 2) Query a CVE from GreyNoise to see the total number of internet scanners looking for the CVE in the last 7 days."
|
||||
}
|
|
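As a hedged illustration of the free tier mentioned above, an IP lookup against the GreyNoise Community API (header name per the GreyNoise documentation):

```python
import requests

response = requests.get(
    "https://api.greynoise.io/v3/community/8.8.8.8",
    headers={"key": "YOUR_GREYNOISE_API_KEY"},
)
print(response.json())  # noise / riot classification for the IP
```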
@ -0,0 +1,10 @@
|
|||
{
|
||||
"description": "An expansion module to query the CIRCL hashlookup services to find it if a hash is part of a known set such as NSRL.",
|
||||
"logo": "circl.png",
|
||||
"input": "File hashes (MD5, SHA1)",
|
||||
"output": "Object with the filename associated hashes if the hash is part of a known set.",
|
||||
"references": [
|
||||
"https://www.circl.lu/services/hashlookup/"
|
||||
],
|
||||
"features": "The module takes file hashes as input such as a MD5 or SHA1.\n It queries the public CIRCL.lu hashlookup service and return all the hits if the hashes are known in an existing dataset. The module can be configured with a custom hashlookup url if required.\n The module can be used an hover module but also an expansion model to add related MISP objects.\n"
|
||||
}
|
|
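Since hashlookup is a plain REST service, the module's query boils down to a lookup like this (MD5 endpoint shown; the hash is the sample value from the hashlookup documentation):

```python
import requests

md5 = "8ED4B4ED952526D89899E723F3488DE4"
response = requests.get(f"https://hashlookup.circl.lu/lookup/md5/{md5}")
print(response.json())  # filename and related hashes if the hash is known
```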
@ -0,0 +1,13 @@
|
|||
{
|
||||
"description": "IPQualityScore MISP Expansion Module for IP reputation, Email Validation, Phone Number Validation, Malicious Domain and Malicious URL Scanner.",
|
||||
"logo": "ipqualityscore.png",
|
||||
"requirements": [
|
||||
"A IPQualityScore API Key."
|
||||
],
|
||||
"input": "A MISP attribute of type IP Address(ip-src, ip-dst), Domain(hostname, domain), URL(url, uri), Email Address(email, email-src, email-dst, target-email, whois-registrant-email) and Phone Number(phone-number, whois-registrant-phone).",
|
||||
"output": "IPQualityScore object, resulting from the query on the IPQualityScore API.",
|
||||
"references": [
|
||||
"https://www.ipqualityscore.com/"
|
||||
],
|
||||
"features": "This Module takes the IP Address, Domain, URL, Email and Phone Number MISP Attributes as input to query the IPQualityScore API.\n The results of the IPQualityScore API are than returned as IPQS Fraud and Risk Scoring Object. \n The object contains a copy of the enriched attribute with added tags presenting the verdict based on fraud score,risk score and other attributes from IPQualityScore."
|
||||
}
|
|
@ -1,5 +1,5 @@
|
|||
{
|
||||
"description": "Query Lastline with an analysis link and parse the report into MISP attributes and objects.\nThe analysis link can also be retrieved from the output of the [lastline_submit](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/expansion/lastline_submit.py) expansion module.",
|
||||
"description": "Deprecation notice: this module will be deprecated by December 2021, please use vmware_nsx module.\n\nQuery Lastline with an analysis link and parse the report into MISP attributes and objects.\nThe analysis link can also be retrieved from the output of the [lastline_submit](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/expansion/lastline_submit.py) expansion module.",
|
||||
"logo": "lastline.png",
|
||||
"requirements": [],
|
||||
"input": "Link to a Lastline analysis.",
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
{
|
||||
"description": "Module to submit a file or URL to Lastline.",
|
||||
"description": "Deprecation notice: this module will be deprecated by December 2021, please use vmware_nsx module.\n\nModule to submit a file or URL to Lastline.",
|
||||
"logo": "lastline.png",
|
||||
"requirements": [],
|
||||
"input": "File or URL to submit to Lastline.",
|
||||
|
|
|
@ -0,0 +1,11 @@
|
|||
{
|
||||
"description": "A hover and expansion module to enrich an ip with geolocation and ASN information from an mmdb server instance, such as CIRCL's ip.circl.lu.",
|
||||
"logo": "circl.png",
|
||||
"input": "An IP address attribute (for example ip-src or ip-src|port).",
|
||||
"output": "Geolocation and asn objects.",
|
||||
"references": [
|
||||
"https://data.public.lu/fr/datasets/geo-open-ip-address-geolocation-per-country-in-mmdb-format/",
|
||||
"https://github.com/adulau/mmdb-server"
|
||||
],
|
||||
"features": "The module takes an IP address related attribute as input.\n It queries the public CIRCL.lu mmdb-server instance, available at ip.circl.lu, by default. The module can be configured with a custom mmdb server url if required.\n It is also possible to filter results on 1 db_source by configuring db_source_filter."
|
||||
}
|
|
@ -0,0 +1,11 @@
|
|||
{
|
||||
"description": "Module to push malware samples to a MWDB instance",
|
||||
"requirements": [
|
||||
"* mwdblib installed (pip install mwdblib) ; * (optional) keys.py file to add tags of events/attributes to MWDB * (optional) MWDB attribute created for the link back to MISP (defined in mwdb_misp_attribute)"
|
||||
],
|
||||
"input": "Attachment or malware sample",
|
||||
"output": "Link attribute that points to the sample at the MWDB instane",
|
||||
"references": [
|
||||
],
|
||||
"features": "An expansion module to push malware samples to a MWDB (https://github.com/CERT-Polska/mwdb-core) instance. This module does not push samples to a sandbox. This can be achieved via Karton (connected to the MWDB). Does: * Upload of attachment or malware sample to MWDB * Tags of events and/or attributes are added to MWDB. * Comment of the MISP attribute is added to MWDB. * A link back to the MISP event is added to MWDB via the MWDB attribute. * A link to the MWDB attribute is added as an enrichted attribute to the MISP event."
|
||||
}
|
|
@ -0,0 +1,10 @@
|
|||
{
|
||||
"description": "An expansion module to query the CIRCL Passive SSH.",
|
||||
"logo": "passivessh.png",
|
||||
"input": "IP addresses or SSH fingerprints",
|
||||
"output": "SSH key materials, complementary IP addresses with similar SSH key materials",
|
||||
"references": [
|
||||
"https://github.com/D4-project/passive-ssh"
|
||||
],
|
||||
"features": "The module queries the Passive SSH service from CIRCL.\n \n The module can be used an hover module but also an expansion model to add related MISP objects.\n"
|
||||
}
|
|
@ -0,0 +1,13 @@
|
|||
{
|
||||
"description": "A hover and expansion module which queries Qintel QSentry for ip reputation data",
|
||||
"logo": "qintel.png",
|
||||
"requirements": [
|
||||
"A Qintel API token"
|
||||
],
|
||||
"input": "ip address attribute",
|
||||
"ouput": "Objects containing the enriched IP, threat tags, last seen attributes and associated Autonomous System information",
|
||||
"features": "This module takes an ip-address (ip-src or ip-dst) attribute as input, and queries the Qintel QSentry API to retrieve ip reputation data",
|
||||
"references": [
|
||||
"https://www.qintel.com/products/qsentry/"
|
||||
]
|
||||
}
|
|
@ -0,0 +1,14 @@
|
|||
{
|
||||
"description": "Module to enrich a file or URL with VMware NSX Defender.",
|
||||
"logo": "vmware_nsx.png",
|
||||
"requirements": [
|
||||
"The module requires a VMware NSX Defender Analysis `api_token` and `key`."
|
||||
],
|
||||
"input": "File hash, attachment or URL to be enriched with VMware NSX Defender.",
|
||||
"output": "Objects and tags generated by VMware NSX Defender.",
|
||||
"references": [
|
||||
"https://www.vmware.com"
|
||||
],
|
||||
"features": "This module takes an IoC such as file hash, file attachment, malware-sample or url as input to query VMware NSX Defender.\n\nThe IoC is then enriched with data from VMware NSX Defender."
|
||||
}
|
||||
|
|
@ -0,0 +1,14 @@
|
|||
{
|
||||
"description": "Creates a VT Collection from an event iocs.",
|
||||
"logo": "virustotal.png",
|
||||
"requirements": [
|
||||
"An access to the VirusTotal API (apikey)."
|
||||
],
|
||||
"input": "A domain, hash (md5, sha1, sha256 or sha512), hostname, url or IP address attribute.",
|
||||
"output": "A VirusTotal collection in VT.",
|
||||
"references": [
|
||||
"https://www.virustotal.com/",
|
||||
"https://blog.virustotal.com/2021/11/introducing-virustotal-collections.html"
|
||||
],
|
||||
"features": "This export module which takes advantage of a new endpoint in VT APIv3 to create VT Collections from IOCs contained in a MISP event. With this module users will be able to create a collection just using the Download as... button."
|
||||
}
|
|
@ -0,0 +1,12 @@
|
|||
{
|
||||
"description": "Passive DNS Common Output Format (COF) MISP importer",
|
||||
"requirements": [
|
||||
"PyMISP"
|
||||
],
|
||||
"features": "Takes as input a valid COF file or the output of the dnsdbflex utility and creates MISP objects for the input.",
|
||||
"references": [
|
||||
"https://tools.ietf.org/id/draft-dulaunoy-dnsop-passive-dns-cof-08.html"
|
||||
],
|
||||
"input": "Passive DNS output in Common Output Format (COF)",
|
||||
"output": "MISP objects"
|
||||
}
|
|
@ -1,5 +1,5 @@
|
|||
{
|
||||
"description": "Module to import and parse reports from Lastline analysis links.",
|
||||
"description": "Deprecation notice: this module will be deprecated by December 2021, please use vmware_nsx module.\n\nModule to import and parse reports from Lastline analysis links.",
|
||||
"logo": "lastline.png",
|
||||
"requirements": [],
|
||||
"input": "Link to a Lastline analysis.",
|
||||
|
|
|
@ -41,14 +41,14 @@ try:
|
|||
from .modules import * # noqa
|
||||
HAS_PACKAGE_MODULES = True
|
||||
except Exception as e:
|
||||
print(e)
|
||||
logging.exception(e)
|
||||
HAS_PACKAGE_MODULES = False
|
||||
|
||||
try:
|
||||
from .helpers import * # noqa
|
||||
HAS_PACKAGE_HELPERS = True
|
||||
except Exception as e:
|
||||
print(e)
|
||||
logging.exception(e)
|
||||
HAS_PACKAGE_HELPERS = False
|
||||
|
||||
log = logging.getLogger('misp-modules')
|
||||
|
|
|
@ -1,3 +1,4 @@
|
|||
import joe_mapping
|
||||
from .vt_graph_parser import * # noqa
|
||||
|
||||
all = ['joe_parser', 'lastline_api', 'cof2misp']
|
||||
all = ['joe_parser', 'lastline_api', 'cof2misp', 'qintel_helper']
|
||||
|
|
|
@ -27,7 +27,7 @@ def is_valid_ip(ip: str) -> bool:
|
|||
try:
|
||||
ipaddress.ip_address(ip)
|
||||
except Exception as ex:
|
||||
print("is_valid_ip(%s) returned False. Reason: %s" % (ip, str(ex)), file=sys.stderr)
|
||||
print("is_valid_ip(%s) returned False. Reason: %s" % (ip, str(ex)), file = sys.stderr)
|
||||
return False
|
||||
return True
|
||||
|
||||
|
@ -39,7 +39,7 @@ def is_cof_valid_strict(d: dict) -> bool:
|
|||
--------
|
||||
True on success, False on validation failure.
|
||||
"""
|
||||
return True # FIXME
|
||||
return True # FIXME
|
||||
|
||||
|
||||
def is_cof_valid_simple(d: dict) -> bool:
|
||||
|
@ -51,28 +51,29 @@ def is_cof_valid_simple(d: dict) -> bool:
|
|||
"""
|
||||
|
||||
if "rrname" not in d:
|
||||
print("Missing MANDATORY field 'rrname'", file=sys.stderr)
|
||||
print("Missing MANDATORY field 'rrname'", file = sys.stderr)
|
||||
return False
|
||||
if not isinstance(d['rrname'], str):
|
||||
print("Type error: 'rrname' is not a JSON string", file=sys.stderr)
|
||||
print("Type error: 'rrname' is not a JSON string", file = sys.stderr)
|
||||
return False
|
||||
if "rrtype" not in d:
|
||||
print("Missing MANDATORY field 'rrtype'", file=sys.stderr)
|
||||
print("Missing MANDATORY field 'rrtype'", file = sys.stderr)
|
||||
return False
|
||||
if not isinstance(d['rrtype'], str):
|
||||
print("Type error: 'rrtype' is not a JSON string", file=sys.stderr)
|
||||
print("Type error: 'rrtype' is not a JSON string", file = sys.stderr)
|
||||
return False
|
||||
if "rdata" not in d:
|
||||
print("Missing MANDATORY field 'rdata'", file=sys.stderr)
|
||||
print("Missing MANDATORY field 'rdata'", file = sys.stderr)
|
||||
return False
|
||||
if "rdata" not in d:
|
||||
print("Missing MANDATORY field 'rdata'", file=sys.stderr)
|
||||
print("Missing MANDATORY field 'rdata'", file = sys.stderr)
|
||||
return False
|
||||
if not isinstance(d['rdata'], str) and not isinstance(d['rdata'], list):
|
||||
print("'rdata' is not a list and not a string.", file=sys.stderr)
|
||||
print("'rdata' is not a list and not a string.", file = sys.stderr)
|
||||
return False
|
||||
if not ("time_first" in d and "time_last" in d) or ("zone_time_first" in d and "zone_time_last" in d):
|
||||
print("We are missing EITHER ('first_seen' and 'last_seen') OR ('zone_time_first' and zone_time_last') fields", file=sys.stderr)
|
||||
print("We are missing EITHER ('first_seen' and 'last_seen') OR ('zone_time_first' and zone_time_last') fields",
|
||||
file = sys.stderr)
|
||||
return False
|
||||
# currently we don't check the OPTIONAL fields. Sorry... to be done later.
|
||||
return True
|
||||
|
@ -94,22 +95,45 @@ def validate_cof(d: dict, strict=True) -> bool:
|
|||
return is_cof_valid_strict(d)
|
||||
|
||||
|
||||
def validate_dnsdbflex(d: dict, strict=True) -> bool:
|
||||
"""
|
||||
Validate if dict d is valid dnsdbflex. It should look like this:
|
||||
{ "rrtype": <str>, "rrname": <str> }
|
||||
"""
|
||||
if "rrname" not in d:
|
||||
print("Missing MANDATORY field 'rrname'", file = sys.stderr)
|
||||
return False
|
||||
if not isinstance(d['rrname'], str):
|
||||
print("Type error: 'rrname' is not a JSON string", file = sys.stderr)
|
||||
return False
|
||||
if "rrtype" not in d:
|
||||
print("Missing MANDATORY field 'rrtype'", file = sys.stderr)
|
||||
return False
|
||||
if not isinstance(d['rrtype'], str):
|
||||
print("Type error: 'rrtype' is not a JSON string", file = sys.stderr)
|
||||
return False
|
||||
return True
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
# simple, poor man's unit tests.
|
||||
|
||||
print(80 * "=", file=sys.stderr)
|
||||
print("Unit Tests:", file=sys.stderr)
|
||||
print(80 * "=", file = sys.stderr)
|
||||
print("Unit Tests:", file = sys.stderr)
|
||||
assert not is_valid_ip("a.2.3.4")
|
||||
assert is_valid_ip("99.88.77.6")
|
||||
assert is_valid_ip("2a0c:88:77:6::1")
|
||||
|
||||
# COF validation
|
||||
print(80 * "=", file = sys.stderr)
|
||||
print("COF unit tests....", file = sys.stderr)
|
||||
|
||||
mock_input = """{"count":1909,"rdata":["cpa.circl.lu"],"rrname":"www.circl.lu","rrtype":"CNAME","time_first":"1315586409","time_last":"1449566799"}
|
||||
{"count":2560,"rdata":["cpab.circl.lu"],"rrname":"www.circl.lu","rrtype":"CNAME","time_first":"1449584660","time_last":"1617676151"}"""
|
||||
|
||||
i = 0
|
||||
for entry in ndjson.loads(mock_input):
|
||||
retval = validate_cof(entry, strict=False)
|
||||
retval = validate_cof(entry, strict = False)
|
||||
assert retval
|
||||
print("line %d is valid: %s" % (i, retval))
|
||||
i += 1
|
||||
|
@ -118,5 +142,24 @@ if __name__ == "__main__":
|
|||
for entry in ndjson.loads(test2):
|
||||
assert validate_cof(entry)
|
||||
|
||||
print(80 * "=", file=sys.stderr)
|
||||
print("Unit Tests DONE", file=sys.stderr)
|
||||
# dnsdbflex validation
|
||||
print(80 * "=", file = sys.stderr)
|
||||
print("dnsdbflex unit tests....", file = sys.stderr)
|
||||
|
||||
mock_input = """{"rrname":"labs.deep-insights.ai.","rrtype":"A"}
|
||||
{"rrname":"www.deep-insights.ca.","rrtype":"CNAME"}
|
||||
{"rrname":"mail.deep-insights.ca.","rrtype":"CNAME"}
|
||||
{"rrname":"cpanel.deep-insights.ca.","rrtype":"A"}
|
||||
{"rrname":"webdisk.deep-insights.ca.","rrtype":"A"}
|
||||
{"rrname":"webmail.deep-insights.ca.","rrtype":"A"}"""
|
||||
|
||||
i = 0
|
||||
for entry in ndjson.loads(mock_input):
|
||||
retval = validate_dnsdbflex(entry, strict = False)
|
||||
assert retval
|
||||
print("dnsdbflex line %d is valid: %s" % (i, retval))
|
||||
i += 1
|
||||
|
||||
|
||||
print(80 * "=", file = sys.stderr)
|
||||
print("Unit Tests DONE", file = sys.stderr)
|
||||
|
|
|
@ -0,0 +1,114 @@
|
|||
arch_type_mapping = {
|
||||
'ANDROID': 'parse_apk',
|
||||
'LINUX': 'parse_elf',
|
||||
'WINDOWS': 'parse_pe'
|
||||
}
|
||||
domain_object_mapping = {
|
||||
'@ip': {'type': 'ip-dst', 'object_relation': 'ip'},
|
||||
'@name': {'type': 'domain', 'object_relation': 'domain'}
|
||||
}
|
||||
dropped_file_mapping = {
|
||||
'@entropy': {'type': 'float', 'object_relation': 'entropy'},
|
||||
'@file': {'type': 'filename', 'object_relation': 'filename'},
|
||||
'@size': {'type': 'size-in-bytes', 'object_relation': 'size-in-bytes'},
|
||||
'@type': {'type': 'mime-type', 'object_relation': 'mimetype'}
|
||||
}
|
||||
dropped_hash_mapping = {
|
||||
'MD5': 'md5',
|
||||
'SHA': 'sha1',
|
||||
'SHA-256': 'sha256',
|
||||
'SHA-512': 'sha512'
|
||||
}
|
||||
elf_object_mapping = {
|
||||
'epaddr': 'entrypoint-address',
|
||||
'machine': 'arch',
|
||||
'osabi': 'os_abi'
|
||||
}
|
||||
elf_section_flags_mapping = {
|
||||
'A': 'ALLOC',
|
||||
'I': 'INFO_LINK',
|
||||
'M': 'MERGE',
|
||||
'S': 'STRINGS',
|
||||
'T': 'TLS',
|
||||
'W': 'WRITE',
|
||||
'X': 'EXECINSTR'
|
||||
}
|
||||
file_object_fields = (
|
||||
'filename',
|
||||
'md5',
|
||||
'sha1',
|
||||
'sha256',
|
||||
'sha512',
|
||||
'ssdeep'
|
||||
)
|
||||
file_object_mapping = {
|
||||
'entropy': {'type': 'float', 'object_relation': 'entropy'},
|
||||
'filesize': {'type': 'size-in-bytes', 'object_relation': 'size-in-bytes'},
|
||||
'filetype': {'type': 'mime-type', 'object_relation': 'mimetype'}
|
||||
}
|
||||
file_references_mapping = {
|
||||
'fileCreated': 'creates',
|
||||
'fileDeleted': 'deletes',
|
||||
'fileMoved': 'moves',
|
||||
'fileRead': 'reads',
|
||||
'fileWritten': 'writes'
|
||||
}
|
||||
network_behavior_fields = ('srcip', 'dstip', 'srcport', 'dstport')
|
||||
network_connection_object_mapping = {
|
||||
'srcip': {'type': 'ip-src', 'object_relation': 'ip-src'},
|
||||
'dstip': {'type': 'ip-dst', 'object_relation': 'ip-dst'},
|
||||
'srcport': {'type': 'port', 'object_relation': 'src-port'},
|
||||
'dstport': {'type': 'port', 'object_relation': 'dst-port'}
|
||||
}
|
||||
pe_object_fields = {
|
||||
'entrypoint': {'type': 'text', 'object_relation': 'entrypoint-address'},
|
||||
'imphash': {'type': 'imphash', 'object_relation': 'imphash'}
|
||||
}
|
||||
pe_object_mapping = {
|
||||
'CompanyName': 'company-name',
|
||||
'FileDescription': 'file-description',
|
||||
'FileVersion': 'file-version',
|
||||
'InternalName': 'internal-filename',
|
||||
'LegalCopyright': 'legal-copyright',
|
||||
'OriginalFilename': 'original-filename',
|
||||
'ProductName': 'product-filename',
|
||||
'ProductVersion': 'product-version',
|
||||
'Translation': 'lang-id'
|
||||
}
|
||||
pe_section_object_mapping = {
|
||||
'characteristics': {'type': 'text', 'object_relation': 'characteristic'},
|
||||
'entropy': {'type': 'float', 'object_relation': 'entropy'},
|
||||
'name': {'type': 'text', 'object_relation': 'name'},
|
||||
'rawaddr': {'type': 'hex', 'object_relation': 'offset'},
|
||||
'rawsize': {'type': 'size-in-bytes', 'object_relation': 'size-in-bytes'},
|
||||
'virtaddr': {'type': 'hex', 'object_relation': 'virtual_address'},
|
||||
'virtsize': {'type': 'size-in-bytes', 'object_relation': 'virtual_size'}
|
||||
}
|
||||
process_object_fields = {
|
||||
'cmdline': 'command-line',
|
||||
'name': 'name',
|
||||
'parentpid': 'parent-pid',
|
||||
'pid': 'pid',
|
||||
'path': 'current-directory'
|
||||
}
|
||||
protocols = {
|
||||
'tcp': 4,
|
||||
'udp': 4,
|
||||
'icmp': 3,
|
||||
'http': 7,
|
||||
'https': 7,
|
||||
'ftp': 7
|
||||
}
|
||||
registry_references_mapping = {
|
||||
'keyValueCreated': 'creates',
|
||||
'keyValueModified': 'modifies'
|
||||
}
|
||||
regkey_object_mapping = {
|
||||
'name': {'type': 'text', 'object_relation': 'name'},
|
||||
'newdata': {'type': 'text', 'object_relation': 'data'},
|
||||
'path': {'type': 'regkey', 'object_relation': 'key'}
|
||||
}
|
||||
signerinfo_object_mapping = {
|
||||
'sigissuer': {'type': 'text', 'object_relation': 'issuer'},
|
||||
'version': {'type': 'text', 'object_relation': 'version'}
|
||||
}
|
|
@ -1,53 +1,15 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
import json
|
||||
from collections import defaultdict
|
||||
from datetime import datetime
|
||||
from pymisp import MISPAttribute, MISPEvent, MISPObject
|
||||
import json
|
||||
|
||||
|
||||
arch_type_mapping = {'ANDROID': 'parse_apk', 'LINUX': 'parse_elf', 'WINDOWS': 'parse_pe'}
|
||||
domain_object_mapping = {'@ip': ('ip-dst', 'ip'), '@name': ('domain', 'domain')}
|
||||
dropped_file_mapping = {'@entropy': ('float', 'entropy'),
|
||||
'@file': ('filename', 'filename'),
|
||||
'@size': ('size-in-bytes', 'size-in-bytes'),
|
||||
'@type': ('mime-type', 'mimetype')}
|
||||
dropped_hash_mapping = {'MD5': 'md5', 'SHA': 'sha1', 'SHA-256': 'sha256', 'SHA-512': 'sha512'}
|
||||
elf_object_mapping = {'epaddr': 'entrypoint-address', 'machine': 'arch', 'osabi': 'os_abi'}
|
||||
elf_section_flags_mapping = {'A': 'ALLOC', 'I': 'INFO_LINK', 'M': 'MERGE',
|
||||
'S': 'STRINGS', 'T': 'TLS', 'W': 'WRITE',
|
||||
'X': 'EXECINSTR'}
|
||||
file_object_fields = ['filename', 'md5', 'sha1', 'sha256', 'sha512', 'ssdeep']
|
||||
file_object_mapping = {'entropy': ('float', 'entropy'),
|
||||
'filesize': ('size-in-bytes', 'size-in-bytes'),
|
||||
'filetype': ('mime-type', 'mimetype')}
|
||||
file_references_mapping = {'fileCreated': 'creates', 'fileDeleted': 'deletes',
|
||||
'fileMoved': 'moves', 'fileRead': 'reads', 'fileWritten': 'writes'}
|
||||
network_behavior_fields = ('srcip', 'dstip', 'srcport', 'dstport')
|
||||
network_connection_object_mapping = {'srcip': ('ip-src', 'ip-src'), 'dstip': ('ip-dst', 'ip-dst'),
|
||||
'srcport': ('port', 'src-port'), 'dstport': ('port', 'dst-port')}
|
||||
pe_object_fields = {'entrypoint': ('text', 'entrypoint-address'),
|
||||
'imphash': ('imphash', 'imphash')}
|
||||
pe_object_mapping = {'CompanyName': 'company-name', 'FileDescription': 'file-description',
|
||||
'FileVersion': 'file-version', 'InternalName': 'internal-filename',
|
||||
'LegalCopyright': 'legal-copyright', 'OriginalFilename': 'original-filename',
|
||||
'ProductName': 'product-filename', 'ProductVersion': 'product-version',
|
||||
'Translation': 'lang-id'}
|
||||
pe_section_object_mapping = {'characteristics': ('text', 'characteristic'),
|
||||
'entropy': ('float', 'entropy'),
|
||||
'name': ('text', 'name'), 'rawaddr': ('hex', 'offset'),
|
||||
'rawsize': ('size-in-bytes', 'size-in-bytes'),
|
||||
'virtaddr': ('hex', 'virtual_address'),
|
||||
'virtsize': ('size-in-bytes', 'virtual_size')}
|
||||
process_object_fields = {'cmdline': 'command-line', 'name': 'name',
|
||||
'parentpid': 'parent-pid', 'pid': 'pid',
|
||||
'path': 'current-directory'}
|
||||
protocols = {'tcp': 4, 'udp': 4, 'icmp': 3,
|
||||
'http': 7, 'https': 7, 'ftp': 7}
|
||||
registry_references_mapping = {'keyValueCreated': 'creates', 'keyValueModified': 'modifies'}
|
||||
regkey_object_mapping = {'name': ('text', 'name'), 'newdata': ('text', 'data'),
|
||||
'path': ('regkey', 'key')}
|
||||
signerinfo_object_mapping = {'sigissuer': ('text', 'issuer'),
|
||||
'version': ('text', 'version')}
|
||||
from joe_mapping import (arch_type_mapping, domain_object_mapping,
|
||||
dropped_file_mapping, dropped_hash_mapping, elf_object_mapping,
|
||||
elf_section_flags_mapping, file_object_fields, file_object_mapping,
|
||||
file_references_mapping, network_behavior_fields,
|
||||
network_connection_object_mapping, pe_object_fields, pe_object_mapping,
|
||||
pe_section_object_mapping, process_object_fields, protocols,
|
||||
registry_references_mapping, regkey_object_mapping, signerinfo_object_mapping)
|
||||
|
||||
|
||||
class JoeParser():
|
||||
|
@ -57,7 +19,7 @@ class JoeParser():
|
|||
self.attributes = defaultdict(lambda: defaultdict(set))
|
||||
self.process_references = {}
|
||||
|
||||
self.import_pe = config["import_pe"]
|
||||
self.import_executable = config["import_executable"]
|
||||
self.create_mitre_attack = config["mitre_attack"]
|
||||
|
||||
def parse_data(self, data):
|
||||
|
@ -101,26 +63,46 @@ class JoeParser():
|
|||
for droppedfile in droppedinfo['hash']:
|
||||
file_object = MISPObject('file')
|
||||
for key, mapping in dropped_file_mapping.items():
|
||||
attribute_type, object_relation = mapping
|
||||
file_object.add_attribute(object_relation, **{'type': attribute_type, 'value': droppedfile[key], 'to_ids': False})
|
||||
if droppedfile.get(key) is not None:
|
||||
attribute = {'value': droppedfile[key], 'to_ids': False}
|
||||
attribute.update(mapping)
|
||||
file_object.add_attribute(**attribute)
|
||||
if droppedfile['@malicious'] == 'true':
|
||||
file_object.add_attribute('state', **{'type': 'text', 'value': 'Malicious', 'to_ids': False})
|
||||
file_object.add_attribute(
|
||||
**{
|
||||
'type': 'text',
|
||||
'object_relation': 'state',
|
||||
'value': 'Malicious',
|
||||
'to_ids': False
|
||||
}
|
||||
)
|
||||
for h in droppedfile['value']:
|
||||
hash_type = dropped_hash_mapping[h['@algo']]
|
||||
file_object.add_attribute(hash_type, **{'type': hash_type, 'value': h['$'], 'to_ids': False})
|
||||
self.misp_event.add_object(**file_object)
|
||||
self.references[self.process_references[(int(droppedfile['@targetid']), droppedfile['@process'])]].append({
|
||||
'referenced_uuid': file_object.uuid,
|
||||
'relationship_type': 'drops'
|
||||
})
|
||||
file_object.add_attribute(
|
||||
**{
|
||||
'type': hash_type,
|
||||
'object_relation': hash_type,
|
||||
'value': h['$'],
|
||||
'to_ids': False
|
||||
}
|
||||
)
|
||||
self.misp_event.add_object(file_object)
|
||||
reference_key = (int(droppedfile['@targetid']), droppedfile['@process'])
|
||||
if reference_key in self.process_references:
|
||||
self.references[self.process_references[reference_key]].append(
|
||||
{
|
||||
'referenced_uuid': file_object.uuid,
|
||||
'relationship_type': 'drops'
|
||||
}
|
||||
)
|
||||
|
||||
def parse_mitre_attack(self):
|
||||
mitreattack = self.data['mitreattack']
|
||||
mitreattack = self.data.get('mitreattack', {})
|
||||
if mitreattack:
|
||||
for tactic in mitreattack['tactic']:
|
||||
if tactic.get('technique'):
|
||||
for technique in tactic['technique']:
|
||||
self.misp_event.add_tag('misp-galaxy:mitre-attack-pattern="{} - {}"'.format(technique['name'], technique['id']))
|
||||
self.misp_event.add_tag(f'misp-galaxy:mitre-attack-pattern="{technique["name"]} - {technique["id"]}"')
|
||||
|
||||
def parse_network_behavior(self):
|
||||
network = self.data['behavior']['network']
|
||||
|
@ -134,39 +116,69 @@ class JoeParser():
|
|||
attributes = self.prefetch_attributes_data(connection)
|
||||
if len(data.keys()) == len(set(protocols[protocol] for protocol in data.keys())):
|
||||
network_connection_object = MISPObject('network-connection')
|
||||
for object_relation, attribute in attributes.items():
|
||||
network_connection_object.add_attribute(object_relation, **attribute)
|
||||
network_connection_object.add_attribute('first-packet-seen',
|
||||
**{'type': 'datetime',
|
||||
'value': min(tuple(min(timestamp) for timestamp in data.values())),
|
||||
'to_ids': False})
|
||||
for attribute in attributes:
|
||||
network_connection_object.add_attribute(**attribute)
|
||||
network_connection_object.add_attribute(
|
||||
**{
|
||||
'type': 'datetime',
|
||||
'object_relation': 'first-packet-seen',
|
||||
'value': min(tuple(min(timestamp) for timestamp in data.values())),
|
||||
'to_ids': False
|
||||
}
|
||||
)
|
||||
for protocol in data.keys():
|
||||
network_connection_object.add_attribute('layer{}-protocol'.format(protocols[protocol]),
|
||||
**{'type': 'text', 'value': protocol, 'to_ids': False})
|
||||
self.misp_event.add_object(**network_connection_object)
|
||||
network_connection_object.add_attribute(
|
||||
**{
|
||||
'type': 'text',
|
||||
'object_relation': f'layer{protocols[protocol]}-protocol',
|
||||
'value': protocol,
|
||||
'to_ids': False
|
||||
}
|
||||
)
|
||||
self.misp_event.add_object(network_connection_object)
|
||||
self.references[self.analysisinfo_uuid].append(dict(referenced_uuid=network_connection_object.uuid,
|
||||
relationship_type='initiates'))
|
||||
else:
|
||||
for protocol, timestamps in data.items():
|
||||
network_connection_object = MISPObject('network-connection')
|
||||
for object_relation, attribute in attributes.items():
|
||||
network_connection_object.add_attribute(object_relation, **attribute)
|
||||
network_connection_object.add_attribute('first-packet-seen', **{'type': 'datetime', 'value': min(timestamps), 'to_ids': False})
|
||||
network_connection_object.add_attribute('layer{}-protocol'.format(protocols[protocol]), **{'type': 'text', 'value': protocol, 'to_ids': False})
|
||||
self.misp_event.add_object(**network_connection_object)
|
||||
for attribute in attributes:
|
||||
network_connection_object.add_attribute(**attribute)
|
||||
network_connection_object.add_attribute(
|
||||
**{
|
||||
'type': 'datetime',
|
||||
'object_relation': 'first-packet-seen',
|
||||
'value': min(timestamps),
|
||||
'to_ids': False
|
||||
}
|
||||
)
|
||||
network_connection_object.add_attribute(
|
||||
**{
|
||||
'type': 'text',
|
||||
'object_relation': f'layer{protocols[protocol]}-protocol',
|
||||
'value': protocol,
|
||||
'to_ids': False
|
||||
}
|
||||
)
|
||||
self.misp_event.add_object(network_connection_object)
|
||||
self.references[self.analysisinfo_uuid].append(dict(referenced_uuid=network_connection_object.uuid,
|
||||
relationship_type='initiates'))
|
||||
|
||||
def parse_screenshot(self):
|
||||
screenshotdata = self.data['behavior']['screenshotdata']
|
||||
if screenshotdata:
|
||||
screenshotdata = screenshotdata['interesting']['$']
|
||||
attribute = {'type': 'attachment', 'value': 'screenshot.jpg',
|
||||
'data': screenshotdata, 'disable_correlation': True,
|
||||
'to_ids': False}
|
||||
self.misp_event.add_attribute(**attribute)
|
||||
if self.data['behavior'].get('screenshotdata', {}).get('interesting') is not None:
|
||||
screenshotdata = self.data['behavior']['screenshotdata']['interesting']['$']
|
||||
self.misp_event.add_attribute(
|
||||
**{
|
||||
'type': 'attachment',
|
||||
'value': 'screenshot.jpg',
|
||||
'data': screenshotdata,
|
||||
'disable_correlation': True,
|
||||
'to_ids': False
|
||||
}
|
||||
)
|
||||
|
||||
def parse_system_behavior(self):
|
||||
if not 'system' in self.data['behavior']:
|
||||
return
|
||||
system = self.data['behavior']['system']
|
||||
if system.get('processes'):
|
||||
process_activities = {'fileactivities': self.parse_fileactivities,
|
||||
|
@ -175,10 +187,24 @@ class JoeParser():
|
|||
general = process['general']
|
||||
process_object = MISPObject('process')
|
||||
for feature, relation in process_object_fields.items():
|
||||
process_object.add_attribute(relation, **{'type': 'text', 'value': general[feature], 'to_ids': False})
|
||||
start_time = datetime.strptime('{} {}'.format(general['date'], general['time']), '%d/%m/%Y %H:%M:%S')
|
||||
process_object.add_attribute('start-time', **{'type': 'datetime', 'value': start_time, 'to_ids': False})
|
||||
self.misp_event.add_object(**process_object)
|
||||
process_object.add_attribute(
|
||||
**{
|
||||
'type': 'text',
|
||||
'object_relation': relation,
|
||||
'value': general[feature],
|
||||
'to_ids': False
|
||||
}
|
||||
)
|
||||
start_time = datetime.strptime(f"{general['date']} {general['time']}", '%d/%m/%Y %H:%M:%S')
|
||||
process_object.add_attribute(
|
||||
**{
|
||||
'type': 'datetime',
|
||||
'object_relation': 'start-time',
|
||||
'value': start_time,
|
||||
'to_ids': False
|
||||
}
|
||||
)
|
||||
self.misp_event.add_object(process_object)
|
||||
for field, to_call in process_activities.items():
|
||||
if process.get(field):
|
||||
to_call(process_object.uuid, process[field])
|
||||
|
@ -211,9 +237,15 @@ class JoeParser():
|
|||
|
||||
url_object = MISPObject("url")
|
||||
self.analysisinfo_uuid = url_object.uuid
|
||||
|
||||
url_object.add_attribute("url", generalinfo["target"]["url"], to_ids=False)
|
||||
self.misp_event.add_object(**url_object)
|
||||
url_object.add_attribute(
|
||||
**{
|
||||
'type': 'url',
|
||||
'object_relation': 'url',
|
||||
'value': generalinfo["target"]["url"],
|
||||
'to_ids': False
|
||||
}
|
||||
)
|
||||
self.misp_event.add_object(url_object)
|
||||
|
||||
def parse_fileinfo(self):
|
||||
fileinfo = self.data['fileinfo']
|
||||
|
@ -222,20 +254,29 @@ class JoeParser():
|
|||
self.analysisinfo_uuid = file_object.uuid
|
||||
|
||||
for field in file_object_fields:
|
||||
file_object.add_attribute(field, **{'type': field, 'value': fileinfo[field], 'to_ids': False})
|
||||
file_object.add_attribute(
|
||||
**{
|
||||
'type': field,
|
||||
'object_relation': field,
|
||||
'value': fileinfo[field],
|
||||
'to_ids': False
|
||||
}
|
||||
)
|
||||
for field, mapping in file_object_mapping.items():
|
||||
attribute_type, object_relation = mapping
|
||||
file_object.add_attribute(object_relation, **{'type': attribute_type, 'value': fileinfo[field], 'to_ids': False})
|
||||
if fileinfo.get(field) is not None:
|
||||
attribute = {'value': fileinfo[field], 'to_ids': False}
|
||||
attribute.update(mapping)
|
||||
file_object.add_attribute(**attribute)
|
||||
arch = self.data['generalinfo']['arch']
|
||||
if arch in arch_type_mapping:
|
||||
if self.import_executable and arch in arch_type_mapping:
|
||||
to_call = arch_type_mapping[arch]
|
||||
getattr(self, to_call)(fileinfo, file_object)
|
||||
else:
|
||||
self.misp_event.add_object(**file_object)
|
||||
self.misp_event.add_object(file_object)
|
||||
|
||||
def parse_apk(self, fileinfo, file_object):
|
||||
apkinfo = fileinfo['apk']
|
||||
self.misp_event.add_object(**file_object)
|
||||
self.misp_event.add_object(file_object)
|
||||
permission_lists = defaultdict(list)
|
||||
for permission in apkinfo['requiredpermissions']['permission']:
|
||||
permission = permission['@name'].split('.')
|
||||
|
@ -243,16 +284,30 @@ class JoeParser():
|
|||
attribute_type = 'text'
|
||||
for comment, permissions in permission_lists.items():
|
||||
permission_object = MISPObject('android-permission')
|
||||
permission_object.add_attribute('comment', **dict(type=attribute_type, value=comment, to_ids=False))
|
||||
permission_object.add_attribute(
|
||||
**{
|
||||
'type': attribute_type,
|
||||
'object_relation': 'comment',
|
||||
'value': comment,
|
||||
'to_ids': False
|
||||
}
|
||||
)
|
||||
for permission in permissions:
|
||||
permission_object.add_attribute('permission', **dict(type=attribute_type, value=permission, to_ids=False))
|
||||
self.misp_event.add_object(**permission_object)
|
||||
permission_object.add_attribute(
|
||||
**{
|
||||
'type': attribute_type,
|
||||
'object_relation': 'permission',
|
||||
'value': permission,
|
||||
'to_ids': False
|
||||
}
|
||||
)
|
||||
self.misp_event.add_object(permission_object)
|
||||
self.references[file_object.uuid].append(dict(referenced_uuid=permission_object.uuid,
|
||||
relationship_type='grants'))
|
||||
|
||||
def parse_elf(self, fileinfo, file_object):
|
||||
elfinfo = fileinfo['elf']
|
||||
self.misp_event.add_object(**file_object)
|
||||
self.misp_event.add_object(file_object)
|
||||
attribute_type = 'text'
|
||||
relationship = 'includes'
|
||||
size = 'size-in-bytes'
|
||||
|
@ -264,47 +319,96 @@ class JoeParser():
|
|||
if elf.get('type'):
|
||||
# Haven't seen anything but EXEC yet in the files I tested
|
||||
attribute_value = "EXECUTABLE" if elf['type'] == "EXEC (Executable file)" else elf['type']
|
||||
elf_object.add_attribute('type', **dict(type=attribute_type, value=attribute_value, to_ids=False))
|
||||
elf_object.add_attribute(
|
||||
**{
|
||||
'type': attribute_type,
|
||||
'object_relation': 'type',
|
||||
'value': attribute_value,
|
||||
'to_ids': False
|
||||
}
|
||||
)
|
||||
for feature, relation in elf_object_mapping.items():
|
||||
if elf.get(feature):
|
||||
elf_object.add_attribute(relation, **dict(type=attribute_type, value=elf[feature], to_ids=False))
|
||||
elf_object.add_attribute(
|
||||
**{
|
||||
'type': attribute_type,
|
||||
'object_relation': relation,
|
||||
'value': elf[feature],
|
||||
'to_ids': False
|
||||
}
|
||||
)
|
||||
sections_number = len(fileinfo['sections']['section'])
|
||||
elf_object.add_attribute('number-sections', **{'type': 'counter', 'value': sections_number, 'to_ids': False})
|
||||
self.misp_event.add_object(**elf_object)
|
||||
elf_object.add_attribute(
|
||||
**{
|
||||
'type': 'counter',
|
||||
'object_relation': 'number-sections',
|
||||
'value': sections_number,
|
||||
'to_ids': False
|
||||
}
|
||||
)
|
||||
self.misp_event.add_object(elf_object)
|
||||
for section in fileinfo['sections']['section']:
|
||||
section_object = MISPObject('elf-section')
|
||||
for feature in ('name', 'type'):
|
||||
if section.get(feature):
|
||||
section_object.add_attribute(feature, **dict(type=attribute_type, value=section[feature], to_ids=False))
|
||||
section_object.add_attribute(
|
||||
**{
|
||||
'type': attribute_type,
|
||||
'object_relation': feature,
|
||||
'value': section[feature],
|
||||
'to_ids': False
|
||||
}
|
||||
)
|
||||
if section.get('size'):
|
||||
section_object.add_attribute(size, **dict(type=size, value=int(section['size'], 16), to_ids=False))
|
||||
section_object.add_attribute(
|
||||
**{
|
||||
'type': size,
|
||||
'object_relation': size,
|
||||
'value': int(section['size'], 16),
|
||||
'to_ids': False
|
||||
}
|
||||
)
|
||||
for flag in section['flagsdesc']:
|
||||
try:
|
||||
attribute_value = elf_section_flags_mapping[flag]
|
||||
section_object.add_attribute('flag', **dict(type=attribute_type, value=attribute_value, to_ids=False))
|
||||
section_object.add_attribute(
|
||||
**{
|
||||
'type': attribute_type,
|
||||
'object_relation': 'flag',
|
||||
'value': attribute_value,
|
||||
'to_ids': False
|
||||
}
|
||||
)
|
||||
except KeyError:
|
||||
print(f'Unknown elf section flag: {flag}')
|
||||
continue
|
||||
self.misp_event.add_object(**section_object)
|
||||
self.misp_event.add_object(section_object)
|
||||
self.references[elf_object.uuid].append(dict(referenced_uuid=section_object.uuid,
|
||||
relationship_type=relationship))
|
||||
|
||||
def parse_pe(self, fileinfo, file_object):
|
||||
if not self.import_pe:
|
||||
return
|
||||
try:
|
||||
peinfo = fileinfo['pe']
|
||||
except KeyError:
|
||||
self.misp_event.add_object(**file_object)
|
||||
self.misp_event.add_object(file_object)
|
||||
return
|
||||
pe_object = MISPObject('pe')
|
||||
relationship = 'includes'
|
||||
file_object.add_reference(pe_object.uuid, relationship)
|
||||
self.misp_event.add_object(**file_object)
|
||||
self.misp_event.add_object(file_object)
|
||||
for field, mapping in pe_object_fields.items():
|
||||
attribute_type, object_relation = mapping
|
||||
pe_object.add_attribute(object_relation, **{'type': attribute_type, 'value': peinfo[field], 'to_ids': False})
|
||||
pe_object.add_attribute('compilation-timestamp', **{'type': 'datetime', 'value': int(peinfo['timestamp'].split()[0], 16), 'to_ids': False})
|
||||
if peinfo.get(field) is not None:
|
||||
attribute = {'value': peinfo[field], 'to_ids': False}
|
||||
attribute.update(mapping)
|
||||
pe_object.add_attribute(**attribute)
|
||||
pe_object.add_attribute(
|
||||
**{
|
||||
'type': 'datetime',
|
||||
'object_relation': 'compilation-timestamp',
|
||||
'value': int(peinfo['timestamp'].split()[0], 16),
|
||||
'to_ids': False
|
||||
}
|
||||
)
|
||||
program_name = fileinfo['filename']
|
||||
if peinfo['versions']:
|
||||
for feature in peinfo['versions']['version']:
|
||||
|
@ -312,33 +416,57 @@ class JoeParser():
|
|||
if name == 'InternalName':
|
||||
program_name = feature['value']
|
||||
if name in pe_object_mapping:
|
||||
pe_object.add_attribute(pe_object_mapping[name], **{'type': 'text', 'value': feature['value'], 'to_ids': False})
|
||||
pe_object.add_attribute(
|
||||
**{
|
||||
'type': 'text',
|
||||
'object_relation': pe_object_mapping[name],
|
||||
'value': feature['value'],
|
||||
'to_ids': False
|
||||
}
|
||||
)
|
||||
sections_number = len(peinfo['sections']['section'])
|
||||
pe_object.add_attribute('number-sections', **{'type': 'counter', 'value': sections_number, 'to_ids': False})
|
||||
pe_object.add_attribute(
|
||||
**{
|
||||
'type': 'counter',
|
||||
'object_relation': 'number-sections',
|
||||
'value': sections_number,
|
||||
'to_ids': False
|
||||
}
|
||||
)
|
||||
signatureinfo = peinfo['signature']
|
||||
if signatureinfo['signed']:
|
||||
signerinfo_object = MISPObject('authenticode-signerinfo')
|
||||
pe_object.add_reference(signerinfo_object.uuid, 'signed-by')
|
||||
self.misp_event.add_object(**pe_object)
|
||||
signerinfo_object.add_attribute('program-name', **{'type': 'text', 'value': program_name, 'to_ids': False})
|
||||
self.misp_event.add_object(pe_object)
|
||||
signerinfo_object.add_attribute(
|
||||
**{
|
||||
'type': 'text',
|
||||
'object_relation': 'program-name',
|
||||
'value': program_name,
|
||||
'to_ids': False
|
||||
}
|
||||
)
|
||||
for feature, mapping in signerinfo_object_mapping.items():
|
||||
attribute_type, object_relation = mapping
|
||||
signerinfo_object.add_attribute(object_relation, **{'type': attribute_type, 'value': signatureinfo[feature], 'to_ids': False})
|
||||
self.misp_event.add_object(**signerinfo_object)
|
||||
if signatureinfo.get(feature) is not None:
|
||||
attribute = {'value': signatureinfo[feature], 'to_ids': False}
|
||||
attribute.update(mapping)
|
||||
signerinfo_object.add_attribute(**attribute)
|
||||
self.misp_event.add_object(signerinfo_object)
|
||||
else:
|
||||
self.misp_event.add_object(**pe_object)
|
||||
self.misp_event.add_object(pe_object)
|
||||
for section in peinfo['sections']['section']:
|
||||
section_object = self.parse_pe_section(section)
|
||||
self.references[pe_object.uuid].append(dict(referenced_uuid=section_object.uuid,
|
||||
relationship_type=relationship))
|
||||
self.misp_event.add_object(**section_object)
|
||||
self.misp_event.add_object(section_object)
|
||||
|
||||
def parse_pe_section(self, section):
|
||||
section_object = MISPObject('pe-section')
|
||||
for feature, mapping in pe_section_object_mapping.items():
|
||||
if section.get(feature):
|
||||
attribute_type, object_relation = mapping
|
||||
section_object.add_attribute(object_relation, **{'type': attribute_type, 'value': section[feature], 'to_ids': False})
|
||||
if section.get(feature) is not None:
|
||||
attribute = {'value': section[feature], 'to_ids': False}
|
||||
attribute.update(mapping)
|
||||
section_object.add_attribute(**attribute)
|
||||
return section_object
|
||||
|
||||
def parse_network_interactions(self):
|
||||
|
@ -348,10 +476,11 @@ class JoeParser():
|
|||
if domain['@ip'] != 'unknown':
|
||||
domain_object = MISPObject('domain-ip')
|
||||
for key, mapping in domain_object_mapping.items():
|
||||
attribute_type, object_relation = mapping
|
||||
domain_object.add_attribute(object_relation,
|
||||
**{'type': attribute_type, 'value': domain[key], 'to_ids': False})
|
||||
self.misp_event.add_object(**domain_object)
|
||||
if domain.get(key) is not None:
|
||||
attribute = {'value': domain[key], 'to_ids': False}
|
||||
attribute.update(mapping)
|
||||
domain_object.add_attribute(**attribute)
|
||||
self.misp_event.add_object(domain_object)
|
||||
reference = dict(referenced_uuid=domain_object.uuid, relationship_type='contacts')
|
||||
self.add_process_reference(domain['@targetid'], domain['@currentpath'], reference)
|
||||
else:
|
||||
|
@ -394,10 +523,19 @@ class JoeParser():
|
|||
for call in registryactivities[feature]['call']:
|
||||
registry_key = MISPObject('registry-key')
|
||||
for field, mapping in regkey_object_mapping.items():
|
||||
attribute_type, object_relation = mapping
|
||||
registry_key.add_attribute(object_relation, **{'type': attribute_type, 'value': call[field], 'to_ids': False})
|
||||
registry_key.add_attribute('data-type', **{'type': 'text', 'value': 'REG_{}'.format(call['type'].upper()), 'to_ids': False})
|
||||
self.misp_event.add_object(**registry_key)
|
||||
if call.get(field) is not None:
|
||||
attribute = {'value': call[field], 'to_ids': False}
|
||||
attribute.update(mapping)
|
||||
registry_key.add_attribute(**attribute)
|
||||
registry_key.add_attribute(
|
||||
**{
|
||||
'type': 'text',
|
||||
'object_relation': 'data-type',
|
||||
'value': f"REG_{call['type'].upper()}",
|
||||
'to_ids': False
|
||||
}
|
||||
)
|
||||
self.misp_event.add_object(registry_key)
|
||||
self.references[process_uuid].append(dict(referenced_uuid=registry_key.uuid,
|
||||
relationship_type=relationship))
|
||||
|
||||
|
@ -427,8 +565,9 @@ class JoeParser():
|
|||
|
||||
@staticmethod
|
||||
def prefetch_attributes_data(connection):
|
||||
attributes = {}
|
||||
attributes = []
|
||||
for field, value in zip(network_behavior_fields, connection):
|
||||
attribute_type, object_relation = network_connection_object_mapping[field]
|
||||
attributes[object_relation] = {'type': attribute_type, 'value': value, 'to_ids': False}
|
||||
attribute = {'value': value, 'to_ids': False}
|
||||
attribute.update(network_connection_object_mapping[field])
|
||||
attributes.append(attribute)
|
||||
return attributes
|
||||
|
|
|
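The recurring pattern behind this joe_parser refactor: each joe_mapping entry is now a ready-made attribute dict, so building a MISP attribute is a copy-and-update instead of tuple unpacking. A minimal sketch with illustrative input data:

```python
from pymisp import MISPObject

file_object_mapping = {
    'entropy': {'type': 'float', 'object_relation': 'entropy'},
}
fileinfo = {'entropy': 6.3}  # illustrative parsed report data

file_object = MISPObject('file')
for field, mapping in file_object_mapping.items():
    if fileinfo.get(field) is not None:
        attribute = {'value': fileinfo[field], 'to_ids': False}
        attribute.update(mapping)  # adds 'type' and 'object_relation'
        file_object.add_attribute(**attribute)
```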
@ -0,0 +1 @@
|
|||
Subproject commit 9dc7e3578f2165e32a3b7cdd09e9e552f2d98d36
|
|
@ -0,0 +1,263 @@
|
|||
# Copyright (c) 2009-2021 Qintel, LLC
|
||||
# Licensed under Apache 2.0 (https://www.apache.org/licenses/LICENSE-2.0.txt)
|
||||
|
||||
from urllib.request import Request, urlopen
|
||||
from urllib.parse import urlencode
|
||||
from urllib.error import HTTPError
|
||||
from time import sleep
|
||||
from json import loads
|
||||
import os
|
||||
from copy import deepcopy
|
||||
from datetime import datetime, timedelta
|
||||
from gzip import GzipFile
|
||||
|
||||
VERSION = '1.0.1'
|
||||
USER_AGENT = 'integrations-helper'
|
||||
MAX_RETRY_ATTEMPTS = 5
|
||||
|
||||
DEFAULT_HEADERS = {
|
||||
'User-Agent': f'{USER_AGENT}/{VERSION}'
|
||||
}
|
||||
|
||||
REMOTE_MAP = {
|
||||
'pmi': 'https://api.pmi.qintel.com',
|
||||
'qwatch': 'https://api.qwatch.qintel.com',
|
||||
'qauth': 'https://api.qauth.qintel.com',
|
||||
'qsentry_feed': 'https://qsentry.qintel.com',
|
||||
'qsentry': 'https://api.qsentry.qintel.com'
|
||||
}
|
||||
|
||||
ENDPOINT_MAP = {
|
||||
'pmi': {
|
||||
'ping': '/users/me',
|
||||
'cve': 'cves'
|
||||
},
|
||||
'qsentry_feed': {
|
||||
'anon': '/files/anonymization',
|
||||
'mal_hosting': '/files/malicious_hosting'
|
||||
},
|
||||
'qsentry': {},
|
||||
'qwatch': {
|
||||
'ping': '/users/me',
|
||||
'exposures': 'exposures'
|
||||
},
|
||||
'qauth': {}
|
||||
}
|
||||
|
||||
|
||||
def _get_request_wait_time(attempts):
|
||||
""" Use Fibonacci numbers for determining the time to wait when rate limits
|
||||
have been encountered.
|
||||
"""
|
||||
|
||||
n = attempts + 3
|
||||
a, b = 1, 0
|
||||
for _ in range(n):
|
||||
a, b = a + b, a
|
||||
|
||||
return a
|
||||
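# e.g. attempts = 1, 2, 3 -> n = 4, 5, 6 -> waits of 5, 8, 13 seconds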
|
||||
|
||||
def _search(**kwargs):
|
||||
remote = kwargs.get('remote')
|
||||
max_retries = int(kwargs.get('max_retries', MAX_RETRY_ATTEMPTS))
|
||||
params = kwargs.get('params', {})
|
||||
headers = _set_headers(**kwargs)
|
||||
|
||||
logger = kwargs.get('logger')
|
||||
|
||||
params = urlencode(params)
|
||||
url = remote + "?" + params
|
||||
req = Request(url, headers=headers)
|
||||
|
||||
request_attempts = 1
|
||||
    while request_attempts <= max_retries:
|
||||
try:
|
||||
return urlopen(req)
|
||||
|
||||
except HTTPError as e:
|
||||
response = e
|
||||
|
||||
except Exception as e:
|
||||
raise Exception('API connection error') from e
|
||||
|
||||
if response.code not in [429, 504]:
|
||||
raise Exception(f'API connection error: {response}')
|
||||
|
||||
if request_attempts < max_retries:
|
||||
wait_time = _get_request_wait_time(request_attempts)
|
||||
|
||||
if response.code == 429:
|
||||
msg = f'rate limit reached on attempt {request_attempts}, ' \
|
||||
f'waiting {wait_time} seconds'
|
||||
|
||||
if logger:
|
||||
logger(msg)
|
||||
|
||||
else:
|
||||
msg = f'connection timed out, retrying in {wait_time} seconds'
|
||||
if logger:
|
||||
logger(msg)
|
||||
|
||||
sleep(wait_time)
|
||||
|
||||
else:
|
||||
raise Exception('Max API retries exceeded')
|
||||
|
||||
request_attempts += 1
|
||||
|
||||
|
||||
def _set_headers(**kwargs):
|
||||
headers = deepcopy(DEFAULT_HEADERS)
|
||||
|
||||
if kwargs.get('user_agent'):
|
||||
headers['User-Agent'] = \
|
||||
f"{kwargs['user_agent']}/{USER_AGENT}/{VERSION}"
|
||||
|
||||
# TODO: deprecate
|
||||
if kwargs.get('client_id') or kwargs.get('client_secret'):
|
||||
try:
|
||||
headers['Cf-Access-Client-Id'] = kwargs['client_id']
|
||||
headers['Cf-Access-Client-Secret'] = kwargs['client_secret']
|
||||
except KeyError:
|
||||
raise Exception('missing client_id or client_secret')
|
||||
|
||||
if kwargs.get('token'):
|
||||
headers['x-api-key'] = kwargs['token']
|
||||
|
||||
return headers
|
||||
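# Example (token value assumed for illustration):
# _set_headers(token='abc123') returns
# {'User-Agent': 'integrations-helper/1.0.1', 'x-api-key': 'abc123'}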
|
||||
|
||||
def _set_remote(product, query_type, **kwargs):
|
||||
remote = kwargs.get('remote')
|
||||
endpoint = kwargs.get('endpoint', ENDPOINT_MAP[product].get(query_type))
|
||||
|
||||
if not remote:
|
||||
remote = REMOTE_MAP[product]
|
||||
|
||||
if not endpoint:
|
||||
raise Exception('invalid search type')
|
||||
|
||||
remote = remote.rstrip('/')
|
||||
endpoint = endpoint.lstrip('/')
|
||||
|
||||
return f'{remote}/{endpoint}'
|
||||
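# Example: _set_remote('pmi', 'cve') combines REMOTE_MAP and ENDPOINT_MAP
# into 'https://api.pmi.qintel.com/cves'.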
|
||||
|
||||
def _process_qsentry(resp):
|
||||
if resp.getheader('Content-Encoding', '') == 'gzip':
|
||||
with GzipFile(fileobj=resp) as file:
|
||||
for line in file.readlines():
|
||||
yield loads(line)
|
||||
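# The QSentry feed endpoints serve gzip-compressed JSON lines; each line
# is decoded into a dict and yielded to the caller.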
|
||||
|
||||
def search_pmi(search_term, query_type, **kwargs):
|
||||
"""
|
||||
Search PMI
|
||||
|
||||
:param str search_term: Search term
|
||||
:param str query_type: Query type [cve|ping]
|
||||
:param dict kwargs: extra client args [remote|token|params]
|
||||
:return: API JSON response object
|
||||
:rtype: dict
|
||||
"""
|
||||
|
||||
kwargs['remote'] = _set_remote('pmi', query_type, **kwargs)
|
||||
kwargs['token'] = kwargs.get('token', os.getenv('PMI_TOKEN'))
|
||||
|
||||
params = kwargs.get('params', {})
|
||||
params.update({'identifier': search_term})
|
||||
kwargs['params'] = params
|
||||
|
||||
return loads(_search(**kwargs).read())
|
||||
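# Usage sketch (identifier and token values assumed for illustration):
# results = search_pmi('CVE-2021-44228', 'cve', token='my-token')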
|
||||
|
||||
def search_qwatch(search_term, search_type, query_type, **kwargs):
|
||||
"""
|
||||
Search QWatch for exposed credentials
|
||||
|
||||
:param str search_term: Search term
|
||||
:param str search_type: Search term type [domain|email]
|
||||
:param str query_type: Query type [exposures]
|
||||
:param dict kwargs: extra client args [remote|token|params]
|
||||
:return: API JSON response object
|
||||
:rtype: dict
|
||||
"""
|
||||
|
||||
kwargs['remote'] = _set_remote('qwatch', query_type, **kwargs)
|
||||
kwargs['token'] = kwargs.get('token', os.getenv('QWATCH_TOKEN'))
|
||||
|
||||
params = kwargs.get('params', {})
|
||||
if search_type:
|
||||
params.update({search_type: search_term})
|
||||
kwargs['params'] = params
|
||||
|
||||
return loads(_search(**kwargs).read())
|
||||
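# Usage sketch (domain and token values assumed for illustration):
# exposures = search_qwatch('example.com', 'domain', 'exposures', token='my-token')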
|
||||
|
||||
def search_qauth(search_term, **kwargs):
|
||||
"""
|
||||
Search QAuth
|
||||
|
||||
:param str search_term: Search term
|
||||
:param dict kwargs: extra client args [remote|token|params]
|
||||
:return: API JSON response object
|
||||
:rtype: dict
|
||||
"""
|
||||
|
||||
if not kwargs.get('endpoint'):
|
||||
kwargs['endpoint'] = '/'
|
||||
|
||||
kwargs['remote'] = _set_remote('qauth', None, **kwargs)
|
||||
kwargs['token'] = kwargs.get('token', os.getenv('QAUTH_TOKEN'))
|
||||
|
||||
params = kwargs.get('params', {})
|
||||
params.update({'q': search_term})
|
||||
kwargs['params'] = params
|
||||
|
||||
return loads(_search(**kwargs).read())
|
||||
|
||||
|
||||
def search_qsentry(search_term, **kwargs):
|
||||
"""
|
||||
Search QSentry
|
||||
|
||||
:param str search_term: Search term
|
||||
:param dict kwargs: extra client args [remote|token|params]
|
||||
:return: API JSON response object
|
||||
:rtype: dict
|
||||
"""
|
||||
|
||||
if not kwargs.get('endpoint'):
|
||||
kwargs['endpoint'] = '/'
|
||||
|
||||
kwargs['remote'] = _set_remote('qsentry', None, **kwargs)
|
||||
kwargs['token'] = kwargs.get('token', os.getenv('QSENTRY_TOKEN'))
|
||||
|
||||
params = kwargs.get('params', {})
|
||||
params.update({'q': search_term})
|
||||
kwargs['params'] = params
|
||||
|
||||
return loads(_search(**kwargs).read())
|
||||
|
||||
|
||||
def qsentry_feed(query_type='anon', feed_date=datetime.today(), **kwargs):
|
||||
"""
|
||||
Fetch the most recent QSentry Feed
|
||||
|
||||
:param str query_type: Feed type [anon|mal_hosting]
|
||||
:param dict kwargs: extra client args [remote|token|params]
|
||||
:param datetime feed_date: feed date to fetch
|
||||
:return: API JSON response object
|
||||
:rtype: Iterator[dict]
|
||||
"""
|
||||
|
||||
remote = _set_remote('qsentry_feed', query_type, **kwargs)
|
||||
kwargs['token'] = kwargs.get('token', os.getenv('QSENTRY_TOKEN'))
|
||||
|
||||
feed_date = (feed_date - timedelta(days=1)).strftime('%Y%m%d')
|
||||
kwargs['remote'] = f'{remote}/{feed_date}'
|
||||
|
||||
resp = _search(**kwargs)
|
||||
for r in _process_qsentry(resp):
|
||||
yield r
|
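# Usage sketch (assumes QSENTRY_TOKEN is set in the environment); fetches
# yesterday's anonymization feed:
# for record in qsentry_feed('anon'):
#     print(record)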
|
@ -0,0 +1,460 @@
|
|||
################################################################################
|
||||
# ATTRIBUTES AND OBJECTS MAPPING #
|
||||
################################################################################
|
||||
|
||||
attributes_mapping = {
|
||||
'filename': '_parse_name',
|
||||
'ip-src': '_parse_value',
|
||||
'ip-dst': '_parse_value',
|
||||
'hostname': '_parse_value',
|
||||
'domain': '_parse_value',
|
||||
'domain|ip': '_parse_domain_ip_attribute',
|
||||
'email-src': '_parse_value',
|
||||
'email-dst': '_parse_value',
|
||||
'email-attachment': '_parse_name',
|
||||
'url': '_parse_value',
|
||||
'regkey': '_parse_regkey_attribute',
|
||||
'regkey|value': '_parse_regkey_value',
|
||||
'malware-sample': '_parse_malware_sample',
|
||||
'mutex': '_parse_name',
|
||||
'uri': '_parse_value',
|
||||
'port': '_parse_port',
|
||||
'ip-dst|port': '_parse_network_attribute',
|
||||
'ip-src|port': '_parse_network_attribute',
|
||||
'hostname|port': '_parse_network_attribute',
|
||||
'email-reply-to': '_parse_email_reply_to',
|
||||
'attachment': '_parse_attachment',
|
||||
'mac-address': '_parse_value',
|
||||
'AS': '_parse_number'
|
||||
}
|
||||
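# Lookup example: a MISP 'domain|ip' attribute is dispatched to the
# '_parse_domain_ip_attribute' method named here (presumably resolved
# with getattr on the parser instance).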
|
||||
attributes_type_mapping = {
|
||||
'md5': '_parse_hash',
|
||||
'sha1': '_parse_hash',
|
||||
'sha256': '_parse_hash',
|
||||
'filename|md5': '_parse_filename_hash',
|
||||
'filename|sha1': '_parse_filename_hash',
|
||||
'filename|sha256': '_parse_filename_hash',
|
||||
'email-subject': '_parse_email_message',
|
||||
'email-body': '_parse_email_message',
|
||||
'authentihash': '_parse_hash',
|
||||
'ssdeep': '_parse_hash',
|
||||
'imphash': '_parse_hash',
|
||||
'pehash': '_parse_hash',
|
||||
'impfuzzy': '_parse_hash',
|
||||
'sha224': '_parse_hash',
|
||||
'sha384': '_parse_hash',
|
||||
'sha512': '_parse_hash',
|
||||
'sha512/224': '_parse_hash',
|
||||
'sha512/256': '_parse_hash',
|
||||
'tlsh': '_parse_hash',
|
||||
'cdhash': '_parse_hash',
|
||||
'filename|authentihash': '_parse_filename_hash',
|
||||
'filename|ssdeep': '_parse_filename_hash',
|
||||
'filename|imphash': '_parse_filename_hash',
|
||||
'filename|impfuzzy': '_parse_filename_hash',
|
||||
'filename|pehash': '_parse_filename_hash',
|
||||
'filename|sha224': '_parse_filename_hash',
|
||||
'filename|sha384': '_parse_filename_hash',
|
||||
'filename|sha512': '_parse_filename_hash',
|
||||
'filename|sha512/224': '_parse_filename_hash',
|
||||
'filename|sha512/256': '_parse_filename_hash',
|
||||
'filename|tlsh': '_parse_filename_hash',
|
||||
'x509-fingerprint-md5': '_parse_x509_attribute',
|
||||
'x509-fingerprint-sha1': '_parse_x509_attribute',
|
||||
'x509-fingerprint-sha256': '_parse_x509_attribute'
|
||||
}
|
||||
|
||||
objects_mapping = {
|
||||
'asn': {
|
||||
'observable': 'parse_asn_observable',
|
||||
'pattern': 'parse_asn_pattern'},
|
||||
'credential': {
|
||||
'observable': 'parse_credential_observable',
|
||||
'pattern': 'parse_credential_pattern'},
|
||||
'domain-ip': {
|
||||
'observable': 'parse_domain_ip_observable',
|
||||
'pattern': 'parse_domain_ip_pattern'},
|
||||
'email': {
|
||||
'observable': 'parse_email_observable',
|
||||
'pattern': 'parse_email_pattern'},
|
||||
'file': {
|
||||
'observable': 'parse_file_observable',
|
||||
'pattern': 'parse_file_pattern'},
|
||||
'ip-port': {
|
||||
'observable': 'parse_ip_port_observable',
|
||||
'pattern': 'parse_ip_port_pattern'},
|
||||
'network-connection': {
|
||||
'observable': 'parse_network_connection_observable',
|
||||
'pattern': 'parse_network_connection_pattern'},
|
||||
'network-socket': {
|
||||
'observable': 'parse_network_socket_observable',
|
||||
'pattern': 'parse_network_socket_pattern'},
|
||||
'process': {
|
||||
'observable': 'parse_process_observable',
|
||||
'pattern': 'parse_process_pattern'},
|
||||
'registry-key': {
|
||||
'observable': 'parse_regkey_observable',
|
||||
'pattern': 'parse_regkey_pattern'},
|
||||
'url': {
|
||||
'observable': 'parse_url_observable',
|
||||
'pattern': 'parse_url_pattern'},
|
||||
'user-account': {
|
||||
'observable': 'parse_user_account_observable',
|
||||
'pattern': 'parse_user_account_pattern'},
|
||||
'WindowsPEBinaryFile': {
|
||||
'observable': 'parse_pe_observable',
|
||||
'pattern': 'parse_pe_pattern'},
|
||||
'x509': {
|
||||
'observable': 'parse_x509_observable',
|
||||
'pattern': 'parse_x509_pattern'}
|
||||
}
|
||||
|
||||
observable_mapping = {
|
||||
('artifact', 'file'): 'parse_file_observable',
|
||||
('artifact', 'directory', 'file'): 'parse_file_observable',
|
||||
('artifact', 'email-addr', 'email-message', 'file'): 'parse_email_observable',
|
||||
('autonomous-system',): 'parse_asn_observable',
|
||||
('autonomous-system', 'ipv4-addr'): 'parse_asn_observable',
|
||||
('autonomous-system', 'ipv6-addr'): 'parse_asn_observable',
|
||||
('autonomous-system', 'ipv4-addr', 'ipv6-addr'): 'parse_asn_observable',
|
||||
('directory', 'file'): 'parse_file_observable',
|
||||
('domain-name',): 'parse_domain_ip_observable',
|
||||
('domain-name', 'ipv4-addr'): 'parse_domain_ip_observable',
|
||||
('domain-name', 'ipv6-addr'): 'parse_domain_ip_observable',
|
||||
('domain-name', 'ipv4-addr', 'ipv6-addr'): 'parse_domain_ip_observable',
|
||||
('domain-name', 'ipv4-addr', 'network-traffic'): 'parse_domain_ip_network_traffic_observable',
|
||||
('domain-name', 'ipv6-addr', 'network-traffic'): 'parse_domain_ip_network_traffic_observable',
|
||||
('domain-name', 'ipv4-addr', 'ipv6-addr', 'network-traffic'): 'parse_domain_ip_network_traffic_observable',
|
||||
('domain-name', 'network-traffic'): 'parse_domain_network_traffic_observable',
|
||||
('domain-name', 'network-traffic', 'url'): 'parse_url_observable',
|
||||
('email-addr',): 'parse_email_address_observable',
|
||||
('email-addr', 'email-message'): 'parse_email_observable',
|
||||
('email-addr', 'email-message', 'file'): 'parse_email_observable',
|
||||
('email-message',): 'parse_email_observable',
|
||||
('file',): 'parse_file_observable',
|
||||
('file', 'process'): 'parse_process_observable',
|
||||
('ipv4-addr',): 'parse_ip_address_observable',
|
||||
('ipv6-addr',): 'parse_ip_address_observable',
|
||||
('ipv4-addr', 'network-traffic'): 'parse_ip_network_traffic_observable',
|
||||
('ipv6-addr', 'network-traffic'): 'parse_ip_network_traffic_observable',
|
||||
('ipv4-addr', 'ipv6-addr', 'network-traffic'): 'parse_ip_network_traffic_observable',
|
||||
('mac-addr',): 'parse_mac_address_observable',
|
||||
('mutex',): 'parse_mutex_observable',
|
||||
('process',): 'parse_process_observable',
|
||||
('x509-certificate',): 'parse_x509_observable',
|
||||
('url',): 'parse_url_observable',
|
||||
('user-account',): 'parse_user_account_observable',
|
||||
('windows-registry-key',): 'parse_regkey_observable'
|
||||
}
|
||||
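# Dispatch example: an observable composed of 'domain-name' and
# 'ipv4-addr' objects is routed to 'parse_domain_ip_observable'.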
|
||||
pattern_mapping = {
|
||||
('artifact', 'file'): 'parse_file_pattern',
|
||||
('artifact', 'directory', 'file'): 'parse_file_pattern',
|
||||
('autonomous-system', ): 'parse_as_pattern',
|
||||
('autonomous-system', 'ipv4-addr'): 'parse_as_pattern',
|
||||
('autonomous-system', 'ipv6-addr'): 'parse_as_pattern',
|
||||
('autonomous-system', 'ipv4-addr', 'ipv6-addr'): 'parse_as_pattern',
|
||||
('directory',): 'parse_file_pattern',
|
||||
('directory', 'file'): 'parse_file_pattern',
|
||||
('domain-name',): 'parse_domain_ip_port_pattern',
|
||||
('domain-name', 'ipv4-addr'): 'parse_domain_ip_port_pattern',
|
||||
('domain-name', 'ipv6-addr'): 'parse_domain_ip_port_pattern',
|
||||
('domain-name', 'ipv4-addr', 'ipv6-addr'): 'parse_domain_ip_port_pattern',
|
||||
('domain-name', 'ipv4-addr', 'url'): 'parse_url_pattern',
|
||||
('domain-name', 'ipv6-addr', 'url'): 'parse_url_pattern',
|
||||
('domain-name', 'ipv4-addr', 'ipv6-addr', 'url'): 'parse_url_pattern',
|
||||
('domain-name', 'network-traffic'): 'parse_domain_ip_port_pattern',
|
||||
('domain-name', 'network-traffic', 'url'): 'parse_url_pattern',
|
||||
('email-addr',): 'parse_email_address_pattern',
|
||||
('email-message',): 'parse_email_message_pattern',
|
||||
('file',): 'parse_file_pattern',
|
||||
('ipv4-addr',): 'parse_ip_address_pattern',
|
||||
('ipv6-addr',): 'parse_ip_address_pattern',
|
||||
('ipv4-addr', 'ipv6-addr'): 'parse_ip_address_pattern',
|
||||
('mac-addr',): 'parse_mac_address_pattern',
|
||||
('mutex',): 'parse_mutex_pattern',
|
||||
('network-traffic',): 'parse_network_traffic_pattern',
|
||||
('process',): 'parse_process_pattern',
|
||||
('url',): 'parse_url_pattern',
|
||||
('user-account',): 'parse_user_account_pattern',
|
||||
('windows-registry-key',): 'parse_regkey_pattern',
|
||||
('x509-certificate',): 'parse_x509_pattern'
|
||||
}
|
||||
|
||||
pattern_forbidden_relations = (' LIKE ', ' FOLLOWEDBY ', ' MATCHES ', ' ISSUBSET ', ' ISSUPERSET ', ' REPEATS ')
|
||||
single_attribute_fields = ('type', 'value', 'to_ids')
|
||||
|
||||
|
||||
################################################################################
|
||||
# OBSERVABLE OBJECTS AND PATTERNS MAPPING. #
|
||||
################################################################################
|
||||
|
||||
address_family_attribute_mapping = {'type': 'text', 'object_relation': 'address-family'}
|
||||
as_number_attribute_mapping = {'type': 'AS', 'object_relation': 'asn'}
|
||||
description_attribute_mapping = {'type': 'text', 'object_relation': 'description'}
|
||||
asn_subnet_attribute_mapping = {'type': 'ip-src', 'object_relation': 'subnet-announced'}
|
||||
cc_attribute_mapping = {'type': 'email-dst', 'object_relation': 'cc'}
|
||||
credential_attribute_mapping = {'type': 'text', 'object_relation': 'password'}
|
||||
data_attribute_mapping = {'type': 'text', 'object_relation': 'data'}
|
||||
data_type_attribute_mapping = {'type': 'text', 'object_relation': 'data-type'}
|
||||
domain_attribute_mapping = {'type': 'domain', 'object_relation': 'domain'}
|
||||
domain_family_attribute_mapping = {'type': 'text', 'object_relation': 'domain-family'}
|
||||
dst_port_attribute_mapping = {'type': 'port', 'object_relation': 'dst-port'}
|
||||
email_attachment_attribute_mapping = {'type': 'email-attachment', 'object_relation': 'attachment'}
|
||||
email_date_attribute_mapping = {'type': 'datetime', 'object_relation': 'send-date'}
|
||||
email_subject_attribute_mapping = {'type': 'email-subject', 'object_relation': 'subject'}
|
||||
encoding_attribute_mapping = {'type': 'text', 'object_relation': 'file-encoding'}
|
||||
end_datetime_attribute_mapping = {'type': 'datetime', 'object_relation': 'last-seen'}
|
||||
entropy_mapping = {'type': 'float', 'object_relation': 'entropy'}
|
||||
filename_attribute_mapping = {'type': 'filename', 'object_relation': 'filename'}
|
||||
from_attribute_mapping = {'type': 'email-src', 'object_relation': 'from'}
|
||||
imphash_mapping = {'type': 'imphash', 'object_relation': 'imphash'}
|
||||
id_attribute_mapping = {'type': 'text', 'object_relation': 'id'}
|
||||
ip_attribute_mapping = {'type': 'ip-dst', 'object_relation': 'ip'}
|
||||
issuer_attribute_mapping = {'type': 'text', 'object_relation': 'issuer'}
|
||||
key_attribute_mapping = {'type': 'regkey', 'object_relation': 'key'}
|
||||
malware_sample_attribute_mapping = {'type': 'malware-sample', 'object_relation': 'malware-sample'}
|
||||
mime_type_attribute_mapping = {'type': 'mime-type', 'object_relation': 'mimetype'}
|
||||
modified_attribute_mapping = {'type': 'datetime', 'object_relation': 'last-modified'}
|
||||
name_attribute_mapping = {'type': 'text', 'object_relation': 'name'}
|
||||
network_traffic_ip = {'type': 'ip-{}', 'object_relation': 'ip-{}'}
|
||||
number_sections_mapping = {'type': 'counter', 'object_relation': 'number-sections'}
|
||||
password_mapping = {'type': 'text', 'object_relation': 'password'}
|
||||
path_attribute_mapping = {'type': 'text', 'object_relation': 'path'}
|
||||
pe_type_mapping = {'type': 'text', 'object_relation': 'type'}
|
||||
pid_attribute_mapping = {'type': 'text', 'object_relation': 'pid'}
|
||||
process_command_line_mapping = {'type': 'text', 'object_relation': 'command-line'}
|
||||
process_creation_time_mapping = {'type': 'datetime', 'object_relation': 'creation-time'}
|
||||
process_image_mapping = {'type': 'filename', 'object_relation': 'image'}
|
||||
process_name_mapping = {'type': 'text', 'object_relation': 'name'}
|
||||
regkey_name_attribute_mapping = {'type': 'text', 'object_relation': 'name'}
|
||||
references_attribute_mapping = {'type': 'link', 'object_relation': 'references'}
|
||||
reply_to_attribute_mapping = {'type': 'email-reply-to', 'object_relation': 'reply-to'}
|
||||
screenshot_attribute_mapping = {'type': 'attachment', 'object_relation': 'screenshot'}
|
||||
section_name_mapping = {'type': 'text', 'object_relation': 'name'}
|
||||
serial_number_attribute_mapping = {'type': 'text', 'object_relation': 'serial-number'}
|
||||
size_attribute_mapping = {'type': 'size-in-bytes', 'object_relation': 'size-in-bytes'}
|
||||
src_port_attribute_mapping = {'type': 'port', 'object_relation': 'src-port'}
|
||||
start_datetime_attribute_mapping = {'type': 'datetime', 'object_relation': 'first-seen'}
|
||||
state_attribute_mapping = {'type': 'text', 'object_relation': 'state'}
|
||||
summary_attribute_mapping = {'type': 'text', 'object_relation': 'summary'}
|
||||
to_attribute_mapping = {'type': 'email-dst', 'object_relation': 'to'}
|
||||
url_attribute_mapping = {'type': 'url', 'object_relation': 'url'}
|
||||
url_port_attribute_mapping = {'type': 'port', 'object_relation': 'port'}
|
||||
user_id_mapping = {'type': 'text', 'object_relation': 'username'}
|
||||
x_mailer_attribute_mapping = {'type': 'email-x-mailer', 'object_relation': 'x-mailer'}
|
||||
x509_md5_attribute_mapping = {'type': 'x509-fingerprint-md5', 'object_relation': 'x509-fingerprint-md5'}
|
||||
x509_sha1_attribute_mapping = {'type': 'x509-fingerprint-sha1', 'object_relation': 'x509-fingerprint-sha1'}
|
||||
x509_sha256_attribute_mapping = {'type': 'x509-fingerprint-sha256', 'object_relation': 'x509-fingerprint-sha256'}
|
||||
x509_spka_attribute_mapping = {'type': 'text', 'object_relation': 'pubkey-info-algorithm'} # x509 subject public key algorithm
|
||||
x509_spke_attribute_mapping = {'type': 'text', 'object_relation': 'pubkey-info-exponent'} # x509 subject public key exponent
|
||||
x509_spkm_attribute_mapping = {'type': 'text', 'object_relation': 'pubkey-info-modulus'} # x509 subject public key modulus
|
||||
x509_subject_attribute_mapping = {'type': 'text', 'object_relation': 'subject'}
|
||||
x509_version_attribute_mapping = {'type': 'text', 'object_relation': 'version'}
|
||||
x509_vna_attribute_mapping = {'type': 'datetime', 'object_relation': 'validity-not-after'} # x509 validity not after
|
||||
x509_vnb_attribute_mapping = {'type': 'datetime', 'object_relation': 'validity-not-before'} # x509 validity not before
|
||||
|
||||
asn_mapping = {'number': as_number_attribute_mapping,
|
||||
'autonomous-system:number': as_number_attribute_mapping,
|
||||
'name': description_attribute_mapping,
|
||||
'autonomous-system:name': description_attribute_mapping,
|
||||
'ipv4-addr': asn_subnet_attribute_mapping,
|
||||
'ipv6-addr': asn_subnet_attribute_mapping,
|
||||
'ipv4-addr:value': asn_subnet_attribute_mapping,
|
||||
'ipv6-addr:value': asn_subnet_attribute_mapping}
|
||||
|
||||
attack_pattern_mapping = {'name': name_attribute_mapping,
|
||||
'description': summary_attribute_mapping}
|
||||
|
||||
attack_pattern_references_mapping = {'mitre-attack': references_attribute_mapping,
|
||||
'capec': id_attribute_mapping}
|
||||
|
||||
course_of_action_mapping = {'description': description_attribute_mapping,
|
||||
'name': name_attribute_mapping}
|
||||
|
||||
credential_mapping = {'credential': credential_attribute_mapping,
|
||||
'user-account:credential': credential_attribute_mapping,
|
||||
'user_id': user_id_mapping,
|
||||
'user-account:user_id': user_id_mapping}
|
||||
|
||||
domain_ip_mapping = {'domain-name': domain_attribute_mapping,
|
||||
'domain-name:value': domain_attribute_mapping,
|
||||
'ipv4-addr': ip_attribute_mapping,
|
||||
'ipv6-addr': ip_attribute_mapping,
|
||||
'ipv4-addr:value': ip_attribute_mapping,
|
||||
'ipv6-addr:value': ip_attribute_mapping,
|
||||
'domain-name:resolves_to_refs[*].value': ip_attribute_mapping,
|
||||
'network-traffic:dst_port': dst_port_attribute_mapping,
|
||||
'network-traffic:src_port': src_port_attribute_mapping}
|
||||
|
||||
email_mapping = {'date': email_date_attribute_mapping,
|
||||
'email-message:date': email_date_attribute_mapping,
|
||||
'email-message:to_refs[*].value': to_attribute_mapping,
|
||||
'email-message:cc_refs[*].value': cc_attribute_mapping,
|
||||
'subject': email_subject_attribute_mapping,
|
||||
'email-message:subject': email_subject_attribute_mapping,
|
||||
'X-Mailer': x_mailer_attribute_mapping,
|
||||
'email-message:additional_header_fields.x_mailer': x_mailer_attribute_mapping,
|
||||
'Reply-To': reply_to_attribute_mapping,
|
||||
'email-message:additional_header_fields.reply_to': reply_to_attribute_mapping,
|
||||
'email-message:from_ref.value': from_attribute_mapping,
|
||||
'email-addr:value': to_attribute_mapping}
|
||||
|
||||
email_references_mapping = {'attachment': email_attachment_attribute_mapping,
|
||||
'cc_refs': cc_attribute_mapping,
|
||||
'from_ref': from_attribute_mapping,
|
||||
'screenshot': screenshot_attribute_mapping,
|
||||
'to_refs': to_attribute_mapping}
|
||||
|
||||
file_mapping = {'artifact:mime_type': mime_type_attribute_mapping,
|
||||
'file:content_ref.mime_type': mime_type_attribute_mapping,
|
||||
'mime_type': mime_type_attribute_mapping,
|
||||
'file:mime_type': mime_type_attribute_mapping,
|
||||
'name': filename_attribute_mapping,
|
||||
'file:name': filename_attribute_mapping,
|
||||
'name_enc': encoding_attribute_mapping,
|
||||
'file:name_enc': encoding_attribute_mapping,
|
||||
'file:parent_directory_ref.path': path_attribute_mapping,
|
||||
'directory:path': path_attribute_mapping,
|
||||
'size': size_attribute_mapping,
|
||||
'file:size': size_attribute_mapping}
|
||||
|
||||
network_traffic_mapping = {'dst_port': dst_port_attribute_mapping,
|
||||
'src_port': src_port_attribute_mapping,
|
||||
'network-traffic:dst_port': dst_port_attribute_mapping,
|
||||
'network-traffic:src_port': src_port_attribute_mapping}
|
||||
|
||||
ip_port_mapping = {'value': domain_attribute_mapping,
|
||||
'domain-name:value': domain_attribute_mapping,
|
||||
'network-traffic:dst_ref.value': {'type': 'ip-dst', 'object_relation': 'ip-dst'},
|
||||
'network-traffic:src_ref.value': {'type': 'ip-src', 'object_relation': 'ip-src'}}
|
||||
ip_port_mapping.update(network_traffic_mapping)
|
||||
|
||||
ip_port_references_mapping = {'domain-name': domain_attribute_mapping,
|
||||
'ipv4-addr': network_traffic_ip,
|
||||
'ipv6-addr': network_traffic_ip}
|
||||
|
||||
network_socket_extension_mapping = {'address_family': address_family_attribute_mapping,
|
||||
"network-traffic:extensions.'socket-ext'.address_family": address_family_attribute_mapping,
|
||||
'protocol_family': domain_family_attribute_mapping,
|
||||
"network-traffic:extensions.'socket-ext'.protocol_family": domain_family_attribute_mapping,
|
||||
'is_blocking': state_attribute_mapping,
|
||||
"network-traffic:extensions.'socket-ext'.is_blocking": state_attribute_mapping,
|
||||
'is_listening': state_attribute_mapping,
|
||||
"network-traffic:extensions.'socket-ext'.is_listening": state_attribute_mapping}
|
||||
|
||||
network_traffic_references_mapping = {'domain-name': {'type': 'hostname', 'object_relation': 'hostname-{}'},
|
||||
'ipv4-addr': network_traffic_ip,
|
||||
'ipv6-addr': network_traffic_ip}
|
||||
|
||||
pe_mapping = {'pe_type': pe_type_mapping, 'number_of_sections': number_sections_mapping, 'imphash': imphash_mapping}
|
||||
|
||||
pe_section_mapping = {'name': section_name_mapping, 'size': size_attribute_mapping, 'entropy': entropy_mapping}
|
||||
|
||||
hash_types = ('MD5', 'SHA-1', 'SHA-256', 'SHA-224', 'SHA-384', 'SHA-512', 'ssdeep', 'tlsh')
|
||||
for hash_type in hash_types:
|
||||
misp_hash_type = hash_type.replace('-', '').lower()
|
||||
attribute = {'type': misp_hash_type, 'object_relation': misp_hash_type}
|
||||
file_mapping[hash_type] = attribute
|
||||
file_mapping.update({f"file:hashes.'{feature}'": attribute for feature in (hash_type, misp_hash_type)})
|
||||
file_mapping.update({f"file:hashes.{feature}": attribute for feature in (hash_type, misp_hash_type)})
|
||||
pe_section_mapping[hash_type] = attribute
|
||||
pe_section_mapping[misp_hash_type] = attribute
|
||||
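# After this loop, for example, file_mapping['SHA-256'] and
# file_mapping["file:hashes.'sha256'"] both resolve to
# {'type': 'sha256', 'object_relation': 'sha256'}.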
|
||||
process_mapping = {'name': process_name_mapping,
|
||||
'process:name': process_name_mapping,
|
||||
'pid': pid_attribute_mapping,
|
||||
'process:pid': pid_attribute_mapping,
|
||||
'created': process_creation_time_mapping,
|
||||
'process:created': process_creation_time_mapping,
|
||||
'command_line': process_command_line_mapping,
|
||||
'process:command_line': process_command_line_mapping,
|
||||
'process:parent_ref.pid': {'type': 'text', 'object_relation': 'parent-pid'},
|
||||
'process:child_refs[*].pid': {'type': 'text', 'object_relation': 'child-pid'},
|
||||
'process:binary_ref.name': process_image_mapping}
|
||||
|
||||
child_process_reference_mapping = {'pid': {'type': 'text', 'object_relation': 'child-pid'}}
|
||||
|
||||
parent_process_reference_mapping = {'command_line': {'type': 'text', 'object_relation': 'parent-command-line'},
|
||||
'pid': {'type': 'text', 'object_relation': 'parent-pid'},
|
||||
'process-name': {'type': 'text', 'object_relation': 'parent-process-name'}}
|
||||
|
||||
regkey_mapping = {'data': data_attribute_mapping,
|
||||
'windows-registry-key:values.data': data_attribute_mapping,
|
||||
'data_type': data_type_attribute_mapping,
|
||||
'windows-registry-key:values.data_type': data_type_attribute_mapping,
|
||||
'modified': modified_attribute_mapping,
|
||||
'windows-registry-key:modified': modified_attribute_mapping,
|
||||
'name': regkey_name_attribute_mapping,
|
||||
'windows-registry-key:values.name': regkey_name_attribute_mapping,
|
||||
'key': key_attribute_mapping,
|
||||
'windows-registry-key:key': key_attribute_mapping,
|
||||
'windows-registry-key:value': {'type': 'text', 'object_relation': 'hive'}
|
||||
}
|
||||
|
||||
url_mapping = {'url': url_attribute_mapping,
|
||||
'url:value': url_attribute_mapping,
|
||||
'domain-name': domain_attribute_mapping,
|
||||
'domain-name:value': domain_attribute_mapping,
|
||||
'network-traffic': url_port_attribute_mapping,
|
||||
'network-traffic:dst_port': url_port_attribute_mapping,
|
||||
'ipv4-addr:value': ip_attribute_mapping,
|
||||
'ipv6-addr:value': ip_attribute_mapping
|
||||
}
|
||||
|
||||
user_account_mapping = {'account_created': {'type': 'datetime', 'object_relation': 'created'},
|
||||
'account_expires': {'type': 'datetime', 'object_relation': 'expires'},
|
||||
'account_first_login': {'type': 'datetime', 'object_relation': 'first_login'},
|
||||
'account_last_login': {'type': 'datetime', 'object_relation': 'last_login'},
|
||||
'account_login': user_id_mapping,
|
||||
'account_type': {'type': 'text', 'object_relation': 'account-type'},
|
||||
'can_escalate_privs': {'type': 'boolean', 'object_relation': 'can_escalate_privs'},
|
||||
'credential': credential_attribute_mapping,
|
||||
'credential_last_changed': {'type': 'datetime', 'object_relation': 'password_last_changed'},
|
||||
'display_name': {'type': 'text', 'object_relation': 'display-name'},
|
||||
'gid': {'type': 'text', 'object_relation': 'group-id'},
|
||||
'home_dir': {'type': 'text', 'object_relation': 'home_dir'},
|
||||
'is_disabled': {'type': 'boolean', 'object_relation': 'disabled'},
|
||||
'is_privileged': {'type': 'boolean', 'object_relation': 'privileged'},
|
||||
'is_service_account': {'type': 'boolean', 'object_relation': 'is_service_account'},
|
||||
'shell': {'type': 'text', 'object_relation': 'shell'},
|
||||
'user_id': {'type': 'text', 'object_relation': 'user-id'}}
|
||||
|
||||
vulnerability_mapping = {'name': id_attribute_mapping,
|
||||
'description': summary_attribute_mapping}
|
||||
|
||||
x509_mapping = {'issuer': issuer_attribute_mapping,
|
||||
'x509-certificate:issuer': issuer_attribute_mapping,
|
||||
'serial_number': serial_number_attribute_mapping,
|
||||
'x509-certificate:serial_number': serial_number_attribute_mapping,
|
||||
'subject': x509_subject_attribute_mapping,
|
||||
'x509-certificate:subject': x509_subject_attribute_mapping,
|
||||
'subject_public_key_algorithm': x509_spka_attribute_mapping,
|
||||
'x509-certificate:subject_public_key_algorithm': x509_spka_attribute_mapping,
|
||||
'subject_public_key_exponent': x509_spke_attribute_mapping,
|
||||
'x509-certificate:subject_public_key_exponent': x509_spke_attribute_mapping,
|
||||
'subject_public_key_modulus': x509_spkm_attribute_mapping,
|
||||
'x509-certificate:subject_public_key_modulus': x509_spkm_attribute_mapping,
|
||||
'validity_not_before': x509_vnb_attribute_mapping,
|
||||
'x509-certificate:validity_not_before': x509_vnb_attribute_mapping,
|
||||
'validity_not_after': x509_vna_attribute_mapping,
|
||||
'x509-certificate:validity_not_after': x509_vna_attribute_mapping,
|
||||
'version': x509_version_attribute_mapping,
|
||||
'x509-certificate:version': x509_version_attribute_mapping,
|
||||
'SHA-1': x509_sha1_attribute_mapping,
|
||||
"x509-certificate:hashes.'sha1'": x509_sha1_attribute_mapping,
|
||||
'SHA-256': x509_sha256_attribute_mapping,
|
||||
"x509-certificate:hashes.'sha256'": x509_sha256_attribute_mapping,
|
||||
'MD5': x509_md5_attribute_mapping,
|
||||
"x509-certificate:hashes.'md5'": x509_md5_attribute_mapping,
|
||||
}
|
||||
|
||||
attachment_types = ('file:content_ref.name', 'file:content_ref.payload_bin',
|
||||
'artifact:x_misp_text_name', 'artifact:payload_bin',
|
||||
"file:hashes.'MD5'", "file:content_ref.hashes.'MD5'",
|
||||
'file:name')
|
||||
|
||||
connection_protocols = {"IP": "3", "ICMP": "3", "ARP": "3",
|
||||
"TCP": "4", "UDP": "4",
|
||||
"HTTP": "7", "HTTPS": "7", "FTP": "7"}
|
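# The values appear to encode OSI layers: IP, ICMP and ARP map to "3",
# TCP and UDP to "4", HTTP(S) and FTP to "7".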
|
@ -17,7 +17,8 @@ __all__ = ['cuckoo_submit', 'vmray_submit', 'bgpranking', 'circl_passivedns', 'c
|
|||
'virustotal_public', 'apiosintds', 'urlscan', 'securitytrails', 'apivoid',
|
||||
'assemblyline_submit', 'assemblyline_query', 'ransomcoindb', 'malwarebazaar',
|
||||
'lastline_query', 'lastline_submit', 'sophoslabs_intelix', 'cytomic_orion', 'censys_enrich',
|
||||
'trustar_enrich', 'recordedfuture', 'html_to_markdown', 'socialscan']
|
||||
'trustar_enrich', 'recordedfuture', 'html_to_markdown', 'socialscan', 'passive-ssh',
|
||||
'qintel_qsentry', 'mwdb', 'hashlookup', 'mmdb_lookup', 'ipqs_fraud_and_risk_scoring']
|
||||
|
||||
|
||||
minimum_required_fields = ('type', 'uuid', 'value')
|
||||
|
|
|
@ -4,8 +4,8 @@ from . import check_input_attribute, standard_error_message
|
|||
from pymisp import MISPAttribute, MISPEvent, MISPObject
|
||||
|
||||
misperrors = {'error': 'Error'}
|
||||
mispattributes = {'input': ['domain', 'hostname'], 'format': 'misp_standard'}
|
||||
moduleinfo = {'version': '0.1', 'author': 'Christian Studer',
|
||||
mispattributes = {'input': ['domain', 'hostname', 'email', 'email-src', 'email-dst'], 'format': 'misp_standard'}
|
||||
moduleinfo = {'version': '0.2', 'author': 'Christian Studer',
|
||||
'description': 'On demand query API for APIVoid.',
|
||||
'module-type': ['expansion', 'hover']}
|
||||
moduleconfig = ['apikey']
|
||||
|
@ -43,6 +43,31 @@ class APIVoidParser():
|
|||
ssl = requests.get(f'{self.url.format("sslinfo", apikey)}host={self.attribute.value}').json()
|
||||
self._parse_ssl_certificate(ssl['data']['certificate'])
|
||||
|
||||
def handle_email(self, apikey):
|
||||
feature = 'emailverify'
|
||||
if requests.get(f'{self.url.format(feature, apikey)}stats').json()['credits_remained'] < 0.06:
|
||||
self.result = {'error': 'You do not have enough APIVoid credits to process your request.'}
|
||||
return
|
||||
emaillookup = requests.get(f'{self.url.format(feature, apikey)}email={self.attribute.value}').json()
|
||||
email_verification = MISPObject('apivoid-email-verification')
|
||||
boolean_attributes = ['valid_format', 'suspicious_username', 'suspicious_email', 'dirty_words_username',
|
||||
'valid_tld', 'disposable', 'has_a_records', 'has_mx_records',
|
||||
'has_spf_records', 'is_spoofable', 'dmarc_configured', 'dmarc_enforced', 'free_email',
|
||||
'russian_free_email', 'china_free_email', 'suspicious_domain', 'dirty_words_domain',
|
||||
'domain_popular', 'risky_tld', 'police_domain', 'government_domain', 'educational_domain',
|
||||
'should_block']
|
||||
for boolean_attribute in boolean_attributes:
|
||||
email_verification.add_attribute(boolean_attribute,
|
||||
**{'type': 'boolean', 'value': emaillookup['data'][boolean_attribute]})
|
||||
email_verification.add_attribute('email', **{'type': 'email', 'value': emaillookup['data']['email']})
|
||||
email_verification.add_attribute('username', **{'type': 'text', 'value': emaillookup['data']['username']})
|
||||
email_verification.add_attribute('role_address',
|
||||
**{'type': 'boolean', 'value': emaillookup['data']['role_address']})
|
||||
email_verification.add_attribute('domain', **{'type': 'domain', 'value': emaillookup['data']['domain']})
|
||||
email_verification.add_attribute('score', **{'type': 'float', 'value': emaillookup['data']['score']})
|
||||
email_verification.add_reference(self.attribute['uuid'], 'related-to')
|
||||
self.misp_event.add_object(email_verification)
|
||||
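# Each boolean flag returned by the 'emailverify' endpoint thus becomes a
# 'boolean' attribute on the object, e.g. a response field
# ('disposable', True) yields object_relation 'disposable' with value True.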
|
||||
def _handle_dns_record(self, item, record_type, relationship):
|
||||
dns_record = MISPObject('dns-record')
|
||||
dns_record.add_attribute('queried-domain', type='domain', value=item['host'])
|
||||
|
@ -82,7 +107,10 @@ def handler(q=False):
|
|||
return {'error': 'Unsupported attribute type.'}
|
||||
apikey = request['config']['apikey']
|
||||
apivoid_parser = APIVoidParser(attribute)
|
||||
apivoid_parser.parse_domain(apikey)
|
||||
if attribute['type'] in ['domain', 'hostname']:
|
||||
apivoid_parser.parse_domain(apikey)
|
||||
else:
|
||||
apivoid_parser.handle_email(apikey)
|
||||
return apivoid_parser.get_results()
|
||||
|
||||
|
||||
|
|
|
@ -1,15 +1,26 @@
|
|||
# encoding: utf-8
|
||||
import json
|
||||
import configparser
|
||||
import base64
|
||||
import codecs
|
||||
import censys.common.config
|
||||
from dateutil.parser import isoparse
|
||||
from . import check_input_attribute, standard_error_message
|
||||
from pymisp import MISPAttribute, MISPEvent, MISPObject
|
||||
|
||||
try:
|
||||
import censys.base
|
||||
import censys.ipv4
|
||||
import censys.websites
|
||||
import censys.certificates
|
||||
# needed in order to override the censys module's intent of creating config files in the home folder of the process owner
|
||||
#--
|
||||
def get_config_over() -> configparser.ConfigParser:
|
||||
config = configparser.ConfigParser()
|
||||
config[censys.common.config.DEFAULT] = censys.common.config.default_config
|
||||
return config
|
||||
censys.common.config.get_config = get_config_over
|
||||
#--
|
||||
|
||||
from censys.search import CensysHosts
|
||||
from censys.search import CensysCertificates
|
||||
from censys.common.base import *
|
||||
except ImportError:
|
||||
print("Censys module not installed. Try 'pip install censys'")
|
||||
|
||||
|
@ -20,8 +31,11 @@ mispattributes = {'input': ['ip-src', 'ip-dst', 'domain', 'hostname', 'hostname|
|
|||
moduleinfo = {'version': '0.1', 'author': 'Loïc Fortemps',
|
||||
'description': 'Censys.io expansion module', 'module-type': ['expansion', 'hover']}
|
||||
|
||||
api_id = None
|
||||
api_secret = None
|
||||
|
||||
def handler(q=False):
|
||||
global api_id, api_secret
|
||||
if q is False:
|
||||
return False
|
||||
request = json.loads(q)
|
||||
|
@ -46,7 +60,6 @@ def handler(q=False):
|
|||
attribute = MISPAttribute()
|
||||
attribute.from_dict(**request['attribute'])
|
||||
# Lists to accommodate multi-type attributes
|
||||
conn = list()
|
||||
types = list()
|
||||
values = list()
|
||||
results = list()
|
||||
|
@ -65,26 +78,29 @@ def handler(q=False):
|
|||
types.append(attribute.type)
|
||||
values.append(attribute.value)
|
||||
|
||||
found = False
|
||||
for t in types:
|
||||
# ip, ip-src or ip-dst
|
||||
if t[:2] == "ip":
|
||||
conn.append(censys.ipv4.CensysIPv4(api_id=api_id, api_secret=api_secret))
|
||||
elif t == 'domain' or t == "hostname":
|
||||
conn.append(censys.websites.CensysWebsites(api_id=api_id, api_secret=api_secret))
|
||||
elif 'x509-fingerprint' in t:
|
||||
conn.append(censys.certificates.CensysCertificates(api_id=api_id, api_secret=api_secret))
|
||||
|
||||
found = True
|
||||
for c in conn:
|
||||
val = values.pop(0)
|
||||
try:
|
||||
r = c.view(val)
|
||||
results.append(parse_response(r, attribute))
|
||||
found = True
|
||||
except censys.base.CensysNotFoundException:
|
||||
found = False
|
||||
except Exception:
|
||||
misperrors['error'] = "Connection issue"
|
||||
value = values.pop(0)
|
||||
# ip, ip-src or ip-dst
|
||||
if t[:2] == "ip":
|
||||
r = CensysHosts(api_id, api_secret).view(value)
|
||||
results.append(parse_response(r, attribute))
|
||||
found = True
|
||||
elif t == 'domain' or t == "hostname":
|
||||
# get ips
|
||||
endpoint = CensysHosts(api_id, api_secret)
|
||||
for r_list in endpoint.search(query=value, per_page=5, pages=1):
|
||||
for r in r_list:
|
||||
results.append(parse_response(r, attribute))
|
||||
found = True
|
||||
elif 'x509-fingerprint-sha256' in t:
|
||||
# use api_v1 as Certificates endpoint in api_v2 doesn't yet provide all the details
|
||||
r = CensysCertificates(api_id, api_secret).view(value)
|
||||
results.append(parse_response(r, attribute))
|
||||
found = True
|
||||
except CensysException as e:
|
||||
misperrors['error'] = "ERROR: param {} / response: {}".format(value, e)
|
||||
return misperrors
|
||||
|
||||
if not found:
|
||||
|
@ -98,38 +114,43 @@ def parse_response(censys_output, attribute):
|
|||
misp_event = MISPEvent()
|
||||
misp_event.add_attribute(**attribute)
|
||||
# Generic fields (for IP/Websites)
|
||||
if "autonomous_system" in censys_output:
|
||||
cen_as = censys_output['autonomous_system']
|
||||
if censys_output.get('autonomous_system'):
|
||||
cen_as = censys_output.get('autonomous_system')
|
||||
asn_object = MISPObject('asn')
|
||||
asn_object.add_attribute('asn', value=cen_as["asn"])
|
||||
asn_object.add_attribute('description', value=cen_as['name'])
|
||||
asn_object.add_attribute('subnet-announced', value=cen_as['routed_prefix'])
|
||||
asn_object.add_attribute('country', value=cen_as['country_code'])
|
||||
asn_object.add_attribute('asn', value=cen_as.get("asn"))
|
||||
asn_object.add_attribute('description', value=cen_as.get('name'))
|
||||
asn_object.add_attribute('subnet-announced', value=cen_as.get('routed_prefix'))
|
||||
asn_object.add_attribute('country', value=cen_as.get('country_code'))
|
||||
asn_object.add_reference(attribute.uuid, 'associated-to')
|
||||
misp_event.add_object(**asn_object)
|
||||
|
||||
if "ip" in censys_output and "ports" in censys_output:
|
||||
if censys_output.get('ip') and censys_output.get('services'):
|
||||
ip_object = MISPObject('ip-port')
|
||||
ip_object.add_attribute('ip', value=censys_output['ip'])
|
||||
for p in censys_output['ports']:
|
||||
ip_object.add_attribute('dst-port', value=p)
|
||||
ip_object.add_attribute('ip', value=censys_output.get('ip'))
|
||||
for serv in censys_output.get('services'):
|
||||
if serv.get('port'):
|
||||
ip_object.add_attribute('dst-port', value=serv.get('port'))
|
||||
ip_object.add_reference(attribute.uuid, 'associated-to')
|
||||
misp_event.add_object(**ip_object)
|
||||
|
||||
# We explore all ports to find https or ssh services
|
||||
for k in censys_output.keys():
|
||||
if not isinstance(censys_output[k], dict):
|
||||
for serv in censys_output.get('services', []):
|
||||
if not isinstance(serv, dict):
|
||||
continue
|
||||
if 'https' in censys_output[k]:
|
||||
if serv.get('service_name', '').lower() == 'http' and serv.get('certificate'):
|
||||
try:
|
||||
cert = censys_output[k]['https']['tls']['certificate']
|
||||
cert_obj = get_certificate_object(cert, attribute)
|
||||
misp_event.add_object(**cert_obj)
|
||||
cert = serv.get('certificate', None)
|
||||
if cert:
|
||||
# TODO switch to api_v2 once available
|
||||
# use api_v1 as Certificates endpoint in api_v2 doesn't yet provide all the details
|
||||
cert_details = CensysCertificates(api_id, api_secret).view(cert)
|
||||
cert_obj = get_certificate_object(cert_details, attribute)
|
||||
misp_event.add_object(**cert_obj)
|
||||
except KeyError:
|
||||
print("Error !")
|
||||
if 'ssh' in censys_output[k]:
|
||||
if serv.get('ssh') and serv.get('service_name', '').lower() == 'ssh':
|
||||
try:
|
||||
cert = censys_output[k]['ssh']['v2']['server_host_key']
|
||||
cert = serv.get('ssh').get('server_host_key').get('fingerprint_sha256')
|
||||
# TODO enable once the type is merged
|
||||
# misp_event.add_attribute(type='hasshserver-sha256', value=cert['fingerprint_sha256'])
|
||||
except KeyError:
|
||||
|
@ -144,20 +165,20 @@ def parse_response(censys_output, attribute):
|
|||
if "location" in censys_output:
|
||||
loc_obj = MISPObject('geolocation')
|
||||
loc = censys_output['location']
|
||||
loc_obj.add_attribute('latitude', value=loc['latitude'])
|
||||
loc_obj.add_attribute('longitude', value=loc['longitude'])
|
||||
loc_obj.add_attribute('latitude', value=loc.get('coordinates', {}).get('latitude', None))
|
||||
loc_obj.add_attribute('longitude', value=loc.get('coordinates', {}).get('longitude', None))
|
||||
if 'city' in loc:
|
||||
loc_obj.add_attribute('city', value=loc['city'])
|
||||
loc_obj.add_attribute('country', value=loc['country'])
|
||||
loc_obj.add_attribute('city', value=loc.get('city'))
|
||||
loc_obj.add_attribute('country', value=loc.get('country'))
|
||||
if 'postal_code' in loc:
|
||||
loc_obj.add_attribute('zipcode', value=loc['postal_code'])
|
||||
loc_obj.add_attribute('zipcode', value=loc.get('postal_code'))
|
||||
if 'province' in loc:
|
||||
loc_obj.add_attribute('region', value=loc['province'])
|
||||
loc_obj.add_attribute('region', value=loc.get('province'))
|
||||
loc_obj.add_reference(attribute.uuid, 'associated-to')
|
||||
misp_event.add_object(**loc_obj)
|
||||
|
||||
event = json.loads(misp_event.to_json())
|
||||
return {'Object': event['Object'], 'Attribute': event['Attribute']}
|
||||
return {'Object': event.get('Object', []), 'Attribute': event.get('Attribute', [])}
|
||||
|
||||
|
||||
# In case of multiple enrichment (ip and domain), we need to filter out similar objects
|
||||
|
@ -166,24 +187,23 @@ def remove_duplicates(results):
|
|||
# Only one enrichment was performed so no duplicate
|
||||
if len(results) == 1:
|
||||
return results[0]
|
||||
elif len(results) == 2:
|
||||
final_result = results[0]
|
||||
obj_l2 = results[1]['Object']
|
||||
for o2 in obj_l2:
|
||||
if o2['name'] == "asn":
|
||||
key = "asn"
|
||||
elif o2['name'] == "ip-port":
|
||||
key = "ip"
|
||||
elif o2['name'] == "x509":
|
||||
key = "x509-fingerprint-sha256"
|
||||
elif o2['name'] == "geolocation":
|
||||
key = "latitude"
|
||||
if not check_if_present(o2, key, final_result['Object']):
|
||||
final_result['Object'].append(o2)
|
||||
|
||||
return final_result
|
||||
else:
|
||||
return []
|
||||
final_result = results[0]
|
||||
for result in results[1:]:
|
||||
obj_l = result.get('Object', [])
|
||||
for o2 in obj_l:
|
||||
if o2['name'] == "asn":
|
||||
key = "asn"
|
||||
elif o2['name'] == "ip-port":
|
||||
key = "ip"
|
||||
elif o2['name'] == "x509":
|
||||
key = "x509-fingerprint-sha256"
|
||||
elif o2['name'] == "geolocation":
|
||||
key = "latitude"
|
||||
if not check_if_present(o2, key, final_result.get('Object', [])):
|
||||
final_result['Object'].append(o2)
|
||||
|
||||
return final_result
|
||||
|
||||
|
||||
def check_if_present(object, attribute_name, list_objects):
|
||||
|
@ -253,4 +273,4 @@ def introspection():
|
|||
|
||||
def version():
|
||||
moduleinfo['config'] = moduleconfig
|
||||
return moduleinfo
|
||||
return moduleinfo
|
|
@ -1,42 +1,44 @@
|
|||
import json
|
||||
import requests
|
||||
from . import check_input_attribute, standard_error_message
|
||||
from falconpy import Intel
|
||||
from pymisp import MISPAttribute, MISPEvent
|
||||
|
||||
moduleinfo = {'version': '0.1',
|
||||
moduleinfo = {'version': '0.2',
|
||||
'author': 'Christophe Vandeplas',
|
||||
'description': 'Module to query CrowdStrike Falcon.',
|
||||
'module-type': ['expansion']}
|
||||
'module-type': ['expansion', 'hover']}
|
||||
moduleconfig = ['api_id', 'apikey']
|
||||
misperrors = {'error': 'Error'}
|
||||
misp_types_in = ['domain', 'email-attachment', 'email-dst', 'email-reply-to', 'email-src', 'email-subject',
|
||||
misp_type_in = ['domain', 'email-attachment', 'email-dst', 'email-reply-to', 'email-src', 'email-subject',
|
||||
'filename', 'hostname', 'ip', 'ip-src', 'ip-dst', 'md5', 'mutex', 'regkey', 'sha1', 'sha256', 'uri', 'url',
|
||||
'user-agent', 'whois-registrant-email', 'x509-fingerprint-md5']
|
||||
mapping_out = { # mapping between the MISP attributes types and the compatible CrowdStrike indicator types.
|
||||
'domain': {'types': 'hostname', 'to_ids': True},
|
||||
'email_address': {'types': 'email-src', 'to_ids': True},
|
||||
'email_subject': {'types': 'email-subject', 'to_ids': True},
|
||||
'file_name': {'types': 'filename', 'to_ids': True},
|
||||
'hash_md5': {'types': 'md5', 'to_ids': True},
|
||||
'hash_sha1': {'types': 'sha1', 'to_ids': True},
|
||||
'hash_sha256': {'types': 'sha256', 'to_ids': True},
|
||||
'ip_address': {'types': 'ip-dst', 'to_ids': True},
|
||||
'ip_address_block': {'types': 'ip-dst', 'to_ids': True},
|
||||
'mutex_name': {'types': 'mutex', 'to_ids': True},
|
||||
'registry': {'types': 'regkey', 'to_ids': True},
|
||||
'url': {'types': 'url', 'to_ids': True},
|
||||
'user_agent': {'types': 'user-agent', 'to_ids': True},
|
||||
'x509_serial': {'types': 'x509-fingerprint-md5', 'to_ids': True},
|
||||
mapping_out = { # mapping between the MISP attributes type and the compatible CrowdStrike indicator types.
|
||||
'domain': {'type': 'hostname', 'to_ids': True},
|
||||
'email_address': {'type': 'email-src', 'to_ids': True},
|
||||
'email_subject': {'type': 'email-subject', 'to_ids': True},
|
||||
'file_name': {'type': 'filename', 'to_ids': True},
|
||||
'hash_md5': {'type': 'md5', 'to_ids': True},
|
||||
'hash_sha1': {'type': 'sha1', 'to_ids': True},
|
||||
'hash_sha256': {'type': 'sha256', 'to_ids': True},
|
||||
'ip_address': {'type': 'ip-dst', 'to_ids': True},
|
||||
'ip_address_block': {'type': 'ip-dst', 'to_ids': True},
|
||||
'mutex_name': {'type': 'mutex', 'to_ids': True},
|
||||
'registry': {'type': 'regkey', 'to_ids': True},
|
||||
'url': {'type': 'url', 'to_ids': True},
|
||||
'user_agent': {'type': 'user-agent', 'to_ids': True},
|
||||
'x509_serial': {'type': 'x509-fingerprint-md5', 'to_ids': True},
|
||||
|
||||
'actors': {'types': 'threat-actor'},
|
||||
'malware_families': {'types': 'text', 'categories': 'Attribution'}
|
||||
'actors': {'type': 'threat-actor', 'category': 'Attribution'},
|
||||
'malware_families': {'type': 'text', 'category': 'Attribution'}
|
||||
}
|
||||
misp_types_out = [item['types'] for item in mapping_out.values()]
|
||||
mispattributes = {'input': misp_types_in, 'output': misp_types_out}
|
||||
|
||||
misp_type_out = [item['type'] for item in mapping_out.values()]
|
||||
mispattributes = {'input': misp_type_in, 'format': 'misp_standard'}
|
||||
|
||||
def handler(q=False):
|
||||
if q is False:
|
||||
return False
|
||||
request = json.loads(q)
|
||||
#validate CrowdStrike params
|
||||
if (request.get('config')):
|
||||
if (request['config'].get('apikey') is None):
|
||||
misperrors['error'] = 'CrowdStrike apikey is missing'
|
||||
|
@ -44,41 +46,64 @@ def handler(q=False):
|
|||
if (request['config'].get('api_id') is None):
|
||||
misperrors['error'] = 'CrowdStrike api_id is missing'
|
||||
return misperrors
|
||||
|
||||
#validate attribute
|
||||
if not request.get('attribute') or not check_input_attribute(request['attribute']):
|
||||
return {'error': f'{standard_error_message}, which should contain at least a type, a value and an uuid.'}
|
||||
attribute = request.get('attribute')
|
||||
if not any(input_type == attribute.get('type') for input_type in misp_type_in):
|
||||
return {'error': 'Unsupported attribute type.'}
|
||||
|
||||
client = CSIntelAPI(request['config']['api_id'], request['config']['apikey'])
|
||||
|
||||
attribute = MISPAttribute()
|
||||
attribute.from_dict(**request.get('attribute'))
|
||||
r = {"results": []}
|
||||
|
||||
valid_type = False
|
||||
for k in misp_types_in:
|
||||
if request.get(k):
|
||||
# map the MISP typ to the CrowdStrike type
|
||||
for item in lookup_indicator(client, request[k]):
|
||||
r['results'].append(item)
|
||||
valid_type = True
|
||||
|
||||
try:
|
||||
for k in misp_type_in:
|
||||
if attribute.type == k:
|
||||
# map the MISP type to the CrowdStrike type
|
||||
r['results'].append(lookup_indicator(client, attribute))
|
||||
valid_type = True
|
||||
except Exception as e:
|
||||
return {'error': f"{e}"}
|
||||
|
||||
if not valid_type:
|
||||
misperrors['error'] = "Unsupported attributes type"
|
||||
return misperrors
|
||||
return r
|
||||
return {'results': r.get('results').pop()}
|
||||
|
||||
|
||||
def lookup_indicator(client, item):
|
||||
result = client.search_indicator(item)
|
||||
for item in result:
|
||||
for relation in item['relations']:
|
||||
if mapping_out.get(relation['type']):
|
||||
r = mapping_out[relation['type']].copy()
|
||||
r['values'] = relation['indicator']
|
||||
yield(r)
|
||||
for actor in item['actors']:
|
||||
r = mapping_out['actors'].copy()
|
||||
r['values'] = actor
|
||||
yield(r)
|
||||
for malware_family in item['malware_families']:
|
||||
r = mapping_out['malware_families'].copy()
|
||||
r['values'] = malware_family
|
||||
yield(r)
|
||||
def lookup_indicator(client, ref_attribute):
|
||||
result = client.search_indicator(ref_attribute.value)
|
||||
misp_event = MISPEvent()
|
||||
misp_event.add_attribute(**ref_attribute)
|
||||
|
||||
for item in result.get('resources', []):
|
||||
for relation in item.get('relations'):
|
||||
if mapping_out.get(relation.get('type')):
|
||||
r = mapping_out[relation.get('type')].copy()
|
||||
r['value'] = relation.get('indicator')
|
||||
attribute = MISPAttribute()
|
||||
attribute.from_dict(**r)
|
||||
misp_event.add_attribute(**attribute)
|
||||
for actor in item.get('actors'):
|
||||
r = mapping_out.get('actors').copy()
|
||||
r['value'] = actor
|
||||
attribute = MISPAttribute()
|
||||
attribute.from_dict(**r)
|
||||
misp_event.add_attribute(**attribute)
|
||||
if item.get('malware_families'):
|
||||
r = mapping_out.get('malware_families').copy()
|
||||
r['value'] = f"malware_families: {' | '.join(item.get('malware_families'))}"
|
||||
attribute = MISPAttribute()
|
||||
attribute.from_dict(**r)
|
||||
misp_event.add_attribute(**attribute)
|
||||
|
||||
event = json.loads(misp_event.to_json())
|
||||
return {'Object': event.get('Object', []), 'Attribute': event.get('Attribute', [])}
|
||||
|
||||
def introspection():
|
||||
return mispattributes
|
||||
|
@ -90,39 +115,25 @@ def version():
|
|||
|
||||
|
||||
class CSIntelAPI():
|
||||
def __init__(self, custid=None, custkey=None, perpage=100, page=1, baseurl="https://intelapi.crowdstrike.com/indicator/v2/search/"):
|
||||
def __init__(self, custid=None, custkey=None):
|
||||
# customer id and key should be passed when obj is created
|
||||
self.custid = custid
|
||||
self.custkey = custkey
|
||||
self.falcon = Intel(client_id=custid, client_secret=custkey)
|
||||
|
||||
self.baseurl = baseurl
|
||||
self.perpage = perpage
|
||||
self.page = page
|
||||
|
||||
def request(self, query):
|
||||
headers = {'X-CSIX-CUSTID': self.custid,
|
||||
'X-CSIX-CUSTKEY': self.custkey,
|
||||
'Content-Type': 'application/json'}
|
||||
|
||||
full_query = self.baseurl + query
|
||||
|
||||
r = requests.get(full_query, headers=headers)
|
||||
def search_indicator(self, query):
|
||||
r = self.falcon.query_indicator_entities(q=query)
|
||||
# 400 - bad request
|
||||
if r.status_code == 400:
|
||||
if r.get('status_code') == 400:
|
||||
raise Exception('HTTP Error 400 - Bad request.')
|
||||
|
||||
# 404 - oh shit
|
||||
if r.status_code == 404:
|
||||
if r.get('status_code') == 404:
|
||||
raise Exception('HTTP Error 404 - awww snap.')
|
||||
|
||||
# catch all?
|
||||
if r.status_code != 200:
|
||||
raise Exception('HTTP Error: ' + str(r.status_code))
|
||||
if r.get('status_code') != 200:
|
||||
raise Exception('HTTP Error: ' + str(r.get('status_code')))
|
||||
|
||||
if r.text:
|
||||
return r
|
||||
if r.get('body', {}).get('errors'):
|
||||
raise Exception('API Error: ' + ' | '.join(r.get('body').get('errors')))
|
||||
|
||||
def search_indicator(self, item):
|
||||
query = 'indicator?match=' + item
|
||||
r = self.request(query)
|
||||
return json.loads(r.text)
|
||||
return r.get('body', {})
|
|
@ -1,3 +1,7 @@
|
|||
# This module does not appear to be actively maintained.
|
||||
# Please see https://github.com/DomainTools/domaintools_misp
|
||||
# for the official DomainTools-supported MISP app
|
||||
|
||||
import json
|
||||
import logging
|
||||
import sys
|
||||
|
|
|
@ -2,7 +2,7 @@ import dnsdb2
|
|||
import json
|
||||
from . import check_input_attribute, standard_error_message
|
||||
from datetime import datetime
|
||||
from pymisp import MISPEvent, MISPObject
|
||||
from pymisp import MISPEvent, MISPObject, Distribution
|
||||
|
||||
misperrors = {'error': 'Error'}
|
||||
standard_query_input = [
|
||||
|
@ -43,7 +43,7 @@ moduleconfig = ['apikey', 'server', 'limit', 'flex_queries']
|
|||
|
||||
DEFAULT_DNSDB_SERVER = 'https://api.dnsdb.info'
|
||||
DEFAULT_LIMIT = 10
|
||||
|
||||
DEFAULT_DISTRIBUTION_SETTING = Distribution.your_organisation_only.value
|
||||
TYPE_TO_FEATURE = {
|
||||
"btc": "Bitcoin address",
|
||||
"dkim": "domainkeys identified mail",
|
||||
|
@ -103,6 +103,7 @@ class FarsightDnsdbParser():
|
|||
comment = self.comment % (query_type, TYPE_TO_FEATURE[self.attribute['type']], self.attribute['value'])
|
||||
for result in results:
|
||||
passivedns_object = MISPObject('passive-dns')
|
||||
passivedns_object.distribution = DEFAULT_DISTRIBUTION_SETTING
|
||||
if result.get('rdata') and isinstance(result['rdata'], list):
|
||||
for rdata in result.pop('rdata'):
|
||||
passivedns_object.add_attribute(**self._parse_attribute(comment, 'rdata', rdata))
|
||||
|
@ -121,7 +122,7 @@ class FarsightDnsdbParser():
|
|||
return {'results': results}
|
||||
|
||||
def _parse_attribute(self, comment, feature, value):
|
||||
attribute = {'value': value, 'comment': comment}
|
||||
attribute = {'value': value, 'comment': comment, 'distribution': DEFAULT_DISTRIBUTION_SETTING}
|
||||
attribute.update(self.passivedns_mapping[feature])
|
||||
return attribute
|
||||
|
||||
|
|
|
@@ -1,10 +1,12 @@
-import requests
 import json
 
+import requests
+from pymisp import MISPEvent, MISPObject
+
 misperrors = {"error": "Error"}
-mispattributes = {"input": ["ip-dst", "ip-src"], "output": ["text"]}
+mispattributes = {"input": ["ip-dst", "ip-src", "vulnerability"], "output": ["text"]}
 moduleinfo = {
-    "version": "1.0",
+    "version": "1.1",
     "author": "Brad Chiappetta <brad@greynoise.io>",
     "description": "Module to access GreyNoise.io API.",
     "module-type": ["hover"],

@@ -15,16 +17,71 @@ codes_mapping = {
     "0x01": "The IP has been observed by the GreyNoise sensor network",
     "0x02": "The IP has been observed scanning the GreyNoise sensor network, "
     "but has not completed a full connection, meaning this can be spoofed",
-    "0x03": "The IP is adjacent to another host that has been directly observed by "
-    "the GreyNoise sensor network",
+    "0x03": "The IP is adjacent to another host that has been directly observed by the GreyNoise sensor network",
     "0x04": "Reserved",
     "0x05": "This IP is commonly spoofed in Internet-scan activity",
-    "0x06": "This IP has been observed as noise, but this host belongs to a cloud "
-    "provider where IPs can be cycled frequently",
+    "0x06": "This IP has been observed as noise, but this host belongs to a cloud provider where IPs can be "
+    "cycled frequently",
     "0x07": "This IP is invalid",
-    "0x08": "This IP was classified as noise, but has not been observed engaging in "
-    "Internet-wide scans or attacks in over 60 days",
+    "0x08": "This IP was classified as noise, but has not been observed engaging in Internet-wide scans or "
+    "attacks in over 90 days",
+    "0x09": "IP was found in RIOT",
+    "0x10": "IP has been observed by the GreyNoise sensor network and is in RIOT",
 }
+vulnerability_mapping = {
+    "id": ("vulnerability", "CVE #"),
+    "details": ("text", "Details"),
+    "count": ("text", "Total Scanner Count"),
+}
+enterprise_context_basic_mapping = {"ip": ("text", "IP Address"), "code_message": ("text", "Code Message")}
+enterprise_context_advanced_mapping = {
+    "noise": ("text", "Is Internet Background Noise"),
+    "link": ("link", "Visualizer Link"),
+    "classification": ("text", "Classification"),
+    "actor": ("text", "Actor"),
+    "tags": ("text", "Tags"),
+    "cve": ("text", "CVEs"),
+    "first_seen": ("text", "First Seen Scanning"),
+    "last_seen": ("text", "Last Seen Scanning"),
+    "vpn": ("text", "Known VPN Service"),
+    "vpn_service": ("text", "VPN Service Name"),
+    "bot": ("text", "Known BOT"),
+}
+enterprise_context_advanced_metadata_mapping = {
+    "asn": ("text", "ASN"),
+    "rdns": ("text", "rDNS"),
+    "category": ("text", "Category"),
+    "tor": ("text", "Known Tor Exit Node"),
+    "region": ("text", "Region"),
+    "city": ("text", "City"),
+    "country": ("text", "Country"),
+    "country_code": ("text", "Country Code"),
+    "organization": ("text", "Organization"),
+}
+enterprise_riot_mapping = {
+    "riot": ("text", "Is Common Business Service"),
+    "link": ("link", "Visualizer Link"),
+    "category": ("text", "RIOT Category"),
+    "name": ("text", "Provider Name"),
+    "trust_level": ("text", "RIOT Trust Level"),
+    "last_updated": ("text", "Last Updated"),
+}
+community_found_mapping = {
+    "ip": ("text", "IP Address"),
+    "noise": ("text", "Is Internet Background Noise"),
+    "riot": ("text", "Is Common Business Service"),
+    "classification": ("text", "Classification"),
+    "last_seen": ("text", "Last Seen"),
+    "name": ("text", "Name"),
+    "link": ("link", "Visualizer Link"),
+}
+community_not_found_mapping = {
+    "ip": ("text", "IP Address"),
+    "noise": ("text", "Is Internet Background Noise"),
+    "riot": ("text", "Is Common Business Service"),
+    "message": ("text", "Message"),
+}
+misp_event = MISPEvent()
 
 
 def handler(q=False):  # noqa: C901
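# A sketch of how the mapping tables above are consumed (mirroring the loops
# in the handler below): each field of an API response becomes one attribute
# on a MISP object, typed and named by the (type, object_relation) tuple.
# The response dict below is hypothetical.
from pymisp import MISPObject

mapping = {"ip": ("text", "IP Address"), "noise": ("text", "Is Internet Background Noise")}
api_response = {"ip": "1.2.3.4", "noise": True}
obj = MISPObject("greynoise-community-ip-context")
for feature, (attribute_type, relation) in mapping.items():
    value = api_response.get(feature)
    if value:
        obj.add_attribute(relation, **{"type": attribute_type, "value": value})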
@@ -33,66 +90,153 @@ def handler(q=False):  # noqa: C901
     request = json.loads(q)
     if not request.get("config") or not request["config"].get("api_key"):
         return {"error": "Missing Greynoise API key."}
-    if request["config"]["api_type"] and request["config"]["api_type"] == "enterprise":
-        greynoise_api_url = "https://api.greynoise.io/v2/noise/quick/"
-    else:
-        greynoise_api_url = "https://api.greynoise.io/v3/community/"
 
     headers = {
         "Accept": "application/json",
         "key": request["config"]["api_key"],
         "User-Agent": "greynoise-misp-module-{}".format(moduleinfo["version"]),
     }
-    for input_type in mispattributes["input"]:
-        if input_type in request:
-            ip = request[input_type]
-            break
-    else:
-        misperrors["error"] = "Unsupported attributes type."
+
+    if not (request.get("vulnerability") or request.get("ip-dst") or request.get("ip-src")):
+        misperrors["error"] = "Vulnerability id missing"
         return misperrors
-    response = requests.get(f"{greynoise_api_url}{ip}", headers=headers)  # Real request
-    if response.status_code == 200:
-        if request["config"]["api_type"] == "enterprise":
-            return {
-                "results": [
-                    {
-                        "types": ["text"],
-                        "values": codes_mapping[response.json()["code"]],
-                    }
-                ]
-            }
-        elif response.json()["noise"]:
-            return {
-                "results": [
-                    {
-                        "types": ["text"],
-                        "values": "IP Address ({}) has been observed by GreyNoise "
-                        "scanning the internet in the last 90 days. GreyNoise has "
-                        "classified it as {} and it was last seen on {}. For more "
-                        "information visit {}".format(
-                            response.json()["ip"],
-                            response.json()["classification"],
-                            response.json()["last_seen"],
-                            response.json()["link"],
-                        ),
-                    }
-                ]
-            }
-        elif response.json()["riot"]:
-            return {
-                "results": [
-                    {
-                        "types": ["text"],
-                        "values": "IP Address ({}) is part of GreyNoise Project RIOT "
-                        "and likely belongs to a benign service from {}. For more "
-                        "information visit {}".format(
-                            response.json()["ip"],
-                            response.json()["name"],
-                            response.json()["link"],
-                        ),
-                    }
-                ]
-            }
+
+    ip = ""
+    vulnerability = ""
+
+    if request.get("ip-dst"):
+        ip = request.get("ip-dst")
+    elif request.get("ip-src"):
+        ip = request.get("ip-src")
+    else:
+        vulnerability = request.get("vulnerability")
+
+    if ip:
+        if request["config"]["api_type"] and request["config"]["api_type"] == "enterprise":
+            greynoise_api_url = "https://api.greynoise.io/v2/noise/quick/"
+        else:
+            greynoise_api_url = "https://api.greynoise.io/v3/community/"
+
+        response = requests.get(f"{greynoise_api_url}{ip}", headers=headers)  # Real request for IP Query
+        if response.status_code == 200:
+            if request["config"]["api_type"] == "enterprise":
+                response = response.json()
+                enterprise_context_object = MISPObject("greynoise-ip-context")
+                for feature in ("ip", "code_message"):
+                    if feature == "code_message":
+                        value = codes_mapping[response.get("code")]
+                    else:
+                        value = response.get(feature)
+                    if value:
+                        attribute_type, relation = enterprise_context_basic_mapping[feature]
+                        enterprise_context_object.add_attribute(relation, **{"type": attribute_type, "value": value})
+                if response["noise"]:
+                    greynoise_api_url = "https://api.greynoise.io/v2/noise/context/"
+                    context_response = requests.get(f"{greynoise_api_url}{ip}", headers=headers)
+                    context_response = context_response.json()
+                    context_response["link"] = "https://www.greynoise.io/viz/ip/" + ip
+                    if "tags" in context_response:
+                        context_response["tags"] = ",".join(context_response["tags"])
+                    if "cve" in context_response:
+                        context_response["cve"] = ",".join(context_response["cve"])
+                    for feature in enterprise_context_advanced_mapping.keys():
+                        value = context_response.get(feature)
+                        if value:
+                            attribute_type, relation = enterprise_context_advanced_mapping[feature]
+                            enterprise_context_object.add_attribute(
+                                relation, **{"type": attribute_type, "value": value}
+                            )
+                    for feature in enterprise_context_advanced_metadata_mapping.keys():
+                        value = context_response["metadata"].get(feature)
+                        if value:
+                            attribute_type, relation = enterprise_context_advanced_metadata_mapping[feature]
+                            enterprise_context_object.add_attribute(
+                                relation, **{"type": attribute_type, "value": value}
+                            )
+
+                if response["riot"]:
+                    greynoise_api_url = "https://api.greynoise.io/v2/riot/"
+                    riot_response = requests.get(f"{greynoise_api_url}{ip}", headers=headers)
+                    riot_response = riot_response.json()
+                    riot_response["link"] = "https://www.greynoise.io/viz/riot/" + ip
+                    for feature in enterprise_riot_mapping.keys():
+                        value = riot_response.get(feature)
+                        if value:
+                            attribute_type, relation = enterprise_riot_mapping[feature]
+                            enterprise_context_object.add_attribute(
+                                relation, **{"type": attribute_type, "value": value}
+                            )
+                misp_event.add_object(enterprise_context_object)
+                event = json.loads(misp_event.to_json())
+                results = {key: event[key] for key in ("Attribute", "Object") if (key in event and event[key])}
+                return {"results": results}
+            else:
+                response = response.json()
+                community_context_object = MISPObject("greynoise-community-ip-context")
+                for feature in community_found_mapping.keys():
+                    value = response.get(feature)
+                    if value:
+                        attribute_type, relation = community_found_mapping[feature]
+                        community_context_object.add_attribute(relation, **{"type": attribute_type, "value": value})
+                misp_event.add_object(community_context_object)
+                event = json.loads(misp_event.to_json())
+                results = {key: event[key] for key in ("Attribute", "Object") if (key in event and event[key])}
+                return {"results": results}
+        if response.status_code == 404 and request["config"]["api_type"] != "enterprise":
+            response = response.json()
+            community_context_object = MISPObject("greynoise-community-ip-context")
+            for feature in community_not_found_mapping.keys():
+                value = response.get(feature)
+                if value:
+                    attribute_type, relation = community_not_found_mapping[feature]
+                    community_context_object.add_attribute(relation, **{"type": attribute_type, "value": value})
+            misp_event.add_object(community_context_object)
+            event = json.loads(misp_event.to_json())
+            results = {key: event[key] for key in ("Attribute", "Object") if (key in event and event[key])}
+            return {"results": results}
+
+    if vulnerability:
+        if request["config"]["api_type"] and request["config"]["api_type"] == "enterprise":
+            greynoise_api_url = "https://api.greynoise.io/v2/experimental/gnql/stats"
+            querystring = {"query": f"last_seen:1w cve:{vulnerability}"}
+        else:
+            misperrors["error"] = "Vulnerability Not Supported with Community API Key"
+            return misperrors
+
+        response = requests.get(f"{greynoise_api_url}", headers=headers, params=querystring)  # Real request
+
+        if response.status_code == 200:
+            response = response.json()
+            vulnerability_object = MISPObject("greynoise-vuln-info")
+            response["details"] = (
+                "The IP count below reflects the number of IPs seen "
+                "by GreyNoise in the last 7 days scanning for this CVE."
+            )
+            response["id"] = vulnerability
+            for feature in ("id", "details", "count"):
+                value = response.get(feature)
+                if value:
+                    attribute_type, relation = vulnerability_mapping[feature]
+                    vulnerability_object.add_attribute(relation, **{"type": attribute_type, "value": value})
+            classifications = response["stats"].get("classifications")
+            for item in classifications:
+                if item["classification"] == "benign":
+                    value = item["count"]
+                    attribute_type, relation = ("text", "Benign Scanner Count")
+                    vulnerability_object.add_attribute(relation, **{"type": attribute_type, "value": value})
+                if item["classification"] == "unknown":
+                    value = item["count"]
+                    attribute_type, relation = ("text", "Unknown Scanner Count")
+                    vulnerability_object.add_attribute(relation, **{"type": attribute_type, "value": value})
+                if item["classification"] == "malicious":
+                    value = item["count"]
+                    attribute_type, relation = ("text", "Malicious Scanner Count")
+                    vulnerability_object.add_attribute(relation, **{"type": attribute_type, "value": value})
+            misp_event.add_object(vulnerability_object)
+            event = json.loads(misp_event.to_json())
+            results = {key: event[key] for key in ("Attribute", "Object") if (key in event and event[key])}
+            return {"results": results}
 
     # There is an error
     errors = {
         400: "Bad request.",

@@ -103,9 +247,7 @@ def handler(q=False):  # noqa: C901
     try:
         misperrors["error"] = errors[response.status_code]
     except KeyError:
-        misperrors[
-            "error"
-        ] = f"GreyNoise API not accessible (HTTP {response.status_code})"
+        misperrors["error"] = f"GreyNoise API not accessible (HTTP {response.status_code})"
     return misperrors
 
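# A hedged invocation sketch, assuming the standard misp-modules handler
# contract used above (a JSON string in, a results dict or misperrors out).
# The IP and key below are placeholders.
import json

query = {
    "ip-dst": "1.2.3.4",
    "config": {"api_key": "<GreyNoise key>", "api_type": "enterprise"},
}
result = handler(json.dumps(query))
# -> {"results": {"Object": [...]}} on a hit, or misperrors on failure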
@@ -0,0 +1,108 @@
import json
import requests
from . import check_input_attribute, standard_error_message
from collections import defaultdict
from pymisp import MISPEvent, MISPObject

misperrors = {'error': 'Error'}
mispattributes = {'input': ['md5', 'sha1', 'sha256'], 'format': 'misp_standard'}
moduleinfo = {'version': '2', 'author': 'Alexandre Dulaunoy',
              'description': 'An expansion module to enrich a file hash with hashlookup.circl.lu services (NSRL and other sources)',
              'module-type': ['expansion', 'hover']}
moduleconfig = ["custom_API"]
hashlookup_url = 'https://hashlookup.circl.lu/'


class HashlookupParser():
    def __init__(self, attribute, hashlookupresult, api_url):
        self.attribute = attribute
        self.hashlookupresult = hashlookupresult
        self.api_url = api_url
        self.misp_event = MISPEvent()
        self.misp_event.add_attribute(**attribute)
        self.references = defaultdict(list)

    def get_result(self):
        if self.references:
            self.__build_references()
        event = json.loads(self.misp_event.to_json())
        results = {key: event[key] for key in ('Attribute', 'Object') if (key in event and event[key])}
        return {'results': results}

    def parse_hashlookup_information(self):
        hashlookup_object = MISPObject('hashlookup')
        if 'source' in self.hashlookupresult:
            hashlookup_object.add_attribute('source', **{'type': 'text', 'value': self.hashlookupresult['source']})
        if 'KnownMalicious' in self.hashlookupresult:
            hashlookup_object.add_attribute('KnownMalicious', **{'type': 'text', 'value': self.hashlookupresult['KnownMalicious']})
        if 'MD5' in self.hashlookupresult:
            hashlookup_object.add_attribute('MD5', **{'type': 'md5', 'value': self.hashlookupresult['MD5']})
        # SHA-1 is the default value in hashlookup; it must always be present
        hashlookup_object.add_attribute('SHA-1', **{'type': 'sha1', 'value': self.hashlookupresult['SHA-1']})
        if 'SHA-256' in self.hashlookupresult:
            hashlookup_object.add_attribute('SHA-256', **{'type': 'sha256', 'value': self.hashlookupresult['SHA-256']})
        if 'SSDEEP' in self.hashlookupresult:
            hashlookup_object.add_attribute('SSDEEP', **{'type': 'ssdeep', 'value': self.hashlookupresult['SSDEEP']})
        if 'TLSH' in self.hashlookupresult:
            hashlookup_object.add_attribute('TLSH', **{'type': 'tlsh', 'value': self.hashlookupresult['TLSH']})
        if 'FileName' in self.hashlookupresult:
            hashlookup_object.add_attribute('FileName', **{'type': 'filename', 'value': self.hashlookupresult['FileName']})
        if 'FileSize' in self.hashlookupresult:
            hashlookup_object.add_attribute('FileSize', **{'type': 'size-in-bytes', 'value': self.hashlookupresult['FileSize']})
        hashlookup_object.add_reference(self.attribute['uuid'], 'related-to')
        self.misp_event.add_object(hashlookup_object)

    def __build_references(self):
        for object_uuid, references in self.references.items():
            for misp_object in self.misp_event.objects:
                if misp_object.uuid == object_uuid:
                    for reference in references:
                        misp_object.add_reference(**reference)
                    break


def check_url(url):
    return "{}/".format(url) if not url.endswith('/') else url


def handler(q=False):
    if q is False:
        return False
    request = json.loads(q)
    if not request.get('attribute') or not check_input_attribute(request['attribute']):
        return {'error': f'{standard_error_message}, which should contain at least a type, a value and an uuid.'}
    attribute = request['attribute']
    if attribute.get('type') not in ('md5', 'sha1', 'sha256'):
        misperrors['error'] = 'md5 or sha1 or sha256 is missing.'
        return misperrors
    api_url = check_url(request['config']['custom_API']) if request.get('config', {}).get('custom_API') else hashlookup_url
    r = requests.get("{}/lookup/{}/{}".format(api_url, attribute.get('type'), attribute['value']))
    if r.status_code == 200:
        hashlookupresult = r.json()
        if not hashlookupresult:
            misperrors['error'] = 'Empty result'
            return misperrors
    elif r.status_code == 404:
        misperrors['error'] = 'Non-existing hash'
        return misperrors
    else:
        misperrors['error'] = 'API not accessible'
        return misperrors
    parser = HashlookupParser(attribute, hashlookupresult, api_url)
    parser.parse_hashlookup_information()
    result = parser.get_result()
    return result


def introspection():
    return mispattributes


def version():
    moduleinfo['config'] = moduleconfig
    return moduleinfo
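# The service call this module wraps, as a standalone sketch; the endpoint
# format comes straight from the handler above, and the hash is a placeholder.
import requests

HASH = "<md5 of the file>"
r = requests.get(f"https://hashlookup.circl.lu/lookup/md5/{HASH}")
if r.status_code == 200:
    entry = r.json()
    print(entry.get('FileName'), entry.get('SHA-1'))
elif r.status_code == 404:
    print('hash not known to hashlookup')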
@@ -0,0 +1,627 @@
import json
import logging
import requests
from requests.exceptions import (
    HTTPError,
    ProxyError,
    InvalidURL,
    ConnectTimeout
)
from . import check_input_attribute, standard_error_message
from pymisp import MISPEvent, MISPAttribute, MISPObject, MISPTag, Distribution

ip_query_input_type = [
    'ip-src',
    'ip-dst'
]
url_query_input_type = [
    'hostname',
    'domain',
    'url',
    'uri'
]
email_query_input_type = [
    'email',
    'email-src',
    'email-dst',
    'target-email',
    'whois-registrant-email'
]
phone_query_input_type = [
    'phone-number',
    'whois-registrant-phone'
]

misperrors = {
    'error': 'Error'
}
mispattributes = {
    'input': ip_query_input_type + url_query_input_type + email_query_input_type + phone_query_input_type,
    'format': 'misp_standard'
}
moduleinfo = {
    'version': '0.1',
    'author': 'David Mackler',
    'description': 'IPQualityScore MISP Expansion Module for IP reputation, Email Validation, Phone Number Validation, '
                   'Malicious Domain and Malicious URL Scanner.',
    'module-type': ['expansion', 'hover']
}
moduleconfig = ['apikey']

logger = logging.getLogger('ipqualityscore')
logger.setLevel(logging.DEBUG)
BASE_URL = 'https://ipqualityscore.com/api/json'
DEFAULT_DISTRIBUTION_SETTING = Distribution.your_organisation_only.value
IP_ENRICH = 'ip'
URL_ENRICH = 'url'
EMAIL_ENRICH = 'email'
PHONE_ENRICH = 'phone'


class RequestHandler:
    """A class for handling any outbound requests from this module."""

    def __init__(self, apikey):
        self.session = requests.Session()
        self.api_key = apikey

    def get(self, url: str, headers: dict = None, params: dict = None) -> requests.Response:
        """General get method to fetch the response from IPQualityScore."""
        try:
            response = self.session.get(
                url, headers=headers, params=params
            ).json()
            if str(response["success"]) != "True":
                msg = response["message"]
                logger.error(f"Error: {msg}")
                misperrors["error"] = msg
            else:
                return response
        except (ConnectTimeout, ProxyError, InvalidURL) as error:
            msg = "Error connecting with IPQualityScore."
            logger.error(f"{msg} Error: {error}")
            misperrors["error"] = msg

    def ipqs_lookup(self, reputation_type: str, ioc: str) -> requests.Response:
        """Do a lookup call."""
        url = f"{BASE_URL}/{reputation_type}"
        payload = {reputation_type: ioc}
        headers = {"IPQS-KEY": self.api_key}
        try:
            response = self.get(url, headers, payload)
        except HTTPError as error:
            msg = f"Error when requesting data from IPQualityScore. {error.response}: {error.response.reason}"
            logger.error(msg)
            misperrors["error"] = msg
            raise
        return response


def parse_attribute(comment, feature, value):
    """Generic method for parsing the attributes in the object."""
    attribute = {
        'type': 'text',
        'value': value,
        'comment': comment,
        'distribution': DEFAULT_DISTRIBUTION_SETTING,
        'object_relation': feature
    }
    return attribute
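# What parse_attribute() returns, sketched for one field (the values are
# placeholders); this dict is exactly what MISPObject.add_attribute(**attr)
# consumes further down:
attr = parse_attribute("Results from IPQualityScore IP Reputation API",
                       "IPQS: Fraud Score", "87")
# -> {'type': 'text', 'value': '87',
#     'comment': 'Results from IPQualityScore IP Reputation API',
#     'distribution': 0, 'object_relation': 'IPQS: Fraud Score'}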
class IPQualityScoreParser:
    """A class for handling the enrichment objects"""

    def __init__(self, attribute):
        self.rf_white = "#CCCCCC"
        self.rf_grey = "#CDCDCD"
        self.rf_yellow = "#FFCF00"
        self.rf_red = "#D10028"
        self.clean = "CLEAN"
        self.low = "LOW RISK"
        self.medium = "MODERATE RISK"
        self.high = "HIGH RISK"
        self.critical = "CRITICAL"
        self.invalid = "INVALID"
        self.suspicious = "SUSPICIOUS"
        self.malware = "CRITICAL"
        self.phishing = "CRITICAL"
        self.disposable = "CRITICAL"
        self.attribute = attribute
        self.misp_event = MISPEvent()
        self.misp_event.add_attribute(**attribute)
        self.ipqs_object = MISPObject('IPQS Fraud and Risk Scoring Object')
        self.ipqs_object.template_uuid = "57d066e6-6d66-42a7-a1ad-e075e39b2b5e"
        self.ipqs_object.template_id = "1"
        self.ipqs_object.description = "IPQS Fraud and Risk Scoring Data"
        setattr(self.ipqs_object, 'meta-category', 'network')
        description = (
            "An object containing the enriched attribute and "
            "related entities from IPQualityScore."
        )
        self.ipqs_object.from_dict(
            **{"meta-category": "misc", "description": description, "distribution": DEFAULT_DISTRIBUTION_SETTING}
        )

        temp_attr = MISPAttribute()
        temp_attr.from_dict(**attribute)
        self.enriched_attribute = MISPAttribute()
        self.enriched_attribute.from_dict(
            **{"value": temp_attr.value, "type": temp_attr.type, "distribution": DEFAULT_DISTRIBUTION_SETTING}
        )
        self.ipqs_object.distribution = DEFAULT_DISTRIBUTION_SETTING
        self.ip_data_items = [
            'fraud_score',
            'country_code',
            'region',
            'city',
            'zip_code',
            'ISP',
            'ASN',
            'organization',
            'is_crawler',
            'timezone',
            'mobile',
            'host',
            'proxy',
            'vpn',
            'tor',
            'active_vpn',
            'active_tor',
            'recent_abuse',
            'bot_status',
            'connection_type',
            'abuse_velocity',
            'latitude',
            'longitude'
        ]
        self.ip_data_items_friendly_names = {
            'fraud_score': 'IPQS: Fraud Score',
            'country_code': 'IPQS: Country Code',
            'region': 'IPQS: Region',
            'city': 'IPQS: City',
            'zip_code': 'IPQS: Zip Code',
            'ISP': 'IPQS: ISP',
            'ASN': 'IPQS: ASN',
            'organization': 'IPQS: Organization',
            'is_crawler': 'IPQS: Is Crawler',
            'timezone': 'IPQS: Timezone',
            'mobile': 'IPQS: Mobile',
            'host': 'IPQS: Host',
            'proxy': 'IPQS: Proxy',
            'vpn': 'IPQS: VPN',
            'tor': 'IPQS: TOR',
            'active_vpn': 'IPQS: Active VPN',
            'active_tor': 'IPQS: Active TOR',
            'recent_abuse': 'IPQS: Recent Abuse',
            'bot_status': 'IPQS: Bot Status',
            'connection_type': 'IPQS: Connection Type',
            'abuse_velocity': 'IPQS: Abuse Velocity',
            'latitude': 'IPQS: Latitude',
            'longitude': 'IPQS: Longitude'
        }
        self.url_data_items = [
            'unsafe',
            'domain',
            'ip_address',
            'server',
            'domain_rank',
            'dns_valid',
            'parking',
            'spamming',
            'malware',
            'phishing',
            'suspicious',
            'adult',
            'risk_score',
            'category',
            'domain_age'
        ]
        self.url_data_items_friendly_names = {
            'unsafe': 'IPQS: Unsafe',
            'domain': 'IPQS: Domain',
            'ip_address': 'IPQS: IP Address',
            'server': 'IPQS: Server',
            'domain_rank': 'IPQS: Domain Rank',
            'dns_valid': 'IPQS: DNS Valid',
            'parking': 'IPQS: Parking',
            'spamming': 'IPQS: Spamming',
            'malware': 'IPQS: Malware',
            'phishing': 'IPQS: Phishing',
            'suspicious': 'IPQS: Suspicious',
            'adult': 'IPQS: Adult',
            'risk_score': 'IPQS: Risk Score',
            'category': 'IPQS: Category',
            'domain_age': 'IPQS: Domain Age'
        }
        self.email_data_items = [
            'valid',
            'disposable',
            'smtp_score',
            'overall_score',
            'first_name',
            'generic',
            'common',
            'dns_valid',
            'honeypot',
            'deliverability',
            'frequent_complainer',
            'spam_trap_score',
            'catch_all',
            'timed_out',
            'suspect',
            'recent_abuse',
            'fraud_score',
            'suggested_domain',
            'leaked',
            'sanitized_email',
            'domain_age',
            'first_seen'
        ]
        self.email_data_items_friendly_names = {
            'valid': 'IPQS: Valid',
            'disposable': 'IPQS: Disposable',
            'smtp_score': 'IPQS: SMTP Score',
            'overall_score': 'IPQS: Overall Score',
            'first_name': 'IPQS: First Name',
            'generic': 'IPQS: Generic',
            'common': 'IPQS: Common',
            'dns_valid': 'IPQS: DNS Valid',
            'honeypot': 'IPQS: Honeypot',
            'deliverability': 'IPQS: Deliverability',
            'frequent_complainer': 'IPQS: Frequent Complainer',
            'spam_trap_score': 'IPQS: Spam Trap Score',
            'catch_all': 'IPQS: Catch All',
            'timed_out': 'IPQS: Timed Out',
            'suspect': 'IPQS: Suspect',
            'recent_abuse': 'IPQS: Recent Abuse',
            'fraud_score': 'IPQS: Fraud Score',
            'suggested_domain': 'IPQS: Suggested Domain',
            'leaked': 'IPQS: Leaked',
            'sanitized_email': 'IPQS: Sanitized Email',
            'domain_age': 'IPQS: Domain Age',
            'first_seen': 'IPQS: First Seen'
        }
        self.phone_data_items = [
            'formatted',
            'local_format',
            'valid',
            'fraud_score',
            'recent_abuse',
            'VOIP',
            'prepaid',
            'risky',
            'active',
            'carrier',
            'line_type',
            'country',
            'city',
            'zip_code',
            'region',
            'dialing_code',
            'active_status',
            'leaked',
            'name',
            'timezone',
            'do_not_call',
        ]
        self.phone_data_items_friendly_names = {
            'formatted': 'IPQS: Formatted',
            'local_format': 'IPQS: Local Format',
            'valid': 'IPQS: Valid',
            'fraud_score': 'IPQS: Fraud Score',
            'recent_abuse': 'IPQS: Recent Abuse',
            'VOIP': 'IPQS: VOIP',
            'prepaid': 'IPQS: Prepaid',
            'risky': 'IPQS: Risky',
            'active': 'IPQS: Active',
            'carrier': 'IPQS: Carrier',
            'line_type': 'IPQS: Line Type',
            'country': 'IPQS: Country',
            'city': 'IPQS: City',
            'zip_code': 'IPQS: Zip Code',
            'region': 'IPQS: Region',
            'dialing_code': 'IPQS: Dialing Code',
            'active_status': 'IPQS: Active Status',
            'leaked': 'IPQS: Leaked',
            'name': 'IPQS: Name',
            'timezone': 'IPQS: Timezone',
            'do_not_call': 'IPQS: Do Not Call',
        }
        self.timestamp_items_friendly_name = {
            'human': ' Human',
            'timestamp': ' Timestamp',
            'iso': ' ISO'
        }
        self.timestamp_items = [
            'human',
            'timestamp',
            'iso'
        ]

    def criticality_color(self, criticality) -> str:
        """Method which maps the color to the criticality level."""
        mapper = {
            self.clean: self.rf_grey,
            self.low: self.rf_grey,
            self.medium: self.rf_yellow,
            self.suspicious: self.rf_yellow,
            self.high: self.rf_red,
            self.critical: self.rf_red,
            self.invalid: self.rf_red,
            self.disposable: self.rf_red,
            self.malware: self.rf_red,
            self.phishing: self.rf_red
        }
        return mapper.get(criticality, self.rf_white)

    def add_tag(self, tag_name: str, hex_color: str = None) -> None:
        """Helper method for adding a tag to the enriched attribute."""
        tag = MISPTag()
        tag_properties = {"name": tag_name}
        if hex_color:
            tag_properties["colour"] = hex_color
        tag.from_dict(**tag_properties)
        self.enriched_attribute.add_tag(tag)

    def ipqs_parser(self, query_response, enrich_type):
        """Helper method to call the enrichment function according to the type."""
        if enrich_type == IP_ENRICH:
            self.ip_reputation_data(query_response)
        elif enrich_type == URL_ENRICH:
            self.url_reputation_data(query_response)
        elif enrich_type == EMAIL_ENRICH:
            self.email_reputation_data(query_response)
        elif enrich_type == PHONE_ENRICH:
            self.phone_reputation_data(query_response)

    def ip_reputation_data(self, query_response):
        """Method to create the object for an IP address."""
        comment = "Results from IPQualityScore IP Reputation API"
        for ip_data_item in self.ip_data_items:
            if ip_data_item in query_response:
                data_item = self.ip_data_items_friendly_names[ip_data_item]
                data_item_value = str(query_response[ip_data_item])
                self.ipqs_object.add_attribute(**parse_attribute(comment, data_item, data_item_value))
                if ip_data_item == "fraud_score":
                    fraud_score = int(data_item_value)
                    self.ip_address_risk_scoring(fraud_score)

        self.ipqs_object.add_attribute(
            "Enriched attribute", **self.enriched_attribute
        )
        self.ipqs_object.add_reference(self.attribute['uuid'], 'related-to')
        self.misp_event.add_object(self.ipqs_object)

    def ip_address_risk_scoring(self, score):
        """Method to calculate the verdict for an IP address."""
        risk_criticality = ""
        if score == 100:
            risk_criticality = self.critical
        elif 85 <= score <= 99:
            risk_criticality = self.high
        elif 75 <= score <= 84:
            risk_criticality = self.medium
        elif 60 <= score <= 74:
            risk_criticality = self.suspicious
        elif score <= 59:
            risk_criticality = self.clean

        hex_color = self.criticality_color(risk_criticality)
        tag_name = f'IPQS:VERDICT="{risk_criticality}"'
        self.add_tag(tag_name, hex_color)

    def url_reputation_data(self, query_response):
        """Method to create the object for a URL/domain."""
        malware = False
        phishing = False
        risk_score = 0
        comment = "Results from IPQualityScore Malicious URL Scanner API"
        for url_data_item in self.url_data_items:
            if url_data_item in query_response:
                data_item_value = ""
                if url_data_item == "domain_age":
                    for timestamp_item in self.timestamp_items:
                        data_item = self.url_data_items_friendly_names[url_data_item] + \
                            self.timestamp_items_friendly_name[timestamp_item]
                        data_item_value = str(query_response[url_data_item][timestamp_item])
                        self.ipqs_object.add_attribute(**parse_attribute(comment, data_item, data_item_value))
                else:
                    data_item = self.url_data_items_friendly_names[url_data_item]
                    data_item_value = str(query_response[url_data_item])
                    self.ipqs_object.add_attribute(**parse_attribute(comment, data_item, data_item_value))

                if url_data_item == "malware":
                    malware = data_item_value
                if url_data_item == "phishing":
                    phishing = data_item_value
                if url_data_item == "risk_score":
                    risk_score = int(data_item_value)

        self.url_risk_scoring(risk_score, malware, phishing)
        self.ipqs_object.add_attribute(
            "Enriched attribute", **self.enriched_attribute
        )
        self.ipqs_object.add_reference(self.attribute['uuid'], 'related-to')
        self.misp_event.add_object(self.ipqs_object)

    def url_risk_scoring(self, score, malware, phishing):
        """Method to calculate the verdict for a URL/domain."""
        risk_criticality = ""
        if malware == 'True':
            risk_criticality = self.malware
        elif phishing == 'True':
            risk_criticality = self.phishing
        elif score >= 90:
            risk_criticality = self.high
        elif 80 <= score <= 89:
            risk_criticality = self.medium
        elif 70 <= score <= 79:
            risk_criticality = self.low
        elif 55 <= score <= 69:
            risk_criticality = self.suspicious
        elif score <= 54:
            risk_criticality = self.clean

        hex_color = self.criticality_color(risk_criticality)
        tag_name = f'IPQS:VERDICT="{risk_criticality}"'
        self.add_tag(tag_name, hex_color)

    def email_reputation_data(self, query_response):
        """Method to create the object for an email address."""
        comment = "Results from IPQualityScore Email Verification API"
        disposable = False
        valid = False
        fraud_score = 0
        for email_data_item in self.email_data_items:
            if email_data_item in query_response:
                data_item_value = ""
                if email_data_item not in ("domain_age", "first_seen"):
                    data_item = self.email_data_items_friendly_names[email_data_item]
                    data_item_value = str(query_response[email_data_item])
                    self.ipqs_object.add_attribute(**parse_attribute(comment, data_item, data_item_value))
                else:
                    for timestamp_item in self.timestamp_items:
                        data_item = self.email_data_items_friendly_names[email_data_item] + \
                            self.timestamp_items_friendly_name[timestamp_item]
                        data_item_value = str(query_response[email_data_item][timestamp_item])
                        self.ipqs_object.add_attribute(**parse_attribute(comment, data_item, data_item_value))

                if email_data_item == "disposable":
                    disposable = data_item_value
                if email_data_item == "valid":
                    valid = data_item_value
                if email_data_item == "fraud_score":
                    fraud_score = int(data_item_value)

        self.email_address_risk_scoring(fraud_score, disposable, valid)
        self.ipqs_object.add_attribute(
            "Enriched attribute", **self.enriched_attribute
        )
        self.ipqs_object.add_reference(self.attribute['uuid'], 'related-to')
        self.misp_event.add_object(self.ipqs_object)

    def email_address_risk_scoring(self, score, disposable, valid):
        """Method to calculate the verdict for an email address."""
        risk_criticality = ""
        if disposable == "True":
            risk_criticality = self.disposable
        elif valid == "False":
            risk_criticality = self.invalid
        elif score == 100:
            risk_criticality = self.high
        elif 88 <= score <= 99:
            risk_criticality = self.medium
        elif 80 <= score <= 87:
            risk_criticality = self.low
        elif score <= 79:
            risk_criticality = self.clean
        hex_color = self.criticality_color(risk_criticality)
        tag_name = f'IPQS:VERDICT="{risk_criticality}"'
        self.add_tag(tag_name, hex_color)

    def phone_reputation_data(self, query_response):
        """Method to create the object for a phone number."""
        fraud_score = 0
        valid = False
        active = False
        comment = "Results from IPQualityScore Phone Number Validation API"
        for phone_data_item in self.phone_data_items:
            if phone_data_item in query_response:
                data_item = self.phone_data_items_friendly_names[phone_data_item]
                data_item_value = str(query_response[phone_data_item])
                self.ipqs_object.add_attribute(**parse_attribute(comment, data_item, data_item_value))
                if phone_data_item == "active":
                    active = data_item_value
                if phone_data_item == "valid":
                    valid = data_item_value
                if phone_data_item == "fraud_score":
                    fraud_score = int(data_item_value)

        self.phone_address_risk_scoring(fraud_score, valid, active)
        self.ipqs_object.add_attribute(
            "Enriched attribute", **self.enriched_attribute
        )
        self.ipqs_object.add_reference(self.attribute['uuid'], 'related-to')
        self.misp_event.add_object(self.ipqs_object)

    def phone_address_risk_scoring(self, score, valid, active):
        """Method to calculate the verdict for a phone number."""
        risk_criticality = ""
        if valid == "False":
            risk_criticality = self.medium
        elif active == "False":
            risk_criticality = self.medium
        elif 90 <= score <= 100:
            risk_criticality = self.high
        elif 80 <= score <= 89:
            risk_criticality = self.low
        elif 50 <= score <= 79:
            risk_criticality = self.suspicious
        elif score <= 49:
            risk_criticality = self.clean
        hex_color = self.criticality_color(risk_criticality)
        tag_name = f'IPQS:VERDICT="{risk_criticality}"'
        self.add_tag(tag_name, hex_color)

    def get_results(self):
        """Returns the dictionary object to the MISP instance."""
        event = json.loads(self.misp_event.to_json())
        results = {key: event[key] for key in ('Attribute', 'Object')}
        return {'results': results}


def handler(q=False):
    """The function which accepts a JSON document to expand the values and returns a dictionary of the
    expanded values."""
    if q is False:
        return False
    request = json.loads(q)
    # check if the apikey is provided
    if not request.get('config') or not request['config'].get('apikey'):
        misperrors['error'] = 'IPQualityScore apikey is missing'
        return misperrors
    apikey = request['config'].get('apikey')
    # check that an attribute is added to the event
    if not request.get('attribute') or not check_input_attribute(request['attribute']):
        return {'error': f'{standard_error_message}, which should contain at least a type, a value and an uuid.'}

    attribute = request['attribute']
    attribute_type = attribute['type']
    attribute_value = attribute['value']

    # check if the attribute type is supported by IPQualityScore
    if attribute_type not in mispattributes['input']:
        return {'error': 'Unsupported attribute type for IPQualityScore enrichment'}
    request_handler = RequestHandler(apikey)
    enrich_type = ""
    if attribute_type in ip_query_input_type:
        enrich_type = IP_ENRICH
        json_response = request_handler.ipqs_lookup(IP_ENRICH, attribute_value)
    elif attribute_type in url_query_input_type:
        enrich_type = URL_ENRICH
        json_response = request_handler.ipqs_lookup(URL_ENRICH, attribute_value)
    elif attribute_type in email_query_input_type:
        enrich_type = EMAIL_ENRICH
        json_response = request_handler.ipqs_lookup(EMAIL_ENRICH, attribute_value)
    elif attribute_type in phone_query_input_type:
        enrich_type = PHONE_ENRICH
        json_response = request_handler.ipqs_lookup(PHONE_ENRICH, attribute_value)

    parser = IPQualityScoreParser(attribute)
    parser.ipqs_parser(json_response, enrich_type)
    return parser.get_results()


def introspection():
    """The function that returns a dict of the supported attributes (input and output) by your expansion module."""
    return mispattributes


def version():
    """The function that returns a dict with the version and the associated meta-data including potential
    configurations required of the module."""
    moduleinfo['config'] = moduleconfig
    return moduleinfo
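# A hedged end-to-end sketch of driving the module above (placeholder key and
# value; the attribute dict is the misp_standard format validated by
# check_input_attribute):
import json
import uuid

query = {
    "config": {"apikey": "<IPQS key>"},
    "attribute": {"type": "ip-dst", "value": "1.2.3.4", "uuid": str(uuid.uuid4())},
}
print(handler(json.dumps(query)))  # {'results': {'Attribute': [...], 'Object': [...]}}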
@@ -11,7 +11,7 @@ inputSource = ['link']
 moduleinfo = {'version': '0.2', 'author': 'Christian Studer',
               'description': 'Query Joe Sandbox API with a report URL to get the parsed data.',
               'module-type': ['expansion']}
-moduleconfig = ['apiurl', 'apikey', 'import_pe', 'import_mitre_attack']
+moduleconfig = ['apiurl', 'apikey', 'import_executable', 'import_mitre_attack']
 
 
 def handler(q=False):

@@ -21,7 +21,7 @@ def handler(q=False):
     apiurl = request['config'].get('apiurl') or 'https://jbxcloud.joesecurity.org/api'
     apikey = request['config'].get('apikey')
     parser_config = {
-        "import_pe": request["config"].get('import_pe', "false") == "true",
+        "import_pe": request["config"].get('import_executable', "false") == "true",
         "mitre_attack": request["config"].get('import_mitre_attack', "false") == "true",
     }
 
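# The rename above only touches the user-facing config key; a sketch of a
# module config that keeps executable import enabled under the new name
# (values are illustrative):
config = {
    "apiurl": "https://jbxcloud.joesecurity.org/api",
    "apikey": "<Joe Sandbox key>",
    "import_executable": "true",
    "import_mitre_attack": "true",
}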
@@ -1,5 +1,7 @@
 #!/usr/bin/env python3
 """
+Deprecation notice: this module will be deprecated by December 2021, please use vmware_nsx module.
+
 Module (type "expansion") to query a Lastline report from an analysis link.
 """
 import json
@@ -1,5 +1,7 @@
 #!/usr/bin/env python3
 """
+Deprecation notice: this module will be deprecated by December 2021, please use vmware_nsx module.
+
 Module (type "expansion") to submit files and URLs to Lastline for analysis.
 """
 import base64
@@ -0,0 +1,239 @@
# Written by mohlcyber 13.08.2021
# MISP Module for McAfee MVISION Insights to query campaign details

import json
import logging
import requests
import sys

from . import check_input_attribute, standard_error_message
from pymisp import MISPAttribute, MISPEvent, MISPObject

misperrors = {'error': 'Error'}
mispattributes = {'input': ["md5", "sha1", "sha256"],
                  'format': 'misp_standard'}

# possible module-types: 'expansion', 'hover' or both
moduleinfo = {'version': '1', 'author': 'Martin Ohl',
              'description': 'Lookup McAfee MVISION Insights Details',
              'module-type': ['hover']}

# config fields that your code expects from the site admin
moduleconfig = ['api_key', 'client_id', 'client_secret']


class MVAPI():
    def __init__(self, attribute, api_key, client_id, client_secret):
        self.misp_event = MISPEvent()
        self.attribute = MISPAttribute()
        self.attribute.from_dict(**attribute)
        self.misp_event.add_attribute(**self.attribute)

        self.base_url = 'https://api.mvision.mcafee.com'
        self.session = requests.Session()

        self.api_key = api_key
        auth = (client_id, client_secret)

        self.logging()
        self.auth(auth)

    def logging(self):
        self.logger = logging.getLogger('logs')
        self.logger.setLevel('INFO')
        handler = logging.StreamHandler()
        formatter = logging.Formatter("%(asctime)s;%(levelname)s;%(message)s")
        handler.setFormatter(formatter)
        self.logger.addHandler(handler)

    def auth(self, auth):
        iam_url = "https://iam.mcafee-cloud.com/iam/v1.1/token"

        headers = {
            'x-api-key': self.api_key,
            'Content-Type': 'application/vnd.api+json'
        }

        payload = {
            "grant_type": "client_credentials",
            "scope": "ins.user ins.suser ins.ms.r"
        }

        res = self.session.post(iam_url, headers=headers, auth=auth, data=payload)

        if res.status_code != 200:
            self.logger.error('Could not authenticate to get the IAM token: {0} - {1}'.format(res.status_code, res.text))
            sys.exit()
        else:
            self.logger.info('Successfully authenticated.')
            access_token = res.json()['access_token']
            headers['Authorization'] = 'Bearer ' + access_token
            self.session.headers = headers

    def search_ioc(self):
        filters = {
            'filter[type][eq]': self.attribute.type,
            'filter[value]': self.attribute.value,
            'fields': 'id, type, value, coverage, uid, is_coat, is_sdb_dirty, category, comment, campaigns, threat, prevalence'
        }
        res = self.session.get(self.base_url + '/insights/v2/iocs', params=filters)

        if res.ok:
            if len(res.json()['data']) == 0:
                self.logger.info('No hash details found in MVISION Insights.')
            else:
                self.logger.info('Successfully retrieved MVISION Insights details.')
                self.logger.debug(res.text)
            return res.json()
        else:
            self.logger.error('Error in search_ioc. HTTP {0} - {1}'.format(str(res.status_code), res.text))
            sys.exit()

    def prep_result(self, ioc):
        res = ioc['data'][0]
        results = []

        # Parse out Attribute Category
        category_attr = {
            'type': 'text',
            'object_relation': 'text',
            'value': 'Attribute Category: {0}'.format(res['attributes']['category'])
        }
        results.append(category_attr)

        # Parse out Attribute Comment
        comment_attr = {
            'type': 'text',
            'object_relation': 'text',
            'value': 'Attribute Comment: {0}'.format(res['attributes']['comment'])
        }
        results.append(comment_attr)

        # Parse out Attribute DAT Coverage
        cover_attr = {
            'type': 'text',
            'object_relation': 'text',
            'value': 'Dat Version Coverage: {0}'.format(res['attributes']['coverage']['dat_version']['min'])
        }
        results.append(cover_attr)

        # Parse out if Dirty
        cover_attr = {
            'type': 'text',
            'object_relation': 'text',
            'value': 'Is Dirty: {0}'.format(res['attributes']['is-sdb-dirty'])
        }
        results.append(cover_attr)

        # Parse out targeted countries
        countries_dict = []
        countries = res['attributes']['prevalence']['countries']

        for country in countries:
            countries_dict.append(country['iso_code'])

        country_attr = {
            'type': 'text',
            'object_relation': 'text',
            'value': 'Targeted Countries: {0}'.format(countries_dict)
        }
        results.append(country_attr)

        # Parse out targeted sectors
        sectors_dict = []
        sectors = res['attributes']['prevalence']['sectors']

        for sector in sectors:
            sectors_dict.append(sector['sector'])

        sector_attr = {
            'type': 'text',
            'object_relation': 'text',
            'value': 'Targeted Sectors: {0}'.format(sectors_dict)
        }
        results.append(sector_attr)

        # Parse out Threat Classification
        threat_class_attr = {
            'type': 'text',
            'object_relation': 'text',
            'value': 'Threat Classification: {0}'.format(res['attributes']['threat']['classification'])
        }
        results.append(threat_class_attr)

        # Parse out Threat Name
        threat_name_attr = {
            'type': 'text',
            'object_relation': 'text',
            'value': 'Threat Name: {0}'.format(res['attributes']['threat']['name'])
        }
        results.append(threat_name_attr)

        # Parse out Threat Severity
        threat_sev_attr = {
            'type': 'text',
            'object_relation': 'text',
            'value': 'Threat Severity: {0}'.format(res['attributes']['threat']['severity'])
        }
        results.append(threat_sev_attr)

        # Parse out Attribute ID
        attr_id = {
            'type': 'text',
            'object_relation': 'text',
            'value': 'Attribute ID: {0}'.format(res['id'])
        }
        results.append(attr_id)

        # Parse out Campaign Relationships
        campaigns = ioc['included']

        for campaign in campaigns:
            campaign_attr = {
                'type': 'campaign-name',
                'object_relation': 'campaign-name',
                'value': campaign['attributes']['name']
            }
            results.append(campaign_attr)

        mv_insights_obj = MISPObject(name='MVISION Insights Details')
        for mvi_res in results:
            mv_insights_obj.add_attribute(**mvi_res)
        mv_insights_obj.add_reference(self.attribute.uuid, 'mvision-insights-details')

        self.misp_event.add_object(mv_insights_obj)

        event = json.loads(self.misp_event.to_json())
        results_mvi = {key: event[key] for key in ('Attribute', 'Object') if (key in event and event[key])}

        return {'results': results_mvi}


def handler(q=False):
    if q is False:
        return False
    request = json.loads(q)

    if not request.get('config') or not request['config'].get('api_key') or not request['config'].get('client_id') or not request['config'].get('client_secret'):
        misperrors['error'] = "Please provide MVISION API Key, Client ID and Client Secret."
        return misperrors
    if request['attribute']['type'] not in mispattributes['input']:
        return {'error': 'Unsupported attribute type. Please use {0}'.format(mispattributes['input'])}

    api_key = request['config']['api_key']
    client_id = request['config']['client_id']
    client_secret = request['config']['client_secret']
    attribute = request['attribute']

    mvi = MVAPI(attribute, api_key, client_id, client_secret)
    res = mvi.search_ioc()
    return mvi.prep_result(res)


def introspection():
    return mispattributes


def version():
    moduleinfo['config'] = moduleconfig
    return moduleinfo
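# A standalone sketch of the client-credentials exchange MVAPI.auth() performs
# above (credentials are placeholders; endpoint and scopes are taken verbatim
# from the module):
import requests

res = requests.post(
    "https://iam.mcafee-cloud.com/iam/v1.1/token",
    headers={"x-api-key": "<api key>", "Content-Type": "application/vnd.api+json"},
    auth=("<client id>", "<client secret>"),
    data={"grant_type": "client_credentials", "scope": "ins.user ins.suser ins.ms.r"},
)
token = res.json()["access_token"]
# Subsequent /insights/v2/iocs lookups carry 'Authorization: Bearer <token>'.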
@ -0,0 +1,129 @@
|
|||
import json
|
||||
import requests
|
||||
from . import check_input_attribute, standard_error_message
|
||||
from pymisp import MISPEvent, MISPObject
|
||||
|
||||
misperrors = {'error': 'Error'}
|
||||
mispattributes = {'input': ['ip-src', 'ip-src|port', 'ip-dst', 'ip-dst|port'], 'format': 'misp_standard'}
|
||||
moduleinfo = {'version': '1', 'author': 'Jeroen Pinoy',
|
||||
'description': "An expansion module to enrich an ip with geolocation and asn information from an mmdb server "
|
||||
"such as ip.circl.lu.",
|
||||
'module-type': ['expansion', 'hover']}
|
||||
moduleconfig = ["custom_API", "db_source_filter"]
|
||||
mmdblookup_url = 'https://ip.circl.lu/'
|
||||
|
||||
|
||||
class MmdbLookupParser():
|
||||
def __init__(self, attribute, mmdblookupresult, api_url):
|
||||
self.attribute = attribute
|
||||
self.mmdblookupresult = mmdblookupresult
|
||||
self.api_url = api_url
|
||||
self.misp_event = MISPEvent()
|
||||
self.misp_event.add_attribute(**attribute)
|
||||
|
||||
def get_result(self):
|
||||
event = json.loads(self.misp_event.to_json())
|
||||
results = {key: event[key] for key in ('Attribute', 'Object') if (key in event and event[key])}
|
||||
return {'results': results}
|
||||
|
||||
def parse_mmdblookup_information(self):
|
||||
# There is a chance some db's have a hit while others don't so we have to check if entry is empty each time
|
||||
for result_entry in self.mmdblookupresult:
|
||||
if result_entry['country_info']:
|
||||
mmdblookup_object = MISPObject('geolocation')
|
||||
mmdblookup_object.add_attribute('country',
|
||||
**{'type': 'text', 'value': result_entry['country_info']['Country']})
|
||||
mmdblookup_object.add_attribute('countrycode',
|
||||
**{'type': 'text', 'value': result_entry['country']['iso_code']})
|
||||
mmdblookup_object.add_attribute('latitude',
|
||||
**{'type': 'float',
|
||||
'value': result_entry['country_info']['Latitude (average)']})
|
||||
mmdblookup_object.add_attribute('longitude',
|
||||
**{'type': 'float',
|
||||
'value': result_entry['country_info']['Longitude (average)']})
|
||||
mmdblookup_object.add_attribute('text',
|
||||
**{'type': 'text',
|
||||
'value': 'db_source: {}. build_db: {}. Latitude and longitude are country average.'.format(
|
||||
result_entry['meta']['db_source'],
|
||||
result_entry['meta']['build_db'])})
|
||||
mmdblookup_object.add_reference(self.attribute['uuid'], 'related-to')
|
||||
self.misp_event.add_object(mmdblookup_object)
|
||||
if 'AutonomousSystemNumber' in result_entry['country']:
|
||||
mmdblookup_object_asn = MISPObject('asn')
|
||||
mmdblookup_object_asn.add_attribute('asn',
|
||||
**{'type': 'text',
|
||||
'value': result_entry['country'][
|
||||
'AutonomousSystemNumber']})
|
||||
mmdblookup_object_asn.add_attribute('description',
|
||||
**{'type': 'text',
|
||||
'value': 'ASNOrganization: {}. db_source: {}. build_db: {}.'.format(
|
||||
result_entry['country'][
|
||||
'AutonomousSystemOrganization'],
|
||||
result_entry['meta']['db_source'],
|
||||
result_entry['meta']['build_db'])})
|
||||
mmdblookup_object_asn.add_reference(self.attribute['uuid'], 'related-to')
|
||||
self.misp_event.add_object(mmdblookup_object_asn)
|
||||
|
||||
|
||||
def check_url(url):
|
||||
return "{}/".format(url) if not url.endswith('/') else url


def handler(q=False):
    if q is False:
        return False
    request = json.loads(q)
    if not request.get('attribute') or not check_input_attribute(request['attribute']):
        return {'error': f'{standard_error_message}, which should contain at least a type, a value and an uuid.'}
    attribute = request['attribute']
    if attribute.get('type') == 'ip-src':
        toquery = attribute['value']
    elif attribute.get('type') == 'ip-src|port':
        toquery = attribute['value'].split('|')[0]
    elif attribute.get('type') == 'ip-dst':
        toquery = attribute['value']
    elif attribute.get('type') == 'ip-dst|port':
        toquery = attribute['value'].split('|')[0]
    else:
        misperrors['error'] = 'There is no attribute of type ip-src or ip-dst provided as input'
        return misperrors
    api_url = check_url(request['config']['custom_API']) if 'config' in request and request['config'].get(
        'custom_API') else mmdblookup_url
    r = requests.get("{}/geolookup/{}".format(api_url, toquery))
    if r.status_code == 200:
        mmdblookupresult = r.json()
        if not mmdblookupresult or len(mmdblookupresult) == 0:
            misperrors['error'] = 'Empty result returned by server'
            return misperrors
        if 'config' in request and request['config'].get('db_source_filter'):
            db_source_filter = request['config'].get('db_source_filter')
            mmdblookupresult = [entry for entry in mmdblookupresult if entry['meta']['db_source'] == db_source_filter]
            if not mmdblookupresult or len(mmdblookupresult) == 0:
                misperrors['error'] = 'There was no result with the selected db_source'
                return misperrors
        # Server might return one or multiple entries which could all be empty, we check if there is at least one
        # non-empty result below
        empty_result = True
        for lookup_result_entry in mmdblookupresult:
            if lookup_result_entry['country_info']:
                empty_result = False
                break
        if empty_result:
            misperrors['error'] = 'Empty result returned by server'
            return misperrors
    else:
        misperrors['error'] = 'API not accessible - http status code {} was returned'.format(r.status_code)
        return misperrors
    parser = MmdbLookupParser(attribute, mmdblookupresult, api_url)
    parser.parse_mmdblookup_information()
    result = parser.get_result()
    return result


def introspection():
    return mispattributes


def version():
    moduleinfo['config'] = moduleconfig
    return moduleinfo

@@ -0,0 +1,142 @@
import json
import sys
import base64
#from distutils.util import strtobool

import io
import zipfile

from pymisp import PyMISP
from mwdblib import MWDB

misperrors = {'error': 'Error'}
mispattributes = {'input': ['attachment', 'malware-sample'], 'output': ['link']}
moduleinfo = {'version': '1', 'author': 'Koen Van Impe',
              'description': 'Module to push malware samples to a MWDB instance',
              'module-type': ['expansion']}

moduleconfig = ['mwdb_apikey', 'mwdb_url', 'mwdb_misp_attribute', 'mwdb_public', 'include_tags_event', 'include_tags_attribute']

pymisp_keys_file = "/var/www/MISP/PyMISP/"
mwdb_public_default = True

"""
An expansion module to push malware samples to a MWDB (https://github.com/CERT-Polska/mwdb-core) instance.
This module does not push samples to a sandbox. This can be achieved via Karton (connected to the MWDB).

Does:
- Upload of attachment or malware sample to MWDB
- Tags of events and/or attributes are added to MWDB.
- Comment of the MISP attribute is added to MWDB.
- A link back to the MISP event is added to MWDB via the MWDB attribute.
- A link to the MWDB attribute is added as an enriched attribute to the MISP event.

Requires
- mwdblib installed (pip install mwdblib)
- (optional) keys.py file to add tags of events/attributes to MWDB
- (optional) MWDB "attribute" created for the link back to MISP (defined in mwdb_misp_attribute)
"""


def handler(q=False):
    if q is False:
        return False
    request = json.loads(q)

    try:
        data = request.get("data")
        if 'malware-sample' in request:
            # malicious samples are encrypted with zip (password infected) and then base64 encoded
            sample_filename = request.get("malware-sample").split("|", 1)[0]
            data = base64.b64decode(data)
            fl = io.BytesIO(data)
            zf = zipfile.ZipFile(fl)
            sample_hashname = zf.namelist()[0]
            data = zf.read(sample_hashname, b"infected")
            zf.close()
        elif 'attachment' in request:
            # All attachments get base64 encoded
            sample_filename = request.get("attachment")
            data = base64.b64decode(data)

        else:
            misperrors['error'] = "No malware sample or attachment supplied"
            return misperrors
    except Exception:
        misperrors['error'] = "Unable to process submitted sample data"
        return misperrors

    if (request["config"].get("mwdb_apikey") is None) or (request["config"].get("mwdb_url") is None):
        misperrors["error"] = "Missing MWDB API key or server URL"
        return misperrors

    mwdb_misp_attribute = request["config"].get("mwdb_misp_attribute")
    mwdb_public = request["config"].get("mwdb_public", mwdb_public_default)

    include_tags_event = request["config"].get("include_tags_event")
    include_tags_attribute = request["config"].get("include_tags_attribute")
    misp_event_id = request.get("event_id")
    misp_attribute_uuid = request.get("attribute_uuid")
    misp_attribute_comment = ""
    mwdb_tags = []
    misp_info = ""

    try:
        if include_tags_event:
            sys.path.append(pymisp_keys_file)
            from keys import misp_url, misp_key, misp_verifycert
            misp = PyMISP(misp_url, misp_key, misp_verifycert, False)
            misp_event = misp.get_event(misp_event_id)
            if "Event" in misp_event:
                misp_info = misp_event["Event"]["info"]
                if "Tag" in misp_event["Event"]:
                    tags = misp_event["Event"]["Tag"]
                    for tag in tags:
                        if "misp-galaxy" not in tag["name"]:
                            mwdb_tags.append(tag["name"])
        if include_tags_attribute:
            sys.path.append(pymisp_keys_file)
            from keys import misp_url, misp_key, misp_verifycert
            misp = PyMISP(misp_url, misp_key, misp_verifycert, False)
            misp_attribute = misp.get_attribute(misp_attribute_uuid)
            if "Attribute" in misp_attribute:
                if "Tag" in misp_attribute["Attribute"]:
                    tags = misp_attribute["Attribute"]["Tag"]
                    for tag in tags:
                        if "misp-galaxy" not in tag["name"]:
                            mwdb_tags.append(tag["name"])
                misp_attribute_comment = misp_attribute["Attribute"]["comment"]
    except Exception:
        misperrors['error'] = "Unable to read PyMISP (keys.py) configuration file"
        return misperrors

    try:
        mwdb = MWDB(api_key=request["config"].get("mwdb_apikey"), api_url=request["config"].get("mwdb_url"))
        if mwdb_misp_attribute and len(mwdb_misp_attribute) > 0:
            metakeys = {mwdb_misp_attribute: misp_event_id}
        else:
            metakeys = False
        file_object = mwdb.upload_file(sample_filename, data, metakeys=metakeys, public=mwdb_public)
        for tag in mwdb_tags:
            file_object.add_tag(tag)
        if len(misp_attribute_comment) < 1:
            misp_attribute_comment = "MISP attribute {}".format(misp_attribute_uuid)
        file_object.add_comment(misp_attribute_comment)
        if len(misp_info) > 0:
            file_object.add_comment("Fetched from event {} - {}".format(misp_event_id, misp_info))
        mwdb_link = request["config"].get("mwdb_url").replace("/api", "/file/") + "{}".format(file_object.md5)
    except Exception:
        misperrors['error'] = "Unable to send sample to MWDB instance"
        return misperrors

    r = {'results': [{'types': 'link', 'values': mwdb_link, 'comment': 'Link to MWDB sample'}]}
    return r


def introspection():
    return mispattributes


def version():
    moduleinfo['config'] = moduleconfig
    return moduleinfo

@@ -4,6 +4,7 @@ import np
import ezodf
import pandas_ods_reader
import io
import logging

misperrors = {'error': 'Error'}
mispattributes = {'input': ['attachment'],
@@ -35,13 +36,12 @@ def handler(q=False):
    num_sheets = len(doc.sheets)
    try:
        for i in range(0, num_sheets):
            ods = pandas_ods_reader.read_ods(ods_file, i, headers=False)
            ods = pandas_ods_reader.algo.read_data(pandas_ods_reader.parsers.ods, ods_file, i, headers=False)
            ods_content = ods_content + "\n" + ods.to_string(max_rows=None)
        print(ods_content)
        return {'results': [{'types': ['freetext'], 'values': ods_content, 'comment': ".ods-to-text from file " + filename},
                            {'types': ['text'], 'values': ods_content, 'comment': ".ods-to-text from file " + filename}]}
    except Exception as e:
        print(e)
        logging.exception(e)
        err = "Couldn't analyze file as .ods. Error was: " + str(e)
        misperrors['error'] = err
        return misperrors

@@ -0,0 +1,140 @@
import json
import requests
from . import check_input_attribute, standard_error_message
from collections import defaultdict
from pymisp import MISPEvent, MISPObject

misperrors = {'error': 'Error'}

mispattributes = {'input': ['ip-src', 'ip-dst', 'ssh-fingerprint'],
                  'format': 'misp_standard'}

moduleinfo = {'version': '1', 'author': 'Jean-Louis Huynen',
              'description': 'An expansion module to enrich SSH key fingerprints and IP addresses with information collected by passive-ssh',
              'module-type': ['expansion', 'hover']}

moduleconfig = ["custom_api_url", "api_user", "api_key"]

passivessh_url = 'https://passivessh.circl.lu/'

host_query = '/host/ssh'
fingerprint_query = '/fingerprint/all'


class PassivesshParser():
    def __init__(self, attribute, passivesshresult):
        self.attribute = attribute
        self.passivesshresult = passivesshresult
        self.misp_event = MISPEvent()
        self.misp_event.add_attribute(**attribute)
        self.references = defaultdict(list)

    def get_result(self):
        if self.references:
            self.__build_references()
        event = json.loads(self.misp_event.to_json())
        results = {key: event[key] for key in (
            'Attribute', 'Object') if (key in event and event[key])}
        return {'results': results}

    def parse_passivessh_information(self):
        passivessh_object = MISPObject('passive-ssh')
        if 'first_seen' in self.passivesshresult:
            passivessh_object.add_attribute(
                'first_seen', **{'type': 'datetime', 'value': self.passivesshresult['first_seen']})
        if 'last_seen' in self.passivesshresult:
            passivessh_object.add_attribute(
                'last_seen', **{'type': 'datetime', 'value': self.passivesshresult['last_seen']})
        if 'base64' in self.passivesshresult:
            passivessh_object.add_attribute(
                'base64', **{'type': 'text', 'value': self.passivesshresult['base64']})
        if 'keys' in self.passivesshresult:
            for key in self.passivesshresult['keys']:
                passivessh_object.add_attribute(
                    'fingerprint', **{'type': 'ssh-fingerprint', 'value': key['fingerprint']})
        if 'hosts' in self.passivesshresult:
            for host in self.passivesshresult['hosts']:
                passivessh_object.add_attribute(
                    'host', **{'type': 'ip-dst', 'value': host})

        passivessh_object.add_reference(self.attribute['uuid'], 'related-to')
        self.misp_event.add_object(passivessh_object)

    def __build_references(self):
        for object_uuid, references in self.references.items():
            for misp_object in self.misp_event.objects:
                if misp_object.uuid == object_uuid:
                    for reference in references:
                        misp_object.add_reference(**reference)
                    break


def check_url(url):
    return "{}/".format(url) if not url.endswith('/') else url


def handler(q=False):

    if q is False:
        return False
    request = json.loads(q)

    api_url = check_url(request['config']['custom_api_url']) if request['config'].get(
        'custom_api_url') else passivessh_url

    if request['config'].get('api_user'):
        api_user = request['config'].get('api_user')
    else:
        misperrors['error'] = 'passive-ssh user required'
        return misperrors
    if request['config'].get('api_key'):
        api_key = request['config'].get('api_key')
    else:
        misperrors['error'] = 'passive-ssh password required'
        return misperrors

    if not request.get('attribute') or not check_input_attribute(request['attribute']):
        return {'error': f'{standard_error_message}, which should contain at least a type, a value and an uuid.'}
    attribute = request['attribute']
    if attribute.get('type') == 'ip-src':
        query_path = host_query
    elif attribute.get('type') == 'ip-dst':
        query_path = host_query
    elif attribute.get('type') == 'ssh-fingerprint':
        query_path = fingerprint_query
    else:
        misperrors['error'] = 'Unsupported attribute type: ip-src, ip-dst or ssh-fingerprint expected'
        return misperrors

    r = requests.get("{}{}/{}".format(api_url, query_path,
                                      attribute['value']), auth=(api_user, api_key))
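    # For reference, roughly equivalent to (placeholder credentials):
    #   curl -u <api_user>:<api_key> https://passivessh.circl.lu/host/ssh/<ip>
    # (or /fingerprint/all/<fingerprint> for ssh-fingerprint attributes)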

    if r.status_code == 200:
        passivesshresult = r.json()
        if not passivesshresult:
            misperrors['error'] = 'Empty result'
            return misperrors
    elif r.status_code == 404:
        misperrors['error'] = 'No result found (404) for the queried value'
        return misperrors
    else:
        misperrors['error'] = 'API not accessible'
        return misperrors

    parser = PassivesshParser(attribute, passivesshresult)
    parser.parse_passivessh_information()
    result = parser.get_result()

    return result


def introspection():
    return mispattributes


def version():
    moduleinfo['config'] = moduleconfig
    return moduleinfo

@@ -0,0 +1,221 @@
import logging
import json

from pymisp import MISPAttribute, MISPEvent, MISPTag, MISPObject
from . import check_input_attribute, checking_error, standard_error_message

from qintel_helper import search_qsentry

logger = logging.getLogger('qintel_qsentry')
logger.setLevel(logging.DEBUG)

moduleinfo = {
    'version': '1.0',
    'author': 'Qintel, LLC',
    'description': 'Query Qintel QSentry for ip intelligence',
    'module-type': ['hover', 'expansion']
}

moduleconfig = ['token', 'remote']

misperrors = {'error': 'Error'}

mispattributes = {
    'input': ['ip-src', 'ip-dst'],
    'output': ['ip-src', 'ip-dst', 'AS', 'freetext'],
    'format': 'misp_standard'
}

TAG_COLOR = {
    'benign': '#27ae60',
    'suspicious': '#e6a902',
    'malicious': '#c0392b'
}

CLIENT_HEADERS = {
    'User-Agent': f"MISP/{moduleinfo['version']}",
}


def _return_error(message):
    misperrors['error'] = message
    return misperrors


def _make_tags(enriched_attr, result):

    for tag in result['tags']:
        color = TAG_COLOR['suspicious']
        if tag == 'criminal':
            color = TAG_COLOR['malicious']

        t = MISPTag()
        t.from_dict(**{
            'name': f'qintel:tag="{tag}"',
            'colour': color
        })
        enriched_attr.add_tag(**t)

    return enriched_attr
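    # Illustrative result: a QSentry 'criminal' tag becomes the MISP tag
    # 'qintel:tag="criminal"' coloured TAG_COLOR['malicious'] ('#c0392b');
    # any other tag gets the 'suspicious' colour.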


def _make_enriched_attr(event, result, orig_attr):

    enriched_object = MISPObject('Qintel Threat Enrichment')
    enriched_object.add_reference(orig_attr.uuid, 'related-to')

    enriched_attr = MISPAttribute()
    enriched_attr.from_dict(**{
        'value': orig_attr.value,
        'type': orig_attr.type,
        'distribution': 0,
        'object_relation': 'enriched-attr',
        'to_ids': orig_attr.to_ids
    })

    enriched_attr = _make_tags(enriched_attr, result)
    enriched_object.add_attribute(**enriched_attr)

    comment_attr = MISPAttribute()
    comment_attr.from_dict(**{
        'value': '\n'.join(result.get('descriptions', [])),
        'type': 'text',
        'object_relation': 'descriptions',
        'distribution': 0
    })
    enriched_object.add_attribute(**comment_attr)

    last_seen = MISPAttribute()
    last_seen.from_dict(**{
        'value': result.get('last_seen'),
        'type': 'datetime',
        'object_relation': 'last-seen',
        'distribution': 0
    })
    enriched_object.add_attribute(**last_seen)

    event.add_attribute(**orig_attr)
    event.add_object(**enriched_object)

    return event


def _make_asn_attr(event, result, orig_attr):

    asn_object = MISPObject('asn')
    asn_object.add_reference(orig_attr.uuid, 'related-to')

    asn_attr = MISPAttribute()
    asn_attr.from_dict(**{
        'type': 'AS',
        'value': result.get('asn'),
        'object_relation': 'asn',
        'distribution': 0
    })
    asn_object.add_attribute(**asn_attr)

    org_attr = MISPAttribute()
    org_attr.from_dict(**{
        'type': 'text',
        'value': result.get('asn_name', 'unknown').title(),
        'object_relation': 'description',
        'distribution': 0
    })
    asn_object.add_attribute(**org_attr)

    event.add_object(**asn_object)

    return event


def _format_hover(event, result):

    enriched_object = event.get_objects_by_name('Qintel Threat Enrichment')[0]

    tags = ', '.join(result.get('tags'))
    enriched_object.add_attribute('Tags', type='text', value=tags)

    return event


def _format_result(attribute, result):

    event = MISPEvent()

    orig_attr = MISPAttribute()
    orig_attr.from_dict(**attribute)

    event = _make_enriched_attr(event, result, orig_attr)
    event = _make_asn_attr(event, result, orig_attr)

    return event


def _check_config(config):
    if not config:
        return False

    if not isinstance(config, dict):
        return False

    if config.get('token', '') == '':
        return False

    return True


def _check_request(request):
    if not request.get('attribute'):
        return f'{standard_error_message}, {checking_error}'

    check_reqs = ('type', 'value')
    if not check_input_attribute(request['attribute'],
                                 requirements=check_reqs):
        return f'{standard_error_message}, {checking_error}'

    if request['attribute']['type'] not in mispattributes['input']:
        return 'Unsupported attribute type'


def handler(q=False):
    if not q:
        return False

    request = json.loads(q)
    config = request.get('config')

    if not _check_config(config):
        return _return_error('Missing Qintel token')

    check_request_error = _check_request(request)
    if check_request_error:
        return _return_error(check_request_error)

    search_args = {
        'token': config['token'],
        'remote': config.get('remote')
    }

    try:
        result = search_qsentry(request['attribute']['value'], **search_args)
    except Exception as e:
        return _return_error(str(e))

    event = _format_result(request['attribute'], result)
    if not request.get('event_id'):
        event = _format_hover(event, result)

    event = json.loads(event.to_json())

    ret_result = {key: event[key] for key in ('Attribute', 'Object') if key
                  in event}
    return {'results': ret_result}


def introspection():
    return mispattributes


def version():
    moduleinfo['config'] = moduleconfig
    return moduleinfo

@@ -1,8 +1,14 @@
import json
import logging
import requests
from requests.exceptions import HTTPError, ProxyError,\
    InvalidURL, ConnectTimeout, ConnectionError
from requests.exceptions import (
    HTTPError,
    ProxyError,
    InvalidURL,
    ConnectTimeout,
    ConnectionError,
)
from typing import Optional, List, Tuple, Dict
from . import check_input_attribute, checking_error, standard_error_message
import platform
import os

@@ -10,47 +16,63 @@ from urllib.parse import quote, urlparse
from pymisp import MISPAttribute, MISPEvent, MISPTag, MISPObject

moduleinfo = {
    'version': '1.0.1',
    'author': 'Recorded Future',
    'description': 'Module to retrieve data from Recorded Future',
    'module-type': ['expansion', 'hover']
    "version": "2.0.0",
    "author": "Recorded Future",
    "description": "Module to retrieve data from Recorded Future",
    "module-type": ["expansion", "hover"],
}

moduleconfig = ['token', 'proxy_host', 'proxy_port', 'proxy_username', 'proxy_password']
moduleconfig = ["token", "proxy_host", "proxy_port", "proxy_username", "proxy_password"]

misperrors = {'error': 'Error'}
misperrors = {"error": "Error"}

ATTRIBUTES = [
    'ip',
    'ip-src',
    'ip-dst',
    'domain',
    'hostname',
    'md5',
    'sha1',
    'sha256',
    'uri',
    'url',
    'vulnerability',
    'weakness'
GALAXY_FILE_PATH = "https://raw.githubusercontent.com/MISP/misp-galaxy/main/clusters/"

ATTRIBUTESTYPES = [
    "ip",
    "ip-src",
    "ip-dst",
    "ip-src|port",
    "ip-dst|port",
    "domain",
    "hostname",
    "md5",
    "sha1",
    "sha256",
    "uri",
    "url",
    "vulnerability",
    "weakness",
]

OUTPUTATTRIBUTESTYPES = ATTRIBUTESTYPES + [
    "email-src",
    "malware-sample",
    "text",
    "target-org",
    "threat-actor",
    "target-user",
]

mispattributes = {
    'input': ATTRIBUTES,
    'output': ATTRIBUTES + ['email-src', 'text'],
    'format': 'misp_standard'
    "input": ATTRIBUTESTYPES,
    "output": OUTPUTATTRIBUTESTYPES,
    "format": "misp_standard",
}

LOGGER = logging.getLogger('recorded_future')
LOGGER = logging.getLogger("recorded_future")
LOGGER.setLevel(logging.INFO)


class RequestHandler:
    """A class for handling any outbound requests from this module."""

    def __init__(self):
        self.session = requests.Session()
        self.app_id = f'{os.path.basename(__file__)}/{moduleinfo["version"]} ({platform.platform()}) ' \
                      f'misp_enrichment/{moduleinfo["version"]} python-requests/{requests.__version__}'
        self.app_id = (
            f'{os.path.basename(__file__)}/{moduleinfo["version"]} ({platform.platform()}) '
            f'misp_enrichment/{moduleinfo["version"]} python-requests/{requests.__version__}'
        )
        self.proxies = None
        self.rf_token = None


@@ -58,27 +80,28 @@ class RequestHandler:
        """General get method with proxy error handling."""
        try:
            timeout = 7 if self.proxies else None
            response = self.session.get(url, headers=headers, proxies=self.proxies, timeout=timeout)
            response = self.session.get(
                url, headers=headers, proxies=self.proxies, timeout=timeout
            )
            response.raise_for_status()
            return response
        except (ConnectTimeout, ProxyError, InvalidURL) as error:
            msg = 'Error connecting with proxy, please check the Recorded Future app proxy settings.'
            LOGGER.error(f'{msg} Error: {error}')
            misperrors['error'] = msg
            msg = "Error connecting with proxy, please check the Recorded Future app proxy settings."
            LOGGER.error(f"{msg} Error: {error}")
            misperrors["error"] = msg
            raise

    def rf_lookup(self, category: str, ioc: str) -> requests.Response:
        """Do a lookup call using Recorded Future's ConnectAPI."""
        parsed_ioc = quote(ioc, safe='')
        url = f'https://api.recordedfuture.com/v2/{category}/{parsed_ioc}?fields=risk%2CrelatedEntities'
        headers = {'X-RFToken': self.rf_token,
                   'User-Agent': self.app_id}
        parsed_ioc = quote(ioc, safe="")
        url = f"https://api.recordedfuture.com/gw/misp/lookup/{category}/{parsed_ioc}"
        headers = {"X-RFToken": self.rf_token, "User-Agent": self.app_id}
        try:
            response = self.get(url, headers)
        except HTTPError as error:
            msg = f'Error when requesting data from Recorded Future. {error.response}: {error.response.reason}'
            msg = f"Error when requesting data from Recorded Future. {error.response}: {error.response.reason}"
            LOGGER.error(msg)
            misperrors['error'] = msg
            misperrors["error"] = msg
            raise
        return response


@@ -88,20 +111,49 @@ GLOBAL_REQUEST_HANDLER = RequestHandler()

class GalaxyFinder:
    """A class for finding MISP galaxy matches to Recorded Future data."""

    def __init__(self):
        self.session = requests.Session()
        # There are duplicate values for different keys because Links entities and Related entities
        # use different naming for the same types
        self.sources = {
            'RelatedThreatActor': [
                'https://raw.githubusercontent.com/MISP/misp-galaxy/main/clusters/threat-actor.json'
            "RelatedThreatActor": [f"{GALAXY_FILE_PATH}threat-actor.json"],
            "Threat Actor": [f"{GALAXY_FILE_PATH}threat-actor.json"],
            "RelatedMalware": [
                f"{GALAXY_FILE_PATH}banker.json",
                f"{GALAXY_FILE_PATH}botnet.json",
                f"{GALAXY_FILE_PATH}exploit-kit.json",
                f"{GALAXY_FILE_PATH}rat.json",
                f"{GALAXY_FILE_PATH}ransomware.json",
                f"{GALAXY_FILE_PATH}malpedia.json",
            ],
            "Malware": [
                f"{GALAXY_FILE_PATH}banker.json",
                f"{GALAXY_FILE_PATH}botnet.json",
                f"{GALAXY_FILE_PATH}exploit-kit.json",
                f"{GALAXY_FILE_PATH}rat.json",
                f"{GALAXY_FILE_PATH}ransomware.json",
                f"{GALAXY_FILE_PATH}malpedia.json",
            ],
            "MitreAttackIdentifier": [
                f"{GALAXY_FILE_PATH}mitre-attack-pattern.json",
                f"{GALAXY_FILE_PATH}mitre-course-of-action.json",
                f"{GALAXY_FILE_PATH}mitre-enterprise-attack-attack-pattern.json",
                f"{GALAXY_FILE_PATH}mitre-enterprise-attack-course-of-action.json",
                f"{GALAXY_FILE_PATH}mitre-enterprise-attack-intrusion-set.json",
                f"{GALAXY_FILE_PATH}mitre-enterprise-attack-malware.json",
                f"{GALAXY_FILE_PATH}mitre-enterprise-attack-tool.json",
                f"{GALAXY_FILE_PATH}mitre-intrusion-set.json",
                f"{GALAXY_FILE_PATH}mitre-malware.json",
                f"{GALAXY_FILE_PATH}mitre-mobile-attack-attack-pattern.json",
                f"{GALAXY_FILE_PATH}mitre-mobile-attack-course-of-action.json",
                f"{GALAXY_FILE_PATH}mitre-mobile-attack-intrusion-set.json",
                f"{GALAXY_FILE_PATH}mitre-mobile-attack-malware.json",
                f"{GALAXY_FILE_PATH}mitre-mobile-attack-tool.json",
                f"{GALAXY_FILE_PATH}mitre-pre-attack-attack-pattern.json",
                f"{GALAXY_FILE_PATH}mitre-pre-attack-intrusion-set.json",
                f"{GALAXY_FILE_PATH}mitre-tool.json",
            ],
            'RelatedMalware': [
                'https://raw.githubusercontent.com/MISP/misp-galaxy/main/clusters/banker.json',
                'https://raw.githubusercontent.com/MISP/misp-galaxy/main/clusters/botnet.json',
                'https://raw.githubusercontent.com/MISP/misp-galaxy/main/clusters/exploit-kit.json',
                'https://raw.githubusercontent.com/MISP/misp-galaxy/main/clusters/rat.json',
                'https://raw.githubusercontent.com/MISP/misp-galaxy/main/clusters/ransomware.json',
                'https://raw.githubusercontent.com/MISP/misp-galaxy/main/clusters/malpedia.json'
            ]
        }
        self.galaxy_clusters = {}


@@ -112,33 +164,38 @@ class GalaxyFinder:
        for source in self.sources.get(related_type):
            try:
                response = GLOBAL_REQUEST_HANDLER.get(source)
                name = source.split('/')[-1].split('.')[0]
                self.galaxy_clusters[related_type] = {name: response.json()}
                name = source.split("/")[-1].split(".")[0]
                self.galaxy_clusters.setdefault(related_type, {}).update(
                    {name: response.json()}
                )
            except ConnectionError as error:
                LOGGER.warning(f'pull_galaxy_cluster failed for source: {source}, with error: {error}.')
                LOGGER.warning(
                    f"pull_galaxy_cluster failed for source: {source}, with error: {error}."
                )

    def find_galaxy_match(self, indicator: str, related_type: str) -> str:
        """Searches the clusters of the related_type for a match with the indicator.
        :returns the first matching galaxy string or an empty string if no galaxy match is found.
        """
        self.pull_galaxy_cluster(related_type)
        for cluster_name, cluster in self.galaxy_clusters.get(related_type, {}).items():
            for value in cluster['values']:
                try:
                    if indicator in value['meta']['synonyms'] or indicator in value['value']:
                        value = value['value']
                        return f'misp-galaxy:{cluster_name}="{value}"'
                except KeyError:
                    pass
        return ''
            for value in cluster["values"]:
                if indicator in value.get("meta", {}).get(
                    "synonyms", ""
                ) or indicator in value.get("value", ""):
                    value = value["value"]
                    return f'misp-galaxy:{cluster_name}="{value}"'
        return ""


class RFColors:
    """Class for setting signature RF-colors."""

    def __init__(self):
        self.rf_white = '#CCCCCC'
        self.rf_yellow = '#FFCE00'
        self.rf_red = '#CF0A2C'
        self.rf_white = "#CCCCCC"
        self.rf_grey = "#CDCDCD"
        self.rf_yellow = "#FFCF00"
        self.rf_red = "#D10028"

    def riskscore_color(self, risk_score: int) -> str:
        """Returns appropriate hex-colors according to risk score."""

@@ -160,194 +217,276 @@ class RFColors:
        else:  # risk_rule_criticality == 3 or 4
            return self.rf_red

    def criticality_color(self, criticality) -> str:
        mapper = {
            "None": self.rf_grey,
            "Low": self.rf_grey,
            "Unusual": self.rf_grey,
            "Informational": self.rf_grey,
            "Medium": self.rf_yellow,
            "Suspicious": self.rf_yellow,
            "High": self.rf_red,
            "Critical": self.rf_red,
            "Very Critical": self.rf_red,
            "Malicious": self.rf_red,
            "Very Malicious": self.rf_red,
        }
        return mapper.get(criticality, self.rf_white)


class RFEnricher:
    """Class for enriching an attribute with data from Recorded Future.
    The enrichment data is returned as a custom MISP object.
    """

    def __init__(self, attribute_props: dict):
        self.event = MISPEvent()
        self.enrichment_object = MISPObject('Recorded Future Enrichment')
        self.enrichment_object = MISPObject("Recorded Future Enrichment")
        self.enrichment_object.template_uuid = "cbe0ffda-75e5-4c49-833f-093f057652ba"
        self.enrichment_object.template_id = "1"
        self.enrichment_object.description = "Recorded Future Enrichment"
        setattr(self.enrichment_object, 'meta-category', 'network')
        description = (
            'An object containing the enriched attribute and '
            'related entities from Recorded Future.'
            "An object containing the enriched attribute and "
            "related entities from Recorded Future."
        )
        self.enrichment_object.from_dict(
            **{"meta-category": "misc", "description": description, "distribution": 0}
        )
        self.enrichment_object.from_dict(**{
            'meta-category': 'misc',
            'description': description,
            'distribution': 0
        })

        # Create a copy of enriched attribute to add tags to
        temp_attr = MISPAttribute()
        temp_attr.from_dict(**attribute_props)
        self.enriched_attribute = MISPAttribute()
        self.enriched_attribute.from_dict(**{
            'value': temp_attr.value,
            'type': temp_attr.type,
            'distribution': 0
        })
        self.enriched_attribute.from_dict(
            **{"value": temp_attr.value, "type": temp_attr.type, "distribution": 0}
        )

        self.related_attributes = []
        self.related_attributes: List[Tuple[str, MISPAttribute]] = []
        self.color_picker = RFColors()
        self.galaxy_finder = GalaxyFinder()

        # Mapping from MISP-type to RF-type
        self.type_to_rf_category = {
            'ip': 'ip',
            'ip-src': 'ip',
            'ip-dst': 'ip',
            'domain': 'domain',
            'hostname': 'domain',
            'md5': 'hash',
            'sha1': 'hash',
            'sha256': 'hash',
            'uri': 'url',
            'url': 'url',
            'vulnerability': 'vulnerability',
            'weakness': 'vulnerability'
            "ip": "ip",
            "ip-src": "ip",
            "ip-dst": "ip",
            "ip-src|port": "ip",
            "ip-dst|port": "ip",
            "domain": "domain",
            "hostname": "domain",
            "md5": "hash",
            "sha1": "hash",
            "sha256": "hash",
            "uri": "url",
            "url": "url",
            "vulnerability": "vulnerability",
            "weakness": "vulnerability",
        }

        # Related entities from RF portrayed as related attributes in MISP
        # Related entities (with 'Related' in the type name) and Links entities
        # from RF are portrayed as related attributes in MISP
        self.related_attribute_types = [
            'RelatedIpAddress', 'RelatedInternetDomainName', 'RelatedHash',
            'RelatedEmailAddress', 'RelatedCyberVulnerability'
            "RelatedIpAddress",
            "RelatedInternetDomainName",
            "RelatedHash",
            "RelatedEmailAddress",
            "RelatedCyberVulnerability",
            "IpAddress",
            "InternetDomainName",
            "Hash",
            "EmailAddress",
            "CyberVulnerability",
        ]
        # Related entities (with 'Related' in the type name) and Links entities from RF are portrayed as tags in MISP
        self.galaxy_tag_types = [
            "RelatedMalware",
            "RelatedThreatActor",
            "Threat Actor",
            "MitreAttackIdentifier",
            "Malware",
        ]
        # Related entities from RF portrayed as tags in MISP
        self.galaxy_tag_types = ['RelatedMalware', 'RelatedThreatActor']

    def enrich(self) -> None:
        """Run the enrichment."""
        category = self.type_to_rf_category.get(self.enriched_attribute.type)
        json_response = GLOBAL_REQUEST_HANDLER.rf_lookup(category, self.enriched_attribute.value)
        category = self.type_to_rf_category.get(self.enriched_attribute.type, "")
        enriched_attribute_value = self.enriched_attribute.value
        # If the enriched attribute has a port, strip it,
        # since RF does not support enriching IP addresses with a port
        if self.enriched_attribute.type in ["ip-src|port", "ip-dst|port"]:
            enriched_attribute_value = enriched_attribute_value.split("|")[0]
        json_response = GLOBAL_REQUEST_HANDLER.rf_lookup(
            category, enriched_attribute_value
        )
        response = json.loads(json_response.content)

        try:
            # Add risk score and risk rules as tags to the enriched attribute
            risk_score = response['data']['risk']['score']
            risk_score = response["data"]["risk"]["score"]
            hex_color = self.color_picker.riskscore_color(risk_score)
            tag_name = f'recorded-future:risk-score="{risk_score}"'
            self.add_tag(tag_name, hex_color)
            for evidence in response['data']['risk']['evidenceDetails']:
                risk_rule = evidence['rule']
                criticality = evidence['criticality']
            risk_criticality = response["data"]["risk"]["criticalityLabel"]
            hex_color = self.color_picker.criticality_color(risk_criticality)
            tag_name = f'recorded-future:criticality="{risk_criticality}"'
            self.add_tag(tag_name, hex_color)

            for evidence in response["data"]["risk"]["evidenceDetails"]:
                risk_rule = evidence["rule"]
                criticality = evidence["criticality"]
                hex_color = self.color_picker.riskrule_color(criticality)
                tag_name = f'recorded-future:risk-rule="{risk_rule}"'
                self.add_tag(tag_name, hex_color)

            # Retrieve related entities
            for related_entity in response['data']['relatedEntities']:
                related_type = related_entity['type']
                if related_type in self.related_attribute_types:
                    # Related entities returned as additional attributes
                    for related in related_entity['entities']:
                        if int(related["count"]) > 4:
                            indicator = related['entity']['name']
                            self.add_related_attribute(indicator, related_type)
                elif related_type in self.galaxy_tag_types:
                    # Related entities added as galaxy-tags to the enriched attribute
                    galaxy_tags = []
                    for related in related_entity['entities']:
                        if int(related["count"]) > 4:
                            indicator = related['entity']['name']
                            galaxy = self.galaxy_finder.find_galaxy_match(indicator, related_type)
                            # Handle deduplication of galaxy tags
                            if galaxy and galaxy not in galaxy_tags:
                                galaxy_tags.append(galaxy)
                    for galaxy in galaxy_tags:
                        self.add_tag(galaxy)
            links_data = response["data"].get("links", {}).get("hits")
            # Check for an error in the links response: if present, the user's
            # token does not have the required module enabled
            links_access_error = response["data"].get("links", {}).get("error")
            galaxy_tags = []
            if not links_access_error:
                for hit in links_data:
                    for section in hit["sections"]:
                        for sec_list in section["lists"]:
                            entity_type = sec_list["type"]["name"]
                            for entity in sec_list["entities"]:
                                if entity_type in self.galaxy_tag_types:
                                    galaxy = self.galaxy_finder.find_galaxy_match(
                                        entity["name"], entity_type
                                    )
                                    if galaxy and galaxy not in galaxy_tags:
                                        galaxy_tags.append(galaxy)
                                else:
                                    self.add_attribute(entity["name"], entity_type)

            else:
                # Retrieve related entities
                for related_entity in response["data"]["relatedEntities"]:
                    related_type = related_entity["type"]
                    if related_type in self.related_attribute_types:
                        # Related entities returned as additional attributes
                        for related in related_entity["entities"]:
                            # keep only entities with a count greater than 4 to reduce
                            # noise, since the list of related entities can be huge
                            if int(related["count"]) > 4:
                                indicator = related["entity"]["name"]
                                self.add_attribute(indicator, related_type)
                    elif related_type in self.galaxy_tag_types:
                        # Related entities added as galaxy-tags to the enriched attribute
                        galaxy_tags = []
                        for related in related_entity["entities"]:
                            # keep only entities with a count greater than 4 to reduce
                            # noise, since the list of related entities can be huge
                            if int(related["count"]) > 4:
                                indicator = related["entity"]["name"]
                                galaxy = self.galaxy_finder.find_galaxy_match(
                                    indicator, related_type
                                )
                                # Handle deduplication of galaxy tags
                                if galaxy and galaxy not in galaxy_tags:
                                    galaxy_tags.append(galaxy)
            for galaxy in galaxy_tags:
                self.add_tag(galaxy)

        except KeyError:
            misperrors['error'] = 'Unexpected format in Recorded Future api response.'
            misperrors["error"] = "Unexpected format in Recorded Future api response."
            raise

    def add_related_attribute(self, indicator: str, related_type: str) -> None:
        """Helper method for adding an indicator to the related attribute list."""
        out_type = self.get_output_type(related_type, indicator)
    def add_attribute(self, indicator: str, indicator_type: str) -> None:
        """Helper method for adding an indicator to the attribute list."""
        out_type = self.get_output_type(indicator_type, indicator)
        attribute = MISPAttribute()
        attribute.from_dict(**{'value': indicator, 'type': out_type, 'distribution': 0})
        self.related_attributes.append((related_type, attribute))
        attribute.from_dict(**{"value": indicator, "type": out_type, "distribution": 0})
        self.related_attributes.append((indicator_type, attribute))

    def add_tag(self, tag_name: str, hex_color: str = None) -> None:
        """Helper method for adding a tag to the enriched attribute."""
        tag = MISPTag()
        tag_properties = {'name': tag_name}
        tag_properties = {"name": tag_name}
        if hex_color:
            tag_properties['colour'] = hex_color
            tag_properties["colour"] = hex_color
        tag.from_dict(**tag_properties)
        self.enriched_attribute.add_tag(tag)

    def get_output_type(self, related_type: str, indicator: str) -> str:
        """Helper method for translating a Recorded Future related type to a MISP output type."""
        output_type = 'text'
        if related_type == 'RelatedIpAddress':
            output_type = 'ip-dst'
        elif related_type == 'RelatedInternetDomainName':
            output_type = 'domain'
        elif related_type == 'RelatedHash':
        output_type = "text"
        if related_type in ["RelatedIpAddress", "IpAddress"]:
            output_type = "ip-dst"
        elif related_type in ["RelatedInternetDomainName", "InternetDomainName"]:
            output_type = "domain"
        elif related_type in ["RelatedHash", "Hash"]:
            hash_len = len(indicator)
            if hash_len == 64:
                output_type = 'sha256'
                output_type = "sha256"
            elif hash_len == 40:
                output_type = 'sha1'
                output_type = "sha1"
            elif hash_len == 32:
                output_type = 'md5'
        elif related_type == 'RelatedEmailAddress':
            output_type = 'email-src'
        elif related_type == 'RelatedCyberVulnerability':
            signature = indicator.split('-')[0]
            if signature == 'CVE':
                output_type = 'vulnerability'
            elif signature == 'CWE':
                output_type = 'weakness'
                output_type = "md5"
        elif related_type in ["RelatedEmailAddress", "EmailAddress"]:
            output_type = "email-src"
        elif related_type in ["RelatedCyberVulnerability", "CyberVulnerability"]:
            signature = indicator.split("-")[0]
            if signature == "CVE":
                output_type = "vulnerability"
            elif signature == "CWE":
                output_type = "weakness"
        elif related_type == "MalwareSignature":
            output_type = "malware-sample"
        elif related_type == "Organization":
            output_type = "target-org"
        elif related_type == "Username":
            output_type = "target-user"
        return output_type

    def get_results(self) -> dict:
        """Build and return the enrichment results."""
        self.enrichment_object.add_attribute('Enriched attribute', **self.enriched_attribute)
        self.enrichment_object.add_attribute(
            "Enriched attribute", **self.enriched_attribute
        )
        for related_type, attribute in self.related_attributes:
            self.enrichment_object.add_attribute(related_type, **attribute)
        self.event.add_object(**self.enrichment_object)
        event = json.loads(self.event.to_json())
        result = {key: event[key] for key in ['Object'] if key in event}
        return {'results': result}
        result = {key: event[key] for key in ["Object"] if key in event}
        return {"results": result}


def get_proxy_settings(config: dict) -> dict:
def get_proxy_settings(config: dict) -> Optional[Dict[str, str]]:
    """Returns proxy settings in the requests format.
    If no proxy settings are set, return None."""
    proxies = None
    host = config.get('proxy_host')
    port = config.get('proxy_port')
    username = config.get('proxy_username')
    password = config.get('proxy_password')
    host = config.get("proxy_host")
    port = config.get("proxy_port")
    username = config.get("proxy_username")
    password = config.get("proxy_password")

    if host:
        if not port:
            misperrors['error'] = 'The recordedfuture_proxy_host config is set, ' \
                                  'please also set the recordedfuture_proxy_port.'
            misperrors["error"] = (
                "The recordedfuture_proxy_host config is set, "
                "please also set the recordedfuture_proxy_port."
            )
            raise KeyError
        parsed = urlparse(host)
        if 'http' in parsed.scheme:
            scheme = 'http'
        if "http" in parsed.scheme:
            scheme = "http"
        else:
            scheme = parsed.scheme
        netloc = parsed.netloc
        host = f'{netloc}:{port}'
        host = f"{netloc}:{port}"

        if username:
            if not password:
                misperrors['error'] = 'The recordedfuture_proxy_username config is set, ' \
                                      'please also set the recordedfuture_proxy_password.'
                misperrors["error"] = (
                    "The recordedfuture_proxy_username config is set, "
                    "please also set the recordedfuture_proxy_password."
                )
                raise KeyError
            auth = f'{username}:{password}'
            host = auth + '@' + host
            auth = f"{username}:{password}"
            host = auth + "@" + host

        proxies = {
            'http': f'{scheme}://{host}',
            'https': f'{scheme}://{host}'
        }
        proxies = {"http": f"{scheme}://{host}", "https": f"{scheme}://{host}"}

    LOGGER.info(f'Proxy settings: {proxies}')
    LOGGER.info(f"Proxy settings: {proxies}")
    return proxies
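    # Illustrative call (placeholder values):
    #   get_proxy_settings({'proxy_host': 'http://proxy.local', 'proxy_port': '3128'})
    # returns {'http': 'http://proxy.local:3128', 'https': 'http://proxy.local:3128'}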

@@ -357,23 +496,25 @@ def handler(q=False):
        return False
    request = json.loads(q)

    config = request.get('config')
    if config and config.get('token'):
        GLOBAL_REQUEST_HANDLER.rf_token = config.get('token')
    config = request.get("config")
    if config and config.get("token"):
        GLOBAL_REQUEST_HANDLER.rf_token = config.get("token")
    else:
        misperrors['error'] = 'Missing Recorded Future token.'
        misperrors["error"] = "Missing Recorded Future token."
        return misperrors
    if not request.get('attribute') or not check_input_attribute(request['attribute'], requirements=('type', 'value')):
        return {'error': f'{standard_error_message}, {checking_error}.'}
    if request['attribute']['type'] not in mispattributes['input']:
        return {'error': 'Unsupported attribute type.'}
    if not request.get("attribute") or not check_input_attribute(
        request["attribute"], requirements=("type", "value")
    ):
        return {"error": f"{standard_error_message}, {checking_error}."}
    if request["attribute"]["type"] not in mispattributes["input"]:
        return {"error": "Unsupported attribute type."}

    try:
        GLOBAL_REQUEST_HANDLER.proxies = get_proxy_settings(config)
    except KeyError:
        return misperrors

    input_attribute = request.get('attribute')
    input_attribute = request.get("attribute")
    rf_enricher = RFEnricher(input_attribute)

    try:

@@ -392,5 +533,5 @@ def introspection():
def version():
    """Returns a dict with the version and the associated meta-data
    including potential configurations required of the module."""
    moduleinfo['config'] = moduleconfig
    moduleinfo["config"] = moduleconfig
    return moduleinfo


@@ -1,5 +1,6 @@
import json
import requests
from urllib.parse import urlparse
import vt
from . import check_input_attribute, standard_error_message
from pymisp import MISPAttribute, MISPEvent, MISPObject


@@ -8,179 +9,211 @@ mispattributes = {'input': ['hostname', 'domain', "ip-src", "ip-dst", "md5", "sh
                  'format': 'misp_standard'}

# possible module-types: 'expansion', 'hover' or both
moduleinfo = {'version': '4', 'author': 'Hannah Ward',
              'description': 'Get information from VirusTotal',
moduleinfo = {'version': '5', 'author': 'Hannah Ward',
              'description': 'Enrich observables with the VirusTotal v3 API',
              'module-type': ['expansion']}

# config fields that your code expects from the site admin
moduleconfig = ["apikey", "event_limit"]
moduleconfig = ["apikey", "event_limit", 'proxy_host', 'proxy_port', 'proxy_username', 'proxy_password']


class VirusTotalParser(object):
    def __init__(self, apikey, limit):
        self.apikey = apikey
        self.limit = limit
        self.base_url = "https://www.virustotal.com/vtapi/v2/{}/report"
DEFAULT_RESULTS_LIMIT = 10


class VirusTotalParser:
    def __init__(self, client: vt.Client, limit: int) -> None:
        self.client = client
        self.limit = limit or DEFAULT_RESULTS_LIMIT
        self.misp_event = MISPEvent()
        self.attribute = MISPAttribute()
        self.parsed_objects = {}
        self.input_types_mapping = {'ip-src': self.parse_ip, 'ip-dst': self.parse_ip,
                                    'domain': self.parse_domain, 'hostname': self.parse_domain,
                                    'md5': self.parse_hash, 'sha1': self.parse_hash,
                                    'sha256': self.parse_hash, 'url': self.parse_url}
        self.proxies = None

    def query_api(self, attribute):
        self.attribute = MISPAttribute()
    @staticmethod
    def get_total_analysis(analysis: dict, known_distributors: dict = None) -> int:
        if not analysis:
            return 0
        count = sum([analysis['undetected'], analysis['suspicious'], analysis['harmless']])
        return count if known_distributors else count + analysis['malicious']
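        # Example (illustrative): analysis = {'undetected': 60, 'suspicious': 1,
        # 'harmless': 9, 'malicious': 5} with no known distributors gives
        # 60 + 1 + 9 + 5 = 75, i.e. a detection ratio of "5/75" in add_vt_report.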
|
||||
|
||||
def query_api(self, attribute: dict) -> None:
|
||||
self.attribute.from_dict(**attribute)
|
||||
return self.input_types_mapping[self.attribute.type](self.attribute.value, recurse=True)
|
||||
self.input_types_mapping[self.attribute.type](self.attribute.value)
|
||||
|
||||
def get_result(self):
|
||||
def get_result(self) -> dict:
|
||||
event = json.loads(self.misp_event.to_json())
|
||||
results = {key: event[key] for key in ('Attribute', 'Object') if (key in event and event[key])}
|
||||
return {'results': results}
|
||||
|
||||
def add_vt_report(self, report: vt.Object) -> str:
|
||||
analysis = report.get('last_analysis_stats')
|
||||
total = self.get_total_analysis(analysis, report.get('known_distributors'))
|
||||
permalink = f'https://www.virustotal.com/gui/{report.type}/{report.id}'
|
||||
|
||||
vt_object = MISPObject('virustotal-report')
|
||||
vt_object.add_attribute('permalink', type='link', value=permalink)
|
||||
detection_ratio = f"{analysis['malicious']}/{total}" if analysis else '-/-'
|
||||
vt_object.add_attribute('detection-ratio', type='text', value=detection_ratio, disable_correlation=True)
|
||||
self.misp_event.add_object(**vt_object)
|
||||
return vt_object.uuid
|
||||
|
||||
def create_misp_object(self, report: vt.Object) -> MISPObject:
|
||||
misp_object = None
|
||||
vt_uuid = self.add_vt_report(report)
|
||||
if report.type == 'file':
|
||||
misp_object = MISPObject('file')
|
||||
for hash_type in ('md5', 'sha1', 'sha256'):
|
||||
misp_object.add_attribute(**{'type': hash_type,
|
||||
'object_relation': hash_type,
|
||||
'value': report.get(hash_type)})
|
||||
elif report.type == 'domain':
|
||||
misp_object = MISPObject('domain-ip')
|
||||
misp_object.add_attribute('domain', type='domain', value=report.id)
|
||||
elif report.type == 'ip_address':
|
||||
misp_object = MISPObject('domain-ip')
|
||||
misp_object.add_attribute('ip', type='ip-dst', value=report.id)
|
||||
elif report.type == 'url':
|
||||
misp_object = MISPObject('url')
|
||||
misp_object.add_attribute('url', type='url', value=report.url)
|
||||
misp_object.add_reference(vt_uuid, 'analyzed-with')
|
||||
return misp_object
|
||||
|
||||
################################################################################
|
||||
#### Main parsing functions #### # noqa
|
||||
################################################################################
|
||||
|
||||
def parse_domain(self, domain, recurse=False):
|
||||
req = requests.get(self.base_url.format('domain'), params={'apikey': self.apikey, 'domain': domain})
|
||||
if req.status_code != 200:
|
||||
return req.status_code
|
||||
req = req.json()
|
||||
hash_type = 'sha256'
|
||||
whois = 'whois'
|
||||
feature_types = {'communicating': 'communicates-with',
|
||||
'downloaded': 'downloaded-from',
|
||||
'referrer': 'referring'}
|
||||
siblings = (self.parse_siblings(domain) for domain in req['domain_siblings'])
|
||||
uuid = self.parse_resolutions(req['resolutions'], req['subdomains'] if 'subdomains' in req else None, siblings)
|
||||
for feature_type, relationship in feature_types.items():
|
||||
for feature in ('undetected_{}_samples', 'detected_{}_samples'):
|
||||
for sample in req.get(feature.format(feature_type), [])[:self.limit]:
|
||||
status_code = self.parse_hash(sample[hash_type], False, uuid, relationship)
|
||||
if status_code != 200:
|
||||
return status_code
|
||||
if req.get(whois):
|
||||
whois_object = MISPObject(whois)
|
||||
whois_object.add_attribute('text', type='text', value=req[whois])
|
||||
def parse_domain(self, domain: str) -> str:
|
||||
domain_report = self.client.get_object(f'/domains/{domain}')
|
||||
|
||||
# DOMAIN
|
||||
domain_object = self.create_misp_object(domain_report)
|
||||
|
||||
# WHOIS
|
||||
if domain_report.whois:
|
||||
whois_object = MISPObject('whois')
|
||||
whois_object.add_attribute('text', type='text', value=domain_report.whois)
|
||||
self.misp_event.add_object(**whois_object)
|
||||
return self.parse_related_urls(req, recurse, uuid)
|
||||
|
||||
def parse_hash(self, sample, recurse=False, uuid=None, relationship=None):
|
||||
req = requests.get(self.base_url.format('file'), params={'apikey': self.apikey, 'resource': sample})
|
||||
status_code = req.status_code
|
||||
if req.status_code == 200:
|
||||
req = req.json()
|
||||
vt_uuid = self.parse_vt_object(req)
|
||||
file_attributes = []
|
||||
for hash_type in ('md5', 'sha1', 'sha256'):
|
||||
if req.get(hash_type):
|
||||
file_attributes.append({'type': hash_type, 'object_relation': hash_type,
|
||||
'value': req[hash_type]})
|
||||
if file_attributes:
|
||||
file_object = MISPObject('file')
|
||||
for attribute in file_attributes:
|
||||
file_object.add_attribute(**attribute)
|
||||
file_object.add_reference(vt_uuid, 'analyzed-with')
|
||||
if uuid and relationship:
|
||||
file_object.add_reference(uuid, relationship)
|
||||
# SIBLINGS AND SUBDOMAINS
|
||||
for relationship_name, misp_name in [('siblings', 'sibling-of'), ('subdomains', 'subdomain')]:
|
||||
rel_iterator = self.client.iterator(f'/domains/{domain_report.id}/{relationship_name}', limit=self.limit)
|
||||
for item in rel_iterator:
|
||||
attr = MISPAttribute()
|
||||
attr.from_dict(**dict(type='domain', value=item.id))
|
||||
self.misp_event.add_attribute(**attr)
|
||||
domain_object.add_reference(attr.uuid, misp_name)
|
||||
|
||||
# RESOLUTIONS
|
||||
resolutions_iterator = self.client.iterator(f'/domains/{domain_report.id}/resolutions', limit=self.limit)
|
||||
for resolution in resolutions_iterator:
|
||||
domain_object.add_attribute('ip', type='ip-dst', value=resolution.ip_address)
|
||||
|
||||
# COMMUNICATING, DOWNLOADED AND REFERRER FILES
|
||||
for relationship_name, misp_name in [
|
||||
('communicating_files', 'communicates-with'),
|
||||
('downloaded_files', 'downloaded-from'),
|
||||
('referrer_files', 'referring')
|
||||
]:
|
||||
files_iterator = self.client.iterator(f'/domains/{domain_report.id}/{relationship_name}', limit=self.limit)
|
||||
for file in files_iterator:
|
||||
file_object = self.create_misp_object(file)
|
||||
file_object.add_reference(domain_object.uuid, misp_name)
|
||||
self.misp_event.add_object(**file_object)
|
||||
return status_code
|
||||
|
||||
def parse_ip(self, ip, recurse=False):
|
||||
req = requests.get(self.base_url.format('ip-address'), params={'apikey': self.apikey, 'ip': ip})
|
||||
if req.status_code != 200:
|
||||
return req.status_code
|
||||
req = req.json()
|
||||
if req.get('asn'):
|
||||
asn_mapping = {'network': ('ip-src', 'subnet-announced'),
|
||||
'country': ('text', 'country')}
|
||||
asn_object = MISPObject('asn')
|
||||
asn_object.add_attribute('asn', type='AS', value=req['asn'])
|
||||
for key, value in asn_mapping.items():
|
||||
if req.get(key):
|
||||
attribute_type, relation = value
|
||||
asn_object.add_attribute(relation, type=attribute_type, value=req[key])
|
||||
self.misp_event.add_object(**asn_object)
|
||||
uuid = self.parse_resolutions(req['resolutions']) if req.get('resolutions') else None
|
||||
return self.parse_related_urls(req, recurse, uuid)
|
||||
# URLS
|
||||
urls_iterator = self.client.iterator(f'/domains/{domain_report.id}/urls', limit=self.limit)
|
||||
for url in urls_iterator:
|
||||
url_object = self.create_misp_object(url)
|
||||
url_object.add_reference(domain_object.uuid, 'hosted-in')
|
||||
self.misp_event.add_object(**url_object)
|
||||
|
||||
def parse_url(self, url, recurse=False, uuid=None):
|
||||
req = requests.get(self.base_url.format('url'), params={'apikey': self.apikey, 'resource': url})
|
||||
status_code = req.status_code
|
||||
if req.status_code == 200:
|
||||
req = req.json()
|
||||
vt_uuid = self.parse_vt_object(req)
|
||||
if not recurse:
|
||||
feature = 'url'
|
||||
url_object = MISPObject(feature)
|
||||
url_object.add_attribute(feature, type=feature, value=url)
|
||||
url_object.add_reference(vt_uuid, 'analyzed-with')
|
||||
if uuid:
|
||||
url_object.add_reference(uuid, 'hosted-in')
|
||||
self.misp_event.add_object(**url_object)
|
||||
return status_code
|
||||
self.misp_event.add_object(**domain_object)
|
||||
return domain_object.uuid

    ################################################################################
    #### Additional parsing functions #### # noqa
    ################################################################################
    def parse_hash(self, file_hash: str) -> str:
        file_report = self.client.get_object(f'files/{file_hash}')
        file_object = self.create_misp_object(file_report)
        self.misp_event.add_object(**file_object)
        return file_object.uuid

    def parse_related_urls(self, query_result, recurse, uuid=None):
        if recurse:
            for feature in ('detected_urls', 'undetected_urls'):
                if feature in query_result:
                    for url in query_result[feature]:
                        value = url['url'] if isinstance(url, dict) else url[0]
                        status_code = self.parse_url(value, False, uuid)
                        if status_code != 200:
                            return status_code
    def parse_ip(self, ip: str) -> str:
        ip_report = self.client.get_object(f'/ip_addresses/{ip}')

        # IP
        ip_object = self.create_misp_object(ip_report)

        # ASN
        asn_object = MISPObject('asn')
        asn_object.add_attribute('asn', type='AS', value=ip_report.asn)
        asn_object.add_attribute('subnet-announced', type='ip-src', value=ip_report.network)
        asn_object.add_attribute('country', type='text', value=ip_report.country)
        self.misp_event.add_object(**asn_object)

        # RESOLUTIONS
        resolutions_iterator = self.client.iterator(f'/ip_addresses/{ip_report.id}/resolutions', limit=self.limit)
        for resolution in resolutions_iterator:
            ip_object.add_attribute('domain', type='domain', value=resolution.host_name)

        # URLS
        urls_iterator = self.client.iterator(f'/ip_addresses/{ip_report.id}/urls', limit=self.limit)
        for url in urls_iterator:
            url_object = self.create_misp_object(url)
            url_object.add_reference(ip_object.uuid, 'hosted-in')
            self.misp_event.add_object(**url_object)

        self.misp_event.add_object(**ip_object)
        return ip_object.uuid

    def parse_url(self, url: str) -> str:
        url_id = vt.url_id(url)
        url_report = self.client.get_object(f'/urls/{url_id}')
        url_object = self.create_misp_object(url_report)
        self.misp_event.add_object(**url_object)
        return url_object.uuid
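    # Note: vt.url_id() derives VirusTotal's canonical URL identifier (an
    # unpadded URL-safe base64 encoding of the URL), which is what the
    # /urls/{id} endpoint expects instead of the raw URL itself.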


def get_proxy_settings(config: dict) -> dict:
    """Returns proxy settings in the requests format.
    If no proxy settings are set, return None."""
    proxies = None
    host = config.get('proxy_host')
    port = config.get('proxy_port')
    username = config.get('proxy_username')
    password = config.get('proxy_password')

    if host:
        if not port:
            misperrors['error'] = 'The virustotal_proxy_host config is set, ' \
                                  'please also set the virustotal_proxy_port.'
            raise KeyError
        parsed = urlparse(host)
        if 'http' in parsed.scheme:
            scheme = 'http'
        else:
            for feature in ('detected_urls', 'undetected_urls'):
                if feature in query_result:
                    for url in query_result[feature]:
                        value = url['url'] if isinstance(url, dict) else url[0]
                        self.misp_event.add_attribute('url', value)
            return 200
            scheme = parsed.scheme
        netloc = parsed.netloc
        host = f'{netloc}:{port}'

    def parse_resolutions(self, resolutions, subdomains=None, uuids=None):
        domain_ip_object = MISPObject('domain-ip')
        if self.attribute.type in ('domain', 'hostname'):
            domain_ip_object.add_attribute('domain', type='domain', value=self.attribute.value)
            attribute_type, relation, key = ('ip-dst', 'ip', 'ip_address')
        else:
            domain_ip_object.add_attribute('ip', type='ip-dst', value=self.attribute.value)
            attribute_type, relation, key = ('domain', 'domain', 'hostname')
        for resolution in resolutions:
            domain_ip_object.add_attribute(relation, type=attribute_type, value=resolution[key])
        if subdomains:
            for subdomain in subdomains:
                attribute = MISPAttribute()
                attribute.from_dict(**dict(type='domain', value=subdomain))
                self.misp_event.add_attribute(**attribute)
                domain_ip_object.add_reference(attribute.uuid, 'subdomain')
        if uuids:
            for uuid in uuids:
                domain_ip_object.add_reference(uuid, 'sibling-of')
        self.misp_event.add_object(**domain_ip_object)
        return domain_ip_object.uuid
    if username:
        if not password:
            misperrors['error'] = 'The virustotal_proxy_username config is set, ' \
                                  'please also set the virustotal_proxy_password.'
            raise KeyError
        auth = f'{username}:{password}'
        host = auth + '@' + host

    def parse_siblings(self, domain):
        attribute = MISPAttribute()
        attribute.from_dict(**dict(type='domain', value=domain))
        self.misp_event.add_attribute(**attribute)
        return attribute.uuid

    def parse_vt_object(self, query_result):
        if query_result['response_code'] == 1:
            vt_object = MISPObject('virustotal-report')
            vt_object.add_attribute('permalink', type='link', value=query_result['permalink'])
            detection_ratio = '{}/{}'.format(query_result['positives'], query_result['total'])
            vt_object.add_attribute('detection-ratio', type='text', value=detection_ratio, disable_correlation=True)
            self.misp_event.add_object(**vt_object)
            return vt_object.uuid
    proxies = {
        'http': f'{scheme}://{host}',
        'https': f'{scheme}://{host}'
    }
    return proxies
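# Illustrative sketch (hypothetical values, not part of the module): with
# proxy_host='http://proxy.local' and proxy_port=3128, the function returns
# {'http': 'http://proxy.local:3128', 'https': 'http://proxy.local:3128'},
# i.e. the mapping format accepted by requests and, through its 'http' entry,
# by the vt.Client(proxy=...) call in handler() below.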


def parse_error(status_code):
def parse_error(status_code: int) -> str:
    status_mapping = {204: 'VirusTotal request rate limit exceeded.',
                      400: 'Incorrect request, please check the arguments.',
                      403: 'You don\'t have enough privileges to make the request.'}

@@ -194,7 +227,7 @@ def handler(q=False):
        return False
    request = json.loads(q)
    if not request.get('config') or not request['config'].get('apikey'):
        misperrors['error'] = "A VirusTotal api key is required for this module."
        misperrors['error'] = 'A VirusTotal api key is required for this module.'
        return misperrors
    if not request.get('attribute') or not check_input_attribute(request['attribute']):
        return {'error': f'{standard_error_message}, which should contain at least a type, a value and an uuid.'}

@@ -202,14 +235,21 @@ def handler(q=False):
        return {'error': 'Unsupported attribute type.'}

    event_limit = request['config'].get('event_limit')
    if not isinstance(event_limit, int):
        event_limit = 5
    parser = VirusTotalParser(request['config']['apikey'], event_limit)
    attribute = request['attribute']
    status = parser.query_api(attribute)
    if status != 200:
        misperrors['error'] = parse_error(status)
    proxy_settings = get_proxy_settings(request.get('config'))

    try:
        client = vt.Client(request['config']['apikey'],
                           headers={
                               'x-tool': 'MISPModuleVirusTotalExpansion',
                           },
                           proxy=proxy_settings['http'] if proxy_settings else None)
        parser = VirusTotalParser(client, int(event_limit) if event_limit else None)
        parser.query_api(attribute)
    except vt.APIError as ex:
        misperrors['error'] = ex.message
        return misperrors

    return parser.get_result()


@@ -219,4 +259,4 @@ def introspection():

def version():
    moduleinfo['config'] = moduleconfig
    return moduleinfo
    return moduleinfo
@@ -1,165 +1,205 @@
import json
import requests
import logging
import vt
from . import check_input_attribute, standard_error_message
from urllib.parse import urlparse
from pymisp import MISPAttribute, MISPEvent, MISPObject

misperrors = {'error': 'Error'}
mispattributes = {'input': ['hostname', 'domain', "ip-src", "ip-dst", "md5", "sha1", "sha256", "url"],
                  'format': 'misp_standard'}
moduleinfo = {'version': '1', 'author': 'Christian Studer',
              'description': 'Get information from VirusTotal public API v2.',
moduleinfo = {'version': '2', 'author': 'Christian Studer',
              'description': 'Enrich observables with the VirusTotal v3 public API',
              'module-type': ['expansion', 'hover']}

moduleconfig = ['apikey']
moduleconfig = ['apikey', 'proxy_host', 'proxy_port', 'proxy_username', 'proxy_password']

LOGGER = logging.getLogger('virus_total_public')
LOGGER.setLevel(logging.INFO)


class VirusTotalParser():
    def __init__(self):
        super(VirusTotalParser, self).__init__()
DEFAULT_RESULTS_LIMIT = 10


class VirusTotalParser:
    def __init__(self, client: vt.Client, limit: int) -> None:
        self.client = client
        self.limit = limit or DEFAULT_RESULTS_LIMIT
        self.misp_event = MISPEvent()

    def declare_variables(self, apikey, attribute):
        self.attribute = MISPAttribute()
        self.attribute.from_dict(**attribute)
        self.apikey = apikey
        self.parsed_objects = {}
        self.input_types_mapping = {'ip-src': self.parse_ip, 'ip-dst': self.parse_ip,
                                    'domain': self.parse_domain, 'hostname': self.parse_domain,
                                    'md5': self.parse_hash, 'sha1': self.parse_hash,
                                    'sha256': self.parse_hash, 'url': self.parse_url}
        self.proxies = None

    def get_result(self):
    @staticmethod
    def get_total_analysis(analysis: dict, known_distributors: dict = None) -> int:
        if not analysis:
            return 0
        count = sum([analysis['undetected'], analysis['suspicious'], analysis['harmless']])
        return count if known_distributors else count + analysis['malicious']
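    # Worked example (hypothetical stats, for illustration): with
    # last_analysis_stats = {'malicious': 5, 'suspicious': 1, 'undetected': 60,
    # 'harmless': 4} and no known_distributors, this returns 60 + 1 + 4 + 5 = 70,
    # which add_vt_report() below renders as the detection ratio '5/70'.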

    def query_api(self, attribute: dict) -> None:
        self.attribute.from_dict(**attribute)
        self.input_types_mapping[self.attribute.type](self.attribute.value)

    def get_result(self) -> dict:
        event = json.loads(self.misp_event.to_json())
        results = {key: event[key] for key in ('Attribute', 'Object') if (key in event and event[key])}
        return {'results': results}

    def parse_urls(self, query_result):
        for feature in ('detected_urls', 'undetected_urls'):
            if feature in query_result:
                for url in query_result[feature]:
                    value = url['url'] if isinstance(url, dict) else url[0]
                    self.misp_event.add_attribute('url', value)
    def add_vt_report(self, report: vt.Object) -> str:
        analysis = report.get('last_analysis_stats')
        total = self.get_total_analysis(analysis, report.get('known_distributors'))
        permalink = f'https://www.virustotal.com/gui/{report.type}/{report.id}'

    def parse_resolutions(self, resolutions, subdomains=None, uuids=None):
        domain_ip_object = MISPObject('domain-ip')
        if self.attribute.type in ('domain', 'hostname'):
            domain_ip_object.add_attribute('domain', type='domain', value=self.attribute.value)
            attribute_type, relation, key = ('ip-dst', 'ip', 'ip_address')
        else:
            domain_ip_object.add_attribute('ip', type='ip-dst', value=self.attribute.value)
            attribute_type, relation, key = ('domain', 'domain', 'hostname')
        for resolution in resolutions:
            domain_ip_object.add_attribute(relation, type=attribute_type, value=resolution[key])
        if subdomains:
            for subdomain in subdomains:
                attribute = MISPAttribute()
                attribute.from_dict(**dict(type='domain', value=subdomain))
                self.misp_event.add_attribute(**attribute)
                domain_ip_object.add_reference(attribute.uuid, 'subdomain')
        if uuids:
            for uuid in uuids:
                domain_ip_object.add_reference(uuid, 'sibling-of')
        self.misp_event.add_object(**domain_ip_object)
        vt_object = MISPObject('virustotal-report')
        vt_object.add_attribute('permalink', type='link', value=permalink)
        detection_ratio = f"{analysis['malicious']}/{total}" if analysis else '-/-'
        vt_object.add_attribute('detection-ratio', type='text', value=detection_ratio, disable_correlation=True)
        self.misp_event.add_object(**vt_object)
        return vt_object.uuid

    def parse_vt_object(self, query_result):
        if query_result['response_code'] == 1:
            vt_object = MISPObject('virustotal-report')
            vt_object.add_attribute('permalink', type='link', value=query_result['permalink'])
            detection_ratio = '{}/{}'.format(query_result['positives'], query_result['total'])
            vt_object.add_attribute('detection-ratio', type='text', value=detection_ratio)
            self.misp_event.add_object(**vt_object)
    def create_misp_object(self, report: vt.Object) -> MISPObject:
        misp_object = None
        vt_uuid = self.add_vt_report(report)
        if report.type == 'file':
            misp_object = MISPObject('file')
            for hash_type in ('md5', 'sha1', 'sha256'):
                misp_object.add_attribute(**{'type': hash_type,
                                             'object_relation': hash_type,
                                             'value': report.get(hash_type)})
        elif report.type == 'domain':
            misp_object = MISPObject('domain-ip')
            misp_object.add_attribute('domain', type='domain', value=report.id)
        elif report.type == 'ip_address':
            misp_object = MISPObject('domain-ip')
            misp_object.add_attribute('ip', type='ip-dst', value=report.id)
        elif report.type == 'url':
            misp_object = MISPObject('url')
            misp_object.add_attribute('url', type='url', value=report.url)
        misp_object.add_reference(vt_uuid, 'analyzed-with')
        return misp_object
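    # Every attribute type accepted by this module maps to one of the four
    # report types handled above, so misp_object is never None at this point;
    # an unexpected report type would make the add_reference() call fail.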

    def get_query_result(self, query_type):
        params = {query_type: self.attribute.value, 'apikey': self.apikey}
        return requests.get(self.base_url, params=params)
    ################################################################################
    #### Main parsing functions #### # noqa
    ################################################################################

    def parse_domain(self, domain: str) -> str:
        domain_report = self.client.get_object(f'/domains/{domain}')

class DomainQuery(VirusTotalParser):
    def __init__(self, apikey, attribute):
        super(DomainQuery, self).__init__()
        self.base_url = "https://www.virustotal.com/vtapi/v2/domain/report"
        self.declare_variables(apikey, attribute)
        # DOMAIN
        domain_object = self.create_misp_object(domain_report)

    def parse_report(self, query_result):
        hash_type = 'sha256'
        whois = 'whois'
        for feature_type in ('referrer', 'downloaded', 'communicating'):
            for feature in ('undetected_{}_samples', 'detected_{}_samples'):
                for sample in query_result.get(feature.format(feature_type), []):
                    self.misp_event.add_attribute(hash_type, sample[hash_type])
        if query_result.get(whois):
            whois_object = MISPObject(whois)
            whois_object.add_attribute('text', type='text', value=query_result[whois])
        # WHOIS
        if domain_report.whois:
            whois_object = MISPObject('whois')
            whois_object.add_attribute('text', type='text', value=domain_report.whois)
            self.misp_event.add_object(**whois_object)
        if 'domain_siblings' in query_result:
            siblings = (self.parse_siblings(domain) for domain in query_result['domain_siblings'])
        if 'subdomains' in query_result:
            self.parse_resolutions(query_result['resolutions'], query_result['subdomains'], siblings)
        self.parse_urls(query_result)

    def parse_siblings(self, domain):
        attribute = MISPAttribute()
        attribute.from_dict(**dict(type='domain', value=domain))
        self.misp_event.add_attribute(**attribute)
        return attribute.uuid
        # SIBLINGS AND SUBDOMAINS
        for relationship_name, misp_name in [('siblings', 'sibling-of'), ('subdomains', 'subdomain')]:
            rel_iterator = self.client.iterator(f'/domains/{domain_report.id}/{relationship_name}', limit=self.limit)
            for item in rel_iterator:
                attr = MISPAttribute()
                attr.from_dict(**dict(type='domain', value=item.id))
                self.misp_event.add_attribute(**attr)
                domain_object.add_reference(attr.uuid, misp_name)

        # RESOLUTIONS
        resolutions_iterator = self.client.iterator(f'/domains/{domain_report.id}/resolutions', limit=self.limit)
        for resolution in resolutions_iterator:
            domain_object.add_attribute('ip', type='ip-dst', value=resolution.ip_address)

        # COMMUNICATING AND REFERRER FILES
        for relationship_name, misp_name in [
            ('communicating_files', 'communicates-with'),
            ('referrer_files', 'referring')
        ]:
            files_iterator = self.client.iterator(f'/domains/{domain_report.id}/{relationship_name}', limit=self.limit)
            for file in files_iterator:
                file_object = self.create_misp_object(file)
                file_object.add_reference(domain_object.uuid, misp_name)
                self.misp_event.add_object(**file_object)

        self.misp_event.add_object(**domain_object)
        return domain_object.uuid

    def parse_hash(self, file_hash: str) -> str:
        file_report = self.client.get_object(f'files/{file_hash}')
        file_object = self.create_misp_object(file_report)
        self.misp_event.add_object(**file_object)
        return file_object.uuid

    def parse_ip(self, ip: str) -> str:
        ip_report = self.client.get_object(f'/ip_addresses/{ip}')

        # IP
        ip_object = self.create_misp_object(ip_report)

        # ASN
        asn_object = MISPObject('asn')
        asn_object.add_attribute('asn', type='AS', value=ip_report.asn)
        asn_object.add_attribute('subnet-announced', type='ip-src', value=ip_report.network)
        asn_object.add_attribute('country', type='text', value=ip_report.country)
        self.misp_event.add_object(**asn_object)

        # RESOLUTIONS
        resolutions_iterator = self.client.iterator(f'/ip_addresses/{ip_report.id}/resolutions', limit=self.limit)
        for resolution in resolutions_iterator:
            ip_object.add_attribute('domain', type='domain', value=resolution.host_name)

        self.misp_event.add_object(**ip_object)
        return ip_object.uuid

    def parse_url(self, url: str) -> str:
        url_id = vt.url_id(url)
        url_report = self.client.get_object(f'/urls/{url_id}')
        url_object = self.create_misp_object(url_report)
        self.misp_event.add_object(**url_object)
        return url_object.uuid


class HashQuery(VirusTotalParser):
    def __init__(self, apikey, attribute):
        super(HashQuery, self).__init__()
        self.base_url = "https://www.virustotal.com/vtapi/v2/file/report"
        self.declare_variables(apikey, attribute)
def get_proxy_settings(config: dict) -> dict:
    """Returns proxy settings in the requests format.
    If no proxy settings are set, return None."""
    proxies = None
    host = config.get('proxy_host')
    port = config.get('proxy_port')
    username = config.get('proxy_username')
    password = config.get('proxy_password')

    def parse_report(self, query_result):
        file_attributes = []
        for hash_type in ('md5', 'sha1', 'sha256'):
            if query_result.get(hash_type):
                file_attributes.append({'type': hash_type, 'object_relation': hash_type,
                                        'value': query_result[hash_type]})
        if file_attributes:
            file_object = MISPObject('file')
            for attribute in file_attributes:
                file_object.add_attribute(**attribute)
            self.misp_event.add_object(**file_object)
        self.parse_vt_object(query_result)
    if host:
        if not port:
            misperrors['error'] = 'The virustotal_proxy_host config is set, ' \
                                  'please also set the virustotal_proxy_port.'
            raise KeyError
        parsed = urlparse(host)
        if 'http' in parsed.scheme:
            scheme = 'http'
        else:
            scheme = parsed.scheme
        netloc = parsed.netloc
        host = f'{netloc}:{port}'

    if username:
        if not password:
            misperrors['error'] = 'The virustotal_proxy_username config is set, ' \
                                  'please also set the virustotal_proxy_password.'
            raise KeyError
        auth = f'{username}:{password}'
        host = auth + '@' + host

    proxies = {
        'http': f'{scheme}://{host}',
        'https': f'{scheme}://{host}'
    }
    return proxies


class IpQuery(VirusTotalParser):
    def __init__(self, apikey, attribute):
        super(IpQuery, self).__init__()
        self.base_url = "https://www.virustotal.com/vtapi/v2/ip-address/report"
        self.declare_variables(apikey, attribute)

    def parse_report(self, query_result):
        if query_result.get('asn'):
            asn_mapping = {'network': ('ip-src', 'subnet-announced'),
                           'country': ('text', 'country')}
            asn_object = MISPObject('asn')
            asn_object.add_attribute('asn', type='AS', value=query_result['asn'])
            for key, value in asn_mapping.items():
                if query_result.get(key):
                    attribute_type, relation = value
                    asn_object.add_attribute(relation, type=attribute_type, value=query_result[key])
            self.misp_event.add_object(**asn_object)
        self.parse_urls(query_result)
        if query_result.get('resolutions'):
            self.parse_resolutions(query_result['resolutions'])


class UrlQuery(VirusTotalParser):
    def __init__(self, apikey, attribute):
        super(UrlQuery, self).__init__()
        self.base_url = "https://www.virustotal.com/vtapi/v2/url/report"
        self.declare_variables(apikey, attribute)

    def parse_report(self, query_result):
        self.parse_vt_object(query_result)


domain = ('domain', DomainQuery)
ip = ('ip', IpQuery)
file = ('resource', HashQuery)
misp_type_mapping = {'domain': domain, 'hostname': domain, 'ip-src': ip,
                     'ip-dst': ip, 'md5': file, 'sha1': file, 'sha256': file,
                     'url': ('resource', UrlQuery)}


def parse_error(status_code):
def parse_error(status_code: int) -> str:
    status_mapping = {204: 'VirusTotal request rate limit exceeded.',
                      400: 'Incorrect request, please check the arguments.',
                      403: 'You don\'t have enough privileges to make the request.'}

@@ -173,22 +213,29 @@ def handler(q=False):
        return False
    request = json.loads(q)
    if not request.get('config') or not request['config'].get('apikey'):
        misperrors['error'] = "A VirusTotal api key is required for this module."
        misperrors['error'] = 'A VirusTotal api key is required for this module.'
        return misperrors
    if not request.get('attribute') or not check_input_attribute(request['attribute']):
        return {'error': f'{standard_error_message}, which should contain at least a type, a value and an uuid.'}
    attribute = request['attribute']
    if attribute['type'] not in mispattributes['input']:
    if request['attribute']['type'] not in mispattributes['input']:
        return {'error': 'Unsupported attribute type.'}
    query_type, to_call = misp_type_mapping[attribute['type']]
    parser = to_call(request['config']['apikey'], attribute)
    query_result = parser.get_query_result(query_type)
    status_code = query_result.status_code
    if status_code == 200:
        parser.parse_report(query_result.json())
    else:
        misperrors['error'] = parse_error(status_code)

    event_limit = request['config'].get('event_limit')
    attribute = request['attribute']
    proxy_settings = get_proxy_settings(request.get('config'))

    try:
        client = vt.Client(request['config']['apikey'],
                           headers={
                               'x-tool': 'MISPModuleVirusTotalPublicExpansion',
                           },
                           proxy=proxy_settings['http'] if proxy_settings else None)
        parser = VirusTotalParser(client, int(event_limit) if event_limit else None)
        parser.query_api(attribute)
    except vt.APIError as ex:
        misperrors['error'] = ex.message
        return misperrors

    return parser.get_result()


@@ -198,4 +245,4 @@ def introspection():

def version():
    moduleinfo['config'] = moduleconfig
    return moduleinfo
    return moduleinfo

@@ -0,0 +1,621 @@
#!/usr/bin/env python3
"""
Expansion module integrating with VMware NSX Defender.
"""
import argparse
import base64
import configparser
import datetime
import hashlib
import io
import ipaddress
import json
import logging
import pymisp
import sys
import vt
import zipfile
from urllib import parse
from typing import Any, Dict, List, Optional, Tuple, Union

import tau_clients
from tau_clients import exceptions
from tau_clients import nsx_defender


logger = logging.getLogger("vmware_nsx")
logger.setLevel(logging.DEBUG)

misperrors = {
    "error": "Error",
}

mispattributes = {
    "input": [
        "attachment",
        "malware-sample",
        "url",
        "md5",
        "sha1",
        "sha256",
    ],
    "format": "misp_standard",
}

moduleinfo = {
    "version": "0.2",
    "author": "Jason Zhang, Stefano Ortolani",
    "description": "Enrich a file or URL with VMware NSX Defender",
    "module-type": ["expansion", "hover"],
}

moduleconfig = [
    "analysis_url",  # optional, defaults to hard-coded values
    "analysis_verify_ssl",  # optional, defaults to True
    "analysis_key",  # required
    "analysis_api_token",  # required
    "vt_key",  # optional
    "misp_url",  # optional
    "misp_verify_ssl",  # optional, defaults to True
    "misp_key",  # optional
]

DEFAULT_ZIP_PASSWORD = b"infected"

DEFAULT_ENDPOINT = tau_clients.NSX_DEFENDER_DC_WESTUS

WORKFLOW_COMPLETE_TAG = "workflow:state='complete'"

WORKFLOW_INCOMPLETE_TAG = "workflow:state='incomplete'"

VT_DOWNLOAD_TAG = "vt:download"

GALAXY_ATTACK_PATTERNS_UUID = "c4e851fa-775f-11e7-8163-b774922098cd"


class ResultParser:
    """This is a parser to extract *basic* information from a result dictionary."""

    def __init__(self, techniques_galaxy: Optional[Dict[str, str]] = None):
        """Constructor."""
        self.techniques_galaxy = techniques_galaxy or {}

    def parse(self, analysis_link: str, result: Dict[str, Any]) -> pymisp.MISPEvent:
        """
        Parse the analysis result into a MISP event.

        :param str analysis_link: the analysis link
        :param dict[str, any] result: the JSON returned by the analysis client.
        :rtype: pymisp.MISPEvent
        :return: a MISP event
        """
        misp_event = pymisp.MISPEvent()

        # Add analysis subject info
        if "url" in result["analysis_subject"]:
            o = pymisp.MISPObject("url")
            o.add_attribute("url", result["analysis_subject"]["url"])
        else:
            o = pymisp.MISPObject("file")
            o.add_attribute("md5", type="md5", value=result["analysis_subject"]["md5"])
            o.add_attribute("sha1", type="sha1", value=result["analysis_subject"]["sha1"])
            o.add_attribute("sha256", type="sha256", value=result["analysis_subject"]["sha256"])
            o.add_attribute(
                "mimetype",
                category="Payload delivery",
                type="mime-type",
                value=result["analysis_subject"]["mime_type"]
            )
        misp_event.add_object(o)

        # Add HTTP requests from url analyses
        network_dict = result.get("report", {}).get("analysis", {}).get("network", {})
        for request in network_dict.get("requests", []):
            if not request["url"] and not request["ip"]:
                continue
            o = pymisp.MISPObject(name="http-request")
            o.add_attribute("method", "GET")
            if request["url"]:
                parsed_uri = parse.urlparse(request["url"])
                o.add_attribute("host", parsed_uri.netloc)
                o.add_attribute("uri", request["url"])
            if request["ip"]:
                o.add_attribute("ip-dst", request["ip"])
            misp_event.add_object(o)

        # Add network behaviors from files
        for subject in result.get("report", {}).get("analysis_subjects", []):

            # Add DNS requests
            for dns_query in subject.get("dns_queries", []):
                hostname = dns_query.get("hostname")
                # Skip if it is an IP address
                try:
                    if hostname == "wpad" or hostname == "localhost":
                        continue
                    # Invalid hostname, e.g., hostname: ZLKKJRPY or 2.2.0.10.in-addr.arpa.
                    if "." not in hostname or hostname[-1] == ".":
                        continue
                    _ = ipaddress.ip_address(hostname)
                    continue
                except ValueError:
                    pass
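                # A ValueError from ip_address() means the hostname is not a
                # plain IP address, so we fall through and keep it; a successful
                # parse hits the `continue` above and skips the entry.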

                o = pymisp.MISPObject(name="domain-ip")
                o.add_attribute("hostname", type="hostname", value=hostname)
                for ip in dns_query.get("results", []):
                    o.add_attribute("ip", type="ip-dst", value=ip)

                misp_event.add_object(o)

            # Add HTTP conversations (as network connection and as http request)
            for http_conversation in subject.get("http_conversations", []):
                o = pymisp.MISPObject(name="network-connection")
                o.add_attribute("ip-src", http_conversation["src_ip"])
                o.add_attribute("ip-dst", http_conversation["dst_ip"])
                o.add_attribute("src-port", http_conversation["src_port"])
                o.add_attribute("dst-port", http_conversation["dst_port"])
                o.add_attribute("hostname-dst", http_conversation["dst_host"])
                o.add_attribute("layer3-protocol", "IP")
                o.add_attribute("layer4-protocol", "TCP")
                o.add_attribute("layer7-protocol", "HTTP")
                misp_event.add_object(o)

                method, path, http_version = http_conversation["url"].split(" ")
                if http_conversation["dst_port"] == 80:
                    uri = "http://{}{}".format(http_conversation["dst_host"], path)
                else:
                    uri = "http://{}:{}{}".format(
                        http_conversation["dst_host"],
                        http_conversation["dst_port"],
                        path
                    )
                o = pymisp.MISPObject(name="http-request")
                o.add_attribute("host", http_conversation["dst_host"])
                o.add_attribute("method", method)
                o.add_attribute("uri", uri)
                o.add_attribute("ip-dst", http_conversation["dst_ip"])
                misp_event.add_object(o)

        # Add sandbox info like score and sandbox type
        o = pymisp.MISPObject(name="sandbox-report")
        sandbox_type = "saas" if tau_clients.is_task_hosted(analysis_link) else "on-premise"
        o.add_attribute("score", result["score"])
        o.add_attribute("sandbox-type", sandbox_type)
        o.add_attribute("{}-sandbox".format(sandbox_type), "vmware-nsx-defender")
        o.add_attribute("permalink", analysis_link)
        misp_event.add_object(o)

        # Add behaviors
        # Check that it's not empty first, as at least one attribute has to be set for the sb-signature object
        if result.get("malicious_activity", []):
            o = pymisp.MISPObject(name="sb-signature")
            o.add_attribute("software", "VMware NSX Defender")
            for activity in result.get("malicious_activity", []):
                a = pymisp.MISPAttribute()
                a.from_dict(type="text", value=activity)
                o.add_attribute("signature", **a)
            misp_event.add_object(o)

        # Add mitre techniques
        for techniques in result.get("activity_to_mitre_techniques", {}).values():
            for technique in techniques:
                for misp_technique_id, misp_technique_name in self.techniques_galaxy.items():
                    if technique["id"].casefold() in misp_technique_id.casefold():
                        # If the report details a sub-technique, trust the match;
                        # otherwise trust it only if the MISP technique is not a sub-technique
                        if "." in technique["id"] or "." not in misp_technique_id:
                            misp_event.add_tag(misp_technique_name)
                            break
        return misp_event
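        # The tags added above are MISP galaxy tag names, e.g. (illustrative
        # value) misp-galaxy:mitre-attack-pattern="Phishing - T1566".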


def _parse_submission_response(response: Dict[str, Any]) -> Tuple[str, List[str]]:
    """
    Parse the response from "submit_*" methods.

    :param dict[str, any] response: the client response
    :rtype: tuple(str, list[str])
    :return: the task uuid and a list of workflow tags
    :raises ValueError: in case of any error
    """
    task_uuid = response.get("task_uuid")
    if not task_uuid:
        raise ValueError("Submission failed, unable to process the data")
    if response.get("score") is not None:
        tags = [WORKFLOW_COMPLETE_TAG]
    else:
        tags = [WORKFLOW_INCOMPLETE_TAG]
    return task_uuid, tags


def _unzip(zipped_data: bytes, password: bytes = DEFAULT_ZIP_PASSWORD) -> bytes:
    """
    Unzip the data.

    :param bytes zipped_data: the zipped data
    :param bytes password: the password
    :rtype: bytes
    :return: the unzipped data
    :raises ValueError: in case of any error
    """
    try:
        data_file_object = io.BytesIO(zipped_data)
        with zipfile.ZipFile(data_file_object) as zip_file:
            sample_hash_name = zip_file.namelist()[0]
            return zip_file.read(sample_hash_name, password)
    except (IOError, ValueError) as e:
        raise ValueError(str(e))


def _download_from_vt(client: vt.Client, file_hash: str) -> bytes:
    """
    Download file from VT.

    :param vt.Client client: the VT client
    :param str file_hash: the file hash
    :rtype: bytes
    :return: the downloaded data
    :raises ValueError: in case of any error
    """
    try:
        buffer = io.BytesIO()
        client.download_file(file_hash, buffer)
        buffer.seek(0, 0)
        return buffer.read()
    except (IOError, vt.APIError) as e:
        raise ValueError(str(e))
    finally:
        # vt.Client frees its resources at shutdown and can be used as a context
        # manager to ease that. Since the structure of this module does not play
        # well with how MISP modules are organized, close the connections opened
        # by "download_file" pro-actively instead.
        if client:
            client.close()


def _get_analysis_tags(
    clients: Dict[str, nsx_defender.AnalysisClient],
    task_uuid: str,
) -> List[str]:
    """
    Get the analysis tags of a task.

    :param dict[str, nsx_defender.AnalysisClient] clients: the analysis clients
    :param str task_uuid: the task uuid
    :rtype: list[str]
    :return: the analysis tags
    :raises exceptions.ApiError: in case of client errors
    :raises exceptions.CommunicationError: in case of client communication errors
    """
    client = clients[DEFAULT_ENDPOINT]
    response = client.get_analysis_tags(task_uuid)
    tags = set([])
    for tag in response.get("analysis_tags", []):
        tag_header = None
        tag_type = tag["data"]["type"]
        if tag_type == "av_family":
            tag_header = "av-fam"
        elif tag_type == "av_class":
            tag_header = "av-cls"
        elif tag_type == "lastline_malware":
            tag_header = "nsx"
        if tag_header:
            tags.add("{}:{}".format(tag_header, tag["data"]["value"]))
    return sorted(tags)


def _get_latest_analysis(
    clients: Dict[str, nsx_defender.AnalysisClient],
    file_hash: str,
) -> Optional[str]:
    """
    Get the latest analysis.

    :param dict[str, nsx_defender.AnalysisClient] clients: the analysis clients
    :param str file_hash: the hash of the file
    :rtype: str|None
    :return: the task uuid if present, None otherwise
    :raises exceptions.ApiError: in case of client errors
    :raises exceptions.CommunicationError: in case of client communication errors
    """
    def _parse_expiration(task_info: Dict[str, str]) -> datetime.datetime:
        """
        Parse the expiration time of a task.

        :param dict[str, str] task_info: the task
        :rtype: datetime.datetime
        :return: the parsed datetime object
        """
        return datetime.datetime.strptime(task_info["expires"], "%Y-%m-%d %H:%M:%S")
    results = []
    for data_center, client in clients.items():
        response = client.query_file_hash(file_hash=file_hash)
        for task in response.get("tasks", []):
            results.append(task)
    if results:
        return sorted(results, key=_parse_expiration)[-1]["task_uuid"]
    else:
        return None


def _get_mitre_techniques_galaxy(misp_client: pymisp.PyMISP) -> Dict[str, str]:
    """
    Get all the MITRE techniques from the MISP galaxy.

    :param pymisp.PyMISP misp_client: the MISP client
    :rtype: dict[str, str]
    :return: all techniques indexed by their id
    """
    galaxy_attack_patterns = misp_client.get_galaxy(
        galaxy=GALAXY_ATTACK_PATTERNS_UUID,
        withCluster=True,
        pythonify=True,
    )
    ret = {}
    for cluster in galaxy_attack_patterns.clusters:
        ret[cluster.value] = cluster.tag_name
    return ret
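# The returned mapping goes from a cluster value to its galaxy tag name, e.g.
# an entry (illustrative) 'Phishing - T1566' ->
# 'misp-galaxy:mitre-attack-pattern="Phishing - T1566"'; ResultParser.parse()
# matches report technique ids against these keys case-insensitively.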


def introspection() -> Dict[str, Union[str, List[str]]]:
    """
    Implement the interface.

    :return: the supported MISP attributes
    :rtype: dict[str, list[str]]
    """
    return mispattributes


def version() -> Dict[str, Union[str, List[str]]]:
    """
    Implement the interface.

    :return: the module config inside another dictionary
    :rtype: dict[str, list[str]]
    """
    moduleinfo["config"] = moduleconfig
    return moduleinfo


def handler(q: Union[bool, str] = False) -> Union[bool, Dict[str, Any]]:
    """
    Implement the interface.

    :param bool|str q: the input received
    :rtype: bool|dict[str, any]
    """
    if q is False:
        return False

    request = json.loads(q)
    config = request.get("config", {})

    # Load the client to connect to VMware NSX ATA (hard-fail)
    try:
        analysis_url = config.get("analysis_url")
        login_params = {
            "key": config["analysis_key"],
            "api_token": config["analysis_api_token"],
        }
        # If 'analysis_url' is specified we are connecting on-premise
        if analysis_url:
            analysis_clients = {
                DEFAULT_ENDPOINT: nsx_defender.AnalysisClient(
                    api_url=analysis_url,
                    login_params=login_params,
                    verify_ssl=bool(config.get("analysis_verify_ssl", True)),
                )
            }
            logger.info("Connected NSX AnalysisClient to on-premise infrastructure")
        else:
            analysis_clients = {
                data_center: nsx_defender.AnalysisClient(
                    api_url=tau_clients.NSX_DEFENDER_ANALYSIS_URLS[data_center],
                    login_params=login_params,
                    verify_ssl=bool(config.get("analysis_verify_ssl", True)),
                ) for data_center in [
                    tau_clients.NSX_DEFENDER_DC_WESTUS,
                    tau_clients.NSX_DEFENDER_DC_NLEMEA,
                ]
            }
            logger.info("Connected NSX AnalysisClient to hosted infrastructure")
    except KeyError as ke:
        logger.error("Integration with VMware NSX ATA failed to connect: %s", str(ke))
        return {"error": "Error connecting to VMware NSX ATA: {}".format(ke)}

    # Load the client to connect to MISP (soft-fail)
    try:
        misp_client = pymisp.PyMISP(
            url=config["misp_url"],
            key=config["misp_key"],
            ssl=bool(config.get("misp_verify_ssl", True)),
        )
    except (KeyError, pymisp.PyMISPError):
        logger.error("Integration with pyMISP disabled: no MITRE techniques tags")
        misp_client = None

    # Load the client to connect to VT (soft-fail)
    try:
        vt_client = vt.Client(apikey=config["vt_key"])
    except (KeyError, ValueError):
        logger.error("Integration with VT disabled: no automatic download of samples")
        vt_client = None

    # Decode and issue the request
    try:
        if request["attribute"]["type"] == "url":
            sample_url = request["attribute"]["value"]
            response = analysis_clients[DEFAULT_ENDPOINT].submit_url(sample_url)
            task_uuid, tags = _parse_submission_response(response)
        else:
            if request["attribute"]["type"] == "malware-sample":
                # May raise TypeError
                file_data = _unzip(base64.b64decode(request["attribute"]["data"]))
                file_name = request["attribute"]["value"].split("|", 1)[0]
                hash_value = hashlib.sha1(file_data).hexdigest()
            elif request["attribute"]["type"] == "attachment":
                # May raise TypeError
                file_data = base64.b64decode(request["attribute"]["data"])
                file_name = request["attribute"].get("value")
                hash_value = hashlib.sha1(file_data).hexdigest()
            else:
                hash_value = request["attribute"]["value"]
                file_data = None
                file_name = "{}.bin".format(hash_value)
            # Check whether we have a task for that file
            tags = []
            task_uuid = _get_latest_analysis(analysis_clients, hash_value)
            if not task_uuid:
                # If we have no analysis, download the sample from VT
                if not file_data:
                    if not vt_client:
                        raise ValueError("No file available locally and VT is disabled")
                    file_data = _download_from_vt(vt_client, hash_value)
                    tags.append(VT_DOWNLOAD_TAG)
                # ... and submit it (_download_from_vt fails if no sample is available)
                response = analysis_clients[DEFAULT_ENDPOINT].submit_file(file_data, file_name)
                task_uuid, _tags = _parse_submission_response(response)
                tags.extend(_tags)
    except KeyError as e:
        logger.error("Error parsing input: %s", request["attribute"])
        return {"error": "Error parsing input: {}".format(e)}
    except TypeError as e:
        logger.error("Error decoding input: %s", request["attribute"])
        return {"error": "Error decoding input: {}".format(e)}
    except ValueError as e:
        logger.error("Error processing input: %s", request["attribute"])
        return {"error": "Error processing input: {}".format(e)}
    except (exceptions.CommunicationError, exceptions.ApiError) as e:
        logger.error("Error issuing API call: %s", str(e))
        return {"error": "Error issuing API call: {}".format(e)}
    else:
        analysis_link = tau_clients.get_task_link(
            uuid=task_uuid,
            analysis_url=analysis_clients[DEFAULT_ENDPOINT].base,
            prefer_load_balancer=True,
        )

    # Return partial results if the analysis has yet to terminate
    try:
        tags.extend(_get_analysis_tags(analysis_clients, task_uuid))
        report = analysis_clients[DEFAULT_ENDPOINT].get_result(task_uuid)
    except (exceptions.CommunicationError, exceptions.ApiError) as e:
        logger.error("Error retrieving the report: %s", str(e))
        return {
            "results": {
                "types": "link",
                "categories": ["External analysis"],
                "values": analysis_link,
                "tags": tags,
            }
        }

    # Return the enrichment
    try:
        techniques_galaxy = None
        if misp_client:
            techniques_galaxy = _get_mitre_techniques_galaxy(misp_client)
        result_parser = ResultParser(techniques_galaxy=techniques_galaxy)
        misp_event = result_parser.parse(analysis_link, report)
        for tag in tags:
            if tag not in frozenset([WORKFLOW_COMPLETE_TAG]):
                misp_event.add_tag(tag)
        return {
            "results": {
                key: json.loads(misp_event.to_json())[key]
                for key in ("Attribute", "Object", "Tag")
                if (key in misp_event and misp_event[key])
            }
        }
    except pymisp.PyMISPError as e:
        logger.error("Error parsing the report: %s", str(e))
        return {"error": "Error parsing the report: {}".format(e)}


def main():
    """Main function used to test basic functionalities of the module."""
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "-c",
        "--config-file",
        dest="config_file",
        required=True,
        help="the configuration file used for testing",
    )
    parser.add_argument(
        "-t",
        "--test-attachment",
        dest="test_attachment",
        default=None,
        help="the path to a test attachment",
    )
    args = parser.parse_args()
    conf = configparser.ConfigParser()
    conf.read(args.config_file)
    config = {
        "analysis_verify_ssl": conf.getboolean("analysis", "analysis_verify_ssl"),
        "analysis_key": conf.get("analysis", "analysis_key"),
        "analysis_api_token": conf.get("analysis", "analysis_api_token"),
        "vt_key": conf.get("vt", "vt_key"),
        "misp_url": conf.get("misp", "misp_url"),
        "misp_verify_ssl": conf.getboolean("misp", "misp_verify_ssl"),
        "misp_key": conf.get("misp", "misp_key"),
    }

    # TEST 1: submit a URL
    j = json.dumps(
        {
            "config": config,
            "attribute": {
                "type": "url",
                "value": "https://www.google.com",
            }
        }
    )
    print(json.dumps(handler(j), indent=4, sort_keys=True))

    # TEST 2: submit a file attachment
    if args.test_attachment:
        with open(args.test_attachment, "rb") as f:
            data = f.read()
        j = json.dumps(
            {
                "config": config,
                "attribute": {
                    "type": "attachment",
                    "value": "test.docx",
                    "data": base64.b64encode(data).decode("utf-8"),
                }
            }
        )
        print(json.dumps(handler(j), indent=4, sort_keys=True))

    # TEST 3: submit a file hash that is known by NSX ATA
    j = json.dumps(
        {
            "config": config,
            "attribute": {
                "type": "md5",
                "value": "002c56165a0e78369d0e1023ce044bf0",
            }
        }
    )
    print(json.dumps(handler(j), indent=4, sort_keys=True))

    # TEST 4: submit a file hash that is NOT known by NSX ATA
    j = json.dumps(
        {
            "config": config,
            "attribute": {
                "type": "sha1",
                "value": "2aac25ecdccf87abf6f1651ef2ffb30fcf732250",
            }
        }
    )
    print(json.dumps(handler(j), indent=4, sort_keys=True))
    return 0


if __name__ == "__main__":
    sys.exit(main())

@@ -17,7 +17,7 @@ def handler(q=False):
        misperrors['error'] = 'Query text missing'
        return misperrors

    sparql = SPARQLWrapper(wiki_api_url)
    sparql = SPARQLWrapper(wiki_api_url, agent='Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/50.0.2661.102 Safari/537.36')
    query_string = \
        "SELECT ?item \n" \
        "WHERE { \n" \

@@ -26,7 +26,6 @@ def handler(q=False):
    sparql.setQuery(query_string)
    sparql.setReturnFormat(JSON)
    results = sparql.query().convert()
    summary = ''
    try:
        result = results["results"]["bindings"]
        summary = result[0]["item"]["value"] if result else 'No additional data found on Wikidata'

@@ -14,6 +14,12 @@ moduleconfig = []
mispattributes = {'input': ['md5', 'sha1', 'sha256', 'filename|md5', 'filename|sha1', 'filename|sha256', 'imphash'], 'output': ['yara']}


def extract_input_attribute(request):
    for input_type in mispattributes['input']:
        if input_type in request:
            return input_type, request[input_type]
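    # Implicitly returns None when no supported hash key is present in the
    # request; the handler below reports that as a wrong input type.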


def get_hash_condition(hashtype, hashvalue):
    hashvalue = hashvalue.lower()
    required_module, params = ('pe', '()') if hashtype == 'imphash' else ('hash', '(0, filesize)')

@@ -24,11 +30,11 @@ def handler(q=False):
    if q is False:
        return False
    request = json.loads(q)
    del request['module']
    if 'event_id' in request:
        del request['event_id']
    attribute = extract_input_attribute(request)
    if attribute is None:
        return {'error': f'Wrong input type, please choose among the following: {", ".join(mispattributes["input"])}'}
    uuid = request.pop('attribute_uuid') if 'attribute_uuid' in request else None
    attribute_type, value = list(request.items())[0]
    attribute_type, value = attribute
    if 'filename' in attribute_type:
        _, attribute_type = attribute_type.split('|')
        _, value = value.split('|')

@@ -1,2 +1,3 @@
__all__ = ['cef_export', 'mass_eql_export', 'liteexport', 'goamlexport', 'threat_connect_export', 'pdfexport',
           'threatStream_misp_export', 'osqueryexport', 'nexthinkexport', 'vt_graph', 'defender_endpoint_export']
           'threatStream_misp_export', 'osqueryexport', 'nexthinkexport', 'vt_graph', 'defender_endpoint_export',
           'virustotal_collections']

@@ -0,0 +1,134 @@
#!/usr/bin/env python3

# Copyright 2022 Google Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Creates a VT Collection with indicators present in a given event."""

import base64
import json
import requests

misperrors = {
    'error': 'Error'
}

mispattributes = {
    'input': [
        'hostname',
        'domain',
        'ip-src',
        'ip-dst',
        'md5',
        'sha1',
        'sha256',
        'url'
    ],
    'format': 'misp_standard',
    'responseType': 'application/txt',
    'outputFileExtension': 'txt',
}

moduleinfo = {
    'version': '1.0',
    'author': 'VirusTotal',
    'description': 'Creates a VT Collection from an event iocs.',
    'module-type': ['export']
}

moduleconfig = [
    'vt_api_key',
    'proxy_host',
    'proxy_port',
    'proxy_username',
    'proxy_password'
]


class VTError(Exception):
    """Exception class to map VT API response errors."""
    pass


def create_collection(api_key, event_data):
    headers = {
        'x-apikey': api_key,
        'content-type': 'application/json',
        'x-tool': 'MISPModuleVirusTotalCollectionExport',
    }

    response = requests.post('https://www.virustotal.com/api/v3/integrations/misp/collections',
                             headers=headers,
                             json=event_data)

    uuid = event_data['Event']['uuid']
    response_data = response.json()

    if response.status_code == 200:
        link = response_data['data']['links']['self']
        return f'{uuid}: {link}'

    error = response_data['error']['message']
    if response.status_code == 400:
        return f'{uuid}: {error}'
    else:
        misperrors['error'] = error
        raise VTError(error)
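# Design note: a 400 response is reported inline per event so the export keeps
# going, while any other non-200 status aborts the whole export via VTError.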


def normalize_misp_data(data):
    normalized_data = {'Event': data.pop('Event', {})}
    for attr_key in data:
        if isinstance(data[attr_key], list) or isinstance(data[attr_key], dict):
            if attr_key == 'EventTag':
                normalized_data['Event']['Tag'] = [tag['Tag'] for tag in data[attr_key]]
            else:
                normalized_data['Event'][attr_key] = data[attr_key]

    return normalized_data
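# Illustrative sketch (hypothetical input): {'Event': {...}, 'Attribute': [...],
# 'EventTag': [{'Tag': {'name': 'tlp:white'}}]} becomes {'Event': {...,
# 'Attribute': [...], 'Tag': [{'name': 'tlp:white'}]}} - top-level lists and
# dicts are folded back under 'Event', and EventTag entries are unwrapped into
# plain Tag dicts.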


def handler(q=False):
    request = json.loads(q)

    if not request.get('config') or not request['config'].get('vt_api_key'):
        misperrors['error'] = 'A VirusTotal api key is required for this module.'
        return misperrors

    config = request['config']
    data = request['data']
    responses = []

    try:
        for event_data in data:
            normalized_event = normalize_misp_data(event_data)
            responses.append(create_collection(config.get('vt_api_key'),
                                               normalized_event))

        output = '\n'.join(responses)
        return {
            "response": [],
            "data": str(base64.b64encode(bytes(output, 'utf-8')), 'utf-8'),
        }
    except VTError:
        return misperrors


def introspection():
    return mispattributes


def version():
    moduleinfo['config'] = moduleconfig
    return moduleinfo

@@ -15,4 +15,5 @@ __all__ = [
    'csvimport',
    'cof2misp',
    'joe_import',
    'taxii21'
]

@@ -22,7 +22,7 @@ import ndjson
# from pymisp import MISPObject, MISPEvent, PyMISP
from pymisp import MISPObject

from cof2misp.cof import validate_cof
from cof2misp.cof import validate_cof, validate_dnsdbflex


create_specific_attributes = False  # this is for https://github.com/MISP/misp-objects/pull/314

@@ -37,7 +37,7 @@ mispattributes = {'inputSource': ['file'], 'output': ['MISP objects'],
                  'format': 'misp_standard'}


moduleinfo = {'version': '0.2', 'author': 'Aaron Kaplan',
moduleinfo = {'version': '0.3', 'author': 'Aaron Kaplan',
              'description': 'Module to import the passive DNS Common Output Format (COF) and merge it as a MISP object into a MISP event.',
              'module-type': ['import']}

@@ -81,7 +81,8 @@ def parse_and_insert_cof(data: str) -> dict:
        o = MISPObject(name='passive-dns', standalone=False, comment='created by cof2misp')

        # o.add_tag('tlp:amber')  # FIXME: we'll want to add a tlp: tag to the object
        o.add_attribute('bailiwick', value=entry['bailiwick'].rstrip('.'))
        if 'bailiwick' in entry:
            o.add_attribute('bailiwick', value=entry['bailiwick'].rstrip('.'), distribution=0)

        #
        # handle the combinations of rrtype (domain, ip) on both left and right side

@@ -90,26 +91,26 @@ def parse_and_insert_cof(data: str) -> dict:
        if create_specific_attributes:
            if rrtype in ['A', 'AAAA', 'A6']:  # address type
                # address type
                o.add_attribute('rrname_domain', value=rrname)
                o.add_attribute('rrname_domain', value=rrname, distribution=0)
                for r in rdata:
                    o.add_attribute('rdata_ip', value=r)
                    o.add_attribute('rdata_ip', value=r, distribution=0)
            elif rrtype in ['CNAME', 'DNAME', 'NS']:  # both sides are domains
                o.add_attribute('rrname_domain', value=rrname)
                o.add_attribute('rrname_domain', value=rrname, distribution=0)
                for r in rdata:
                    o.add_attribute('rdata_domain', value=r)
                    o.add_attribute('rdata_domain', value=r, distribution=0)
            elif rrtype in ['SOA']:  # left side is a domain, right side is text
                o.add_attribute('rrname_domain', value=rrname)
                o.add_attribute('rrname_domain', value=rrname, distribution=0)

        #
        # now do the regular filling up of rrname, rrtype, time_first, etc.
        #
        o.add_attribute('rrname', value=rrname)
        o.add_attribute('rrtype', value=rrtype)
        o.add_attribute('rrname', value=rrname, distribution=0)
        o.add_attribute('rrtype', value=rrtype, distribution=0)
        for r in rdata:
            o.add_attribute('rdata', value=r)
        o.add_attribute('raw_rdata', value=json.dumps(rdata))  # FIXME: do we need to hex encode it?
        o.add_attribute('time_first', value=entry['time_first'])
        o.add_attribute('time_last', value=entry['time_last'])
            o.add_attribute('rdata', value=r, distribution=0)
        o.add_attribute('raw_rdata', value=json.dumps(rdata), distribution=0)  # FIXME: do we need to hex encode it?
        o.add_attribute('time_first', value=entry['time_first'], distribution=0)
        o.add_attribute('time_last', value=entry['time_last'], distribution=0)
        o.first_seen = entry['time_first']  # is this redundant?
        o.last_seen = entry['time_last']

@@ -118,7 +119,7 @@ def parse_and_insert_cof(data: str) -> dict:
        #
        for k in ['count', 'sensor_id', 'origin', 'text', 'time_first_ms', 'time_last_ms', 'zone_time_first', 'zone_time_last']:
            if k in entry and entry[k]:
                o.add_attribute(k, value=entry[k])
                o.add_attribute(k, value=entry[k], distribution=0)

        #
        # add COF entry to MISP object

@@ -147,7 +148,36 @@ def parse_and_insert_dnsdbflex(data: str):
     --------
     none
     """
-    return {"error": "NOT IMPLEMENTED YET"}   # XXX FIXME: need a MISP object for dnsdbflex
+    objects = []
+    try:
+        entries = ndjson.loads(data)
+        for entry in entries:       # iterate over all ndjson lines
+            # validate here (simple validation or full JSON Schema validation)
+            if not validate_dnsdbflex(entry):
+                return {"error": "Could not validate the dnsdbflex input '%s'" % entry}
+
+            # Next, extract some fields
+            rrtype = entry['rrtype'].upper()
+            rrname = entry['rrname'].rstrip('.')
+
+            # create a new MISP object, based on the passive-dns object for each ndjson line
+            try:
+                o = MISPObject(name='passive-dns', standalone=False, distribution=0, comment='DNSDBFLEX import by cof2misp')
+                o.add_attribute('rrtype', value=rrtype, distribution=0, comment='DNSDBFLEX import by cof2misp')
+                o.add_attribute('rrname', value=rrname, distribution=0, comment='DNSDBFLEX import by cof2misp')
+            except Exception as ex:
+                print("could not create object. Reason: %s" % str(ex))
+
+            #
+            # add dnsdbflex entry to MISP object
+            #
+            objects.append(o.to_json())
+
+        r = {'results': {'Object': [json.loads(o) for o in objects]}}
+    except Exception as ex:
+        misperrors["error"] = "An error occurred during parsing of input: '%s'" % (str(ex),)
+        return misperrors
+    return r


 def is_dnsdbflex(data: str) -> bool:

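For a sense of what the new code path consumes, here is a minimal sketch (added for illustration; the sample record is made up, and only the ndjson package plus the field handling shown in the hunk above are assumed):

    # One JSON record per line (ndjson), as in dnsdbflex-style output;
    # each record carries at least 'rrname' and 'rrtype'. Values are fake.
    import ndjson

    sample = '{"rrname": "www.example.com.", "rrtype": "a"}\n'
    for entry in ndjson.loads(sample):
        rrtype = entry['rrtype'].upper()        # -> 'A'
        rrname = entry['rrname'].rstrip('.')    # -> 'www.example.com'
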
@@ -224,7 +224,8 @@ class CsvParser():

     @staticmethod
     def __deal_with_tags(attribute):
-        attribute['Tag'] = [{'name': tag.strip()} for tag in attribute['Tag'].split(',')]
+        if 'Tag' in attribute.keys():
+            attribute['Tag'] = [{'name': tag.strip()} for tag in attribute['Tag'].split(',')]

     def __get_score(self):
         score = 1 if 'to_ids' in self.header else 0

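Why the new guard matters, as a standalone sketch (re-implemented here for illustration, not the module's own code): without the membership test, a CSV row whose header has no Tag column raises KeyError.

    # Hypothetical parsed rows: only the first one carries a 'Tag' key.
    rows = [{'value': '8.8.8.8', 'Tag': 'tlp:white, osint'}, {'value': '1.2.3.4'}]
    for attribute in rows:
        if 'Tag' in attribute:    # equivalent to the patch's 'Tag' in attribute.keys()
            attribute['Tag'] = [{'name': tag.strip()} for tag in attribute['Tag'].split(',')]
    # rows[0]['Tag'] == [{'name': 'tlp:white'}, {'name': 'osint'}]; rows[1] is untouched.
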
@@ -5,9 +5,9 @@ from joe_parser import JoeParser

 misperrors = {'error': 'Error'}
 userConfig = {
-    "Import PE": {
+    "Import Executable": {
         "type": "Boolean",
-        "message": "Import PE Information",
+        "message": "Import Executable Information (PE, elf or apk for instance)",
     },
     "Mitre Att&ck": {
         "type": "Boolean",

@@ -29,7 +29,7 @@ def handler(q=False):
         return False
     q = json.loads(q)
     config = {
-        "import_pe": bool(int(q["config"]["Import PE"])),
+        "import_executable": bool(int(q["config"]["Import Executable"])),
         "mitre_attack": bool(int(q["config"]["Mitre Att&ck"])),
     }

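The bool(int(...)) double coercion is worth a note: boolean user settings appear to reach the module serialized as "0"/"1" strings, and bool() alone would be wrong, since any non-empty string is truthy. A small sketch under that assumption (the request fragment is hypothetical):

    q = {"config": {"Import Executable": "1", "Mitre Att&ck": "0"}}
    config = {
        "import_executable": bool(int(q["config"]["Import Executable"])),  # True
        "mitre_attack": bool(int(q["config"]["Mitre Att&ck"])),            # False
    }
    # bool("0") would evaluate to True; int("0") -> 0 -> False is the intent.
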
@@ -1,5 +1,7 @@
 #!/usr/bin/env python3
 """
+Deprecation notice: this module will be deprecated by December 2021, please use vmware_nsx module.
+
 Module (type "import") to import a Lastline report from an analysis link.
 """
 import json

@@ -0,0 +1,373 @@
"""
Import content from a TAXII 2.1 server.
"""
import collections
import itertools
import json
import misp_modules.lib.stix2misp
from pathlib import Path
import re
import stix2.v20
import taxii2client
import taxii2client.exceptions
import requests


class ConfigError(Exception):
    """
    Represents an error in the config settings for one invocation of this
    module.
    """
    pass


misperrors = {'error': 'Error'}

moduleinfo = {'version': '0.1', 'author': 'Abc',
              'description': 'Import content from a TAXII 2.1 server',
              'module-type': ['import']}

mispattributes = {
    'inputSource': [],
    'output': ['MISP objects'],
    'format': 'misp_standard',
}


userConfig = {
    "url": {
        "type": "String",
        "message": "A TAXII 2.1 collection URL",
    },
    "added_after": {
        "type": "String",
        "message": "Lower bound on time the object was uploaded to the TAXII server"
    },
    "stix_id": {
        "type": "String",
        "message": "STIX ID(s) of objects"
    },
    "spec_version": {  # TAXII 2.1 specific
        "type": "String",
        "message": "STIX version(s) of objects"
    },
    "type": {
        "type": "String",
        "message": "STIX type(s) of objects"
    },
    "version": {
        "type": "String",
        "message": 'Version timestamp(s), or "first"/"last"/"all"'
    },
    # Should we give some user control over this? It will not be allowed to
    # exceed the admin setting.
    "STIX object limit": {
        "type": "Integer",
        "message": "Maximum number of STIX objects to process"
    },
    "username": {
        "type": "String",
        "message": "Username for TAXII server authentication, if necessary"
    },
    "password": {
        "type": "String",
        "message": "Password for TAXII server authentication, if necessary"
    }
}


# Paging will be handled transparently by this module, so user-defined
# paging-related filtering parameters will not be supported.


# This module will not process more than this number of STIX objects in total
# from a TAXII server in one module invocation (across all pages), to limit
# resource consumption.
moduleconfig = [
    "stix_object_limit"
]


# In case there is neither an admin nor user setting given.
_DEFAULT_STIX_OBJECT_LIMIT = 1000


# Page size to use when paging TAXII results. Trades off the amount of
# hammering on TAXII servers and overhead of repeated requests, with the
# resource consumption of a single page. (Should be an admin setting too?)
_PAGE_SIZE = 100


_synonymsToTagNames_path = Path(__file__).parent / "../../lib/synonymsToTagNames.json"


# Collects module config information necessary to perform the TAXII query.
Config = collections.namedtuple("Config", [
    "url",
    "added_after",
    "id",
    "spec_version",
    "type",
    "version",
    "stix_object_limit",
    "username",
    "password"
])


def _pymisp_to_json_serializable(obj):
    """
    Work around a possible bug with PyMISP's
    AbstractMisp.to_dict(json_format=True) method, which doesn't always produce
    a JSON-serializable value (i.e. a value which is serializable with the
    default JSON encoder).

    :param obj: A PyMISP object
    :return: A JSON-serializable version of the object
    """

    # The workaround creates a JSON string and then parses it back to a
    # JSON-serializable value.
    json_ = obj.to_json()
    json_serializable = json.loads(json_)

    return json_serializable


def _normalize_multi_values(value):
    """
    Some TAXII filters may contain multiple values separated by commas,
    without spaces around the commas. Maybe give MISP users a little more
    flexibility? This function normalizes a possible multi-valued value
    (e.g. multiple values delimited by commas or spaces, all in the same
    string) to TAXII-required format.

    :param value: A MISP config value
    :return: A normalized value
    """

    if "," in value:
        value = re.sub(r"\s*,\s*", ",", value)
    else:
        # Assume space delimiting; replace spaces with commas.
        # I don't think we need to worry about spaces embedded in values.
        value = re.sub(r"\s+", ",", value)

    value = value.strip(",")

    return value


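# Added illustration (not part of the module): both comma- and space-delimited
# user input collapse to the comma-separated form TAXII expects:
#
#     _normalize_multi_values("indicator, malware")  ->  "indicator,malware"
#     _normalize_multi_values("indicator malware")   ->  "indicator,malware"
#     _normalize_multi_values(",indicator,")         ->  "indicator"

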
def _get_config(config):
    """
    Combine user, admin, and default config settings to produce a config
    object with all settings together.

    :param config: The misp-modules request's "config" value.
    :return: A Config object
    :raises ConfigError: if any config errors are detected
    """

    # Strip whitespace from all config settings... except for password?
    for key, val in config.items():
        if isinstance(val, str) and key != "password":
            config[key] = val.strip()

    url = config.get("url")
    added_after = config.get("added_after")
    id_ = config.get("stix_id")
    spec_version = config.get("spec_version")
    type_ = config.get("type")
    version_ = config.get("version")
    username = config.get("username")
    password = config.get("password")
    admin_stix_object_limit = config.get("stix_object_limit")
    user_stix_object_limit = config.get("STIX object limit")

    if admin_stix_object_limit:
        admin_stix_object_limit = int(admin_stix_object_limit)
    else:
        admin_stix_object_limit = _DEFAULT_STIX_OBJECT_LIMIT

    if user_stix_object_limit:
        user_stix_object_limit = int(user_stix_object_limit)
        stix_object_limit = min(user_stix_object_limit, admin_stix_object_limit)
    else:
        stix_object_limit = admin_stix_object_limit

    # How much of this should we sanity-check here before passing it off to the
    # TAXII client (and thence, to the TAXII server)?

    if not url:
        raise ConfigError("A TAXII 2.1 collection URL is required.")

    if admin_stix_object_limit < 1:
        raise ConfigError(
            "Invalid admin object limit: must be positive: "
            + str(admin_stix_object_limit)
        )

    if stix_object_limit < 1:
        raise ConfigError(
            "Invalid object limit: must be positive: "
            + str(stix_object_limit)
        )

    if id_:
        id_ = _normalize_multi_values(id_)
    if spec_version:
        spec_version = _normalize_multi_values(spec_version)
    if type_:
        type_ = _normalize_multi_values(type_)
    if version_:
        version_ = _normalize_multi_values(version_)

    # STIX->MISP converter currently only supports STIX 2.0, so let's force
    # spec_version="2.0".
    if not spec_version:
        spec_version = "2.0"
    elif spec_version != "2.0":
        raise ConfigError('Only spec_version="2.0" is supported for now.')

    if (username and not password) or (not username and password):
        raise ConfigError(
            'Both or neither of "username" and "password" are required.'
        )

    config_obj = Config(
        url, added_after, id_, spec_version, type_, version_, stix_object_limit,
        username, password
    )

    return config_obj


def _query_taxii(config):
    """
    Query the TAXII server according to the given config, convert the STIX
    results to MISP, and return a standard misp-modules response.

    :param config: Module config information as a Config object
    :return: A dict containing a misp-modules response
    """

    collection = taxii2client.Collection(
        config.url, user=config.username, password=config.password
    )

    # No point in asking for more than our overall limit.
    page_size = min(_PAGE_SIZE, config.stix_object_limit)

    kwargs = {
        "per_request": page_size
    }

    if config.spec_version:
        kwargs["spec_version"] = config.spec_version
    if config.version:
        kwargs["version"] = config.version
    if config.id:
        kwargs["id"] = config.id
    if config.type:
        kwargs["type"] = config.type
    if config.added_after:
        kwargs["added_after"] = config.added_after

    pages = taxii2client.as_pages(
        collection.get_objects,
        **kwargs
    )

    # Chain all the objects from all pages together...
    all_stix_objects = itertools.chain.from_iterable(
        taxii_envelope.get("objects", [])
        for taxii_envelope in pages
    )

    # And only take the first N objects from that.
    limited_stix_objects = itertools.islice(
        all_stix_objects, 0, config.stix_object_limit
    )

    # Collect into a list. This is... unfortunate, but I don't think the
    # converter will work incrementally (will it?). It expects all objects to
    # be given at once.
    #
    # It may also be desirable to have all objects available at once so that
    # cross-references can be made where possible, but it results in increased
    # memory usage.
    stix_objects = list(limited_stix_objects)

    # The STIX 2.0 converter wants a 2.0 bundle. (Hope the TAXII server isn't
    # returning 2.1 objects!)
    bundle20 = stix2.v20.Bundle(stix_objects, allow_custom=True)

    converter = misp_modules.lib.stix2misp.ExternalStixParser()
    converter.handler(
        bundle20, None, [0, "event", str(_synonymsToTagNames_path)]
    )

    attributes = [
        _pymisp_to_json_serializable(attr)
        for attr in converter.misp_event.attributes
    ]

    objects = [
        _pymisp_to_json_serializable(obj)
        for obj in converter.misp_event.objects
    ]

    tags = [
        _pymisp_to_json_serializable(tag)
        for tag in converter.misp_event.tags
    ]

    result = {
        "results": {
            "Attribute": attributes,
            "Object": objects,
            "Tag": tags
        }
    }

    return result


def handler(q=False):
    if q is False:
        return False
    request = json.loads(q)

    result = None
    config = None

    try:
        config = _get_config(request["config"])
    except ConfigError as e:
        result = misperrors
        result["error"] = e.args[0]

    if not result:
        try:
            result = _query_taxii(config)
        except taxii2client.exceptions.TAXIIServiceException as e:
            result = misperrors
            result["error"] = str(e)
        except requests.HTTPError as e:
            # Let's give a better error message for auth issues.
            if e.response.status_code in (401, 403):
                result = misperrors
                result["error"] = "Access was denied."
            else:
                raise

    return result


def introspection():
    mispattributes["userConfig"] = userConfig
    return mispattributes


def version():
    moduleinfo['config'] = moduleconfig
    return moduleinfo

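To orient readers, a minimal sketch of the request body misp-modules would hand to this importer's handler() (written as a Python dict; the module name "taxii21" and all values are assumptions for illustration, and the config keys mirror userConfig above):

    request_body = {
        "module": "taxii21",   # assumed registered module name
        "config": {
            "url": "https://taxii.example.com/api/v21/collections/1234/",
            "added_after": "2021-01-01T00:00:00Z",
            "type": "indicator,observed-data",   # multi-values, comma-separated
            "STIX object limit": "100",
        },
    }
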
mkdocs.yml
@@ -23,12 +23,11 @@ extra:
   search:
     languages: "en"
   social:
-    - type: globe
-      link: https://www.misp-project.org/
-    - type: github-alt
-      link: https://github.com/MISP
-    - type: twitter
-      link: https://twitter.com/MISPProject
+    - icon: fontawesome/brands/twitter
+      link: https://twitter.com/MISPProject
+    - icon: fontawesome/brands/github-alt
+      link: https://github.com/MISP


 theme:
   name: material

@@ -0,0 +1,3 @@
[build-system]
requires = ["setuptools", "wheel"]
build-backend = "setuptools.build_meta:__legacy__"

@@ -0,0 +1,10 @@
{
    "censys_enrich": {
        "api_id" : "<api_id>",
        "api_secret": "<api_secret>"
    },
    "crowdstrike_falcon": {
        "api_id" : "<api_id>",
        "apikey": "<apikey>"
    }
}

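A sketch of how the test suite might consume these placeholders (the file path and loading code are assumptions for illustration; the tests below only rely on them being exposed as self.configs):

    import json
    from pathlib import Path

    # Hypothetical location of the JSON config shown above.
    configs = json.loads(Path("tests/test_config.json").read_text())
    if "censys_enrich" in configs:
        api_config = configs["censys_enrich"]   # {"api_id": "<api_id>", ...}
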
@@ -65,6 +65,8 @@ class TestExpansions(unittest.TestCase):
         if not isinstance(data, dict):
             print(json.dumps(data, indent=2))
             return data
+        if 'results' not in data:
+            return data
         for result in data['results']:
             values = result['values']
             if values:

@@ -110,6 +112,9 @@ class TestExpansions(unittest.TestCase):
         self.assertEqual(self.get_object(response), 'asn')

     def test_btc_steroids(self):
+        if LiveCI:
+            return True
+
         query = {"module": "btc_steroids", "btc": "1ES14c7qLb5CYhLMUekctxLgc1FV2Ti9DA"}
         response = self.misp_modules_post(query)
         try:

@@ -210,6 +215,25 @@ class TestExpansions(unittest.TestCase):
         response = self.misp_modules_post(query)
         self.assertEqual(self.get_values(response), '\nThis is an basic test docx file. ')

+    def test_censys(self):
+        module_name = "censys_enrich"
+        query = {
+            "attribute": {"type" : "ip-dst", "value": "8.8.8.8", "uuid": ""},
+            "module": module_name,
+            "config": {}
+        }
+        if module_name in self.configs:
+            query['config'] = self.configs[module_name]
+            response = self.misp_modules_post(query)
+
+            if self.configs[module_name].get('api_id') == '<api_id>':
+                self.assertTrue(self.get_errors(response).startswith('ERROR: param '))
+            else:
+                self.assertGreaterEqual(len(response.json().get('results', {}).get('Attribute')), 1)
+        else:
+            response = self.misp_modules_post(query)
+            self.assertTrue(self.get_errors(response).startswith('Please provide config options'))
+
     def test_farsight_passivedns(self):
         module_name = 'farsight_passivedns'
         if module_name in self.configs:

@@ -250,7 +274,7 @@ class TestExpansions(unittest.TestCase):
             self.assertEqual(self.get_values(response), 'This IP is commonly spoofed in Internet-scan activity')
         except Exception:
             self.assertIn(
-                self.get_errors(reponse),
+                self.get_errors(response),
                 (
                     "Unauthorized. Please check your API key.",
                     "Too many requests. You've hit the rate-limit."

@@ -260,6 +284,7 @@ class TestExpansions(unittest.TestCase):
         response = self.misp_modules_post(query)
         self.assertEqual(self.get_errors(response), 'Missing Greynoise API key.')

+    @unittest.skip("Service doesn't work")
     def test_ipasn(self):
         query = {"module": "ipasn",
                  "attribute": {"type": "ip-src",

@@ -268,6 +293,21 @@ class TestExpansions(unittest.TestCase):
         response = self.misp_modules_post(query)
         self.assertEqual(self.get_object(response), 'asn')

+    def test_ipqs_fraud_and_risk_scoring(self):
+        module_name = "ipqs_fraud_and_risk_scoring"
+        query = {"module": module_name,
+                 "attribute": {"type": "email",
+                               "value": "noreply@ipqualityscore.com",
+                               "uuid": "ea89a33b-4ab7-4515-9f02-922a0bee333d"},
+                 "config": {}}
+        if module_name in self.configs:
+            query['config'] = self.configs[module_name]
+            response = self.misp_modules_post(query)
+            self.assertEqual(self.get_values(response)['message'], 'Success.')
+        else:
+            response = self.misp_modules_post(query)
+            self.assertEqual(self.get_errors(response), 'IPQualityScore apikey is missing')
+
     def test_macaddess_io(self):
         module_name = 'macaddress_io'
         query = {"module": module_name, "mac-address": "44:38:39:ff:ef:57"}

@@ -298,7 +338,7 @@ class TestExpansions(unittest.TestCase):
             encoded = b64encode(f.read()).decode()
         query = {"module": "ods_enrich", "attachment": filename, "data": encoded}
         response = self.misp_modules_post(query)
-        self.assertEqual(self.get_values(response), '\n column_0\n0 ods test')
+        self.assertEqual(self.get_values(response), '\n column.0\n0 ods test')

     def test_odt(self):
         filename = 'test.odt'

@@ -310,6 +350,8 @@ class TestExpansions(unittest.TestCase):

     def test_onyphe(self):
         module_name = "onyphe"
+        if LiveCI:
+            return True
         query = {"module": module_name, "ip-src": "8.8.8.8"}
         if module_name in self.configs:
             query["config"] = self.configs[module_name]

@@ -324,6 +366,8 @@ class TestExpansions(unittest.TestCase):

     def test_onyphe_full(self):
         module_name = "onyphe_full"
+        if LiveCI:
+            return True
         query = {"module": module_name, "ip-src": "8.8.8.8"}
         if module_name in self.configs:
             query["config"] = self.configs[module_name]

@@ -336,6 +380,7 @@ class TestExpansions(unittest.TestCase):
         response = self.misp_modules_post(query)
         self.assertEqual(self.get_errors(response), 'Onyphe authentication is missing')

+    @unittest.skip("Unreliable results")
     def test_otx(self):
         query_types = ('domain', 'ip-src', 'md5')
         query_values = ('circl.lu', '8.8.8.8', '616eff3e9a7575ae73821b4668d2801c')

@@ -464,7 +509,7 @@ class TestExpansions(unittest.TestCase):
         query = {"module": "sourcecache", "link": input_value}
         response = self.misp_modules_post(query)
         self.assertEqual(self.get_values(response), input_value)
-        self.assertTrue(self.get_data(response).startswith('PCFET0NUWVBFIEhUTUw+CjwhLS0KCUFyY2FuYSBieSBIVE1MN'))
+        self.assertTrue(self.get_data(response))

     def test_stix2_pattern_validator(self):
         query = {"module": "stix2_pattern_syntax_validator", "stix2-pattern": "[ipv4-addr:value = '8.8.8.8']"}

@@ -481,6 +526,25 @@ class TestExpansions(unittest.TestCase):
         response = self.misp_modules_post(query)
         self.assertTrue(self.get_values(response), result)

+    def test_crowdstrike(self):
+        module_name = "crowdstrike_falcon"
+        query = {
+            "attribute": {"type": "sha256", "value": "", "uuid": ""},
+            "module": module_name,
+            "config": {}
+        }
+        if module_name in self.configs:
+            query['config'] = self.configs[module_name]
+            response = self.misp_modules_post(query)
+
+            if self.configs[module_name].get('api_id') == '<api_id>':
+                self.assertTrue(self.get_errors(response).startswith('HTTP Error:'))
+            else:
+                self.assertGreaterEqual(len(response.json().get('results', {}).get('Attribute')), 1)
+        else:
+            response = self.misp_modules_post(query)
+            self.assertTrue(self.get_errors(response).startswith('CrowdStrike apikey is missing'))
+
     def test_threatminer(self):
         if LiveCI:
             return True