Merge branch 'main' of github.com:MISP/misp-modules into new_module

new_module
Christian Studer 2022-10-21 14:38:13 +02:00
commit 8b12b811a8
130 changed files with 17432 additions and 3371 deletions

289
.gitchangelog.rc Normal file

@ -0,0 +1,289 @@
# -*- coding: utf-8; mode: python -*-
##
## Format
##
## ACTION: [AUDIENCE:] COMMIT_MSG [!TAG ...]
##
## Description
##
## ACTION is one of 'chg', 'fix', 'new'
##
## It tells WHAT the change is about.
##
## 'chg' is for refactor, small improvement, cosmetic changes...
## 'fix' is for bug fixes
## 'new' is for new features, big improvement
##
## AUDIENCE is optional and one of 'dev', 'usr', 'pkg', 'test', 'doc'|'docs'
##
## It tells WHO is concerned by the change.
##
## 'dev' is for developers (API changes, refactors...)
## 'usr' is for end users (UI changes)
## 'pkg' is for packagers (packaging changes)
## 'test' is for testers (test-only changes)
## 'doc' is for documentation writers (doc-only changes)
##
## COMMIT_MSG is ... well ... the commit message itself.
##
## TAGs are additional adjectives such as 'refactor', 'minor', 'cosmetic'
##
## They are preceded with a '!' or a '@' (prefer the former, as the
## latter is wrongly interpreted in GitHub). Commonly used tags are:
##
## 'refactor' is obviously for refactoring code only
## 'minor' is for a trivial change (a typo, adding a comment)
## 'cosmetic' is for a cosmetic-driven change (re-indentation, 80-col...)
## 'wip' is for partial functionality but complete subfunctionality.
##
## Example:
##
## new: usr: support of bazaar implemented
## chg: re-indented some lines !cosmetic
## new: dev: updated code to be compatible with last version of killer lib.
## fix: pkg: updated year of licence coverage.
## new: test: added a bunch of test around user usability of feature X.
## fix: typo in spelling my name in comment. !minor
##
## Please note that multi-line commit messages are supported, and only the
## first line will be considered as the "summary" of the commit message. So
## tags and other rules only apply to the summary. The body of the commit
## message will be displayed in the changelog without reformatting.
##
## ``ignore_regexps`` is a list of regexps
##
## Any commit having its full commit message matching any regexp listed here
## will be ignored and won't be reported in the changelog.
##
ignore_regexps = [
    r'@minor', r'!minor',
    r'@cosmetic', r'!cosmetic',
    r'@refactor', r'!refactor',
    r'@wip', r'!wip',
    r'^([cC]hg|[fF]ix|[nN]ew)\s*:\s*[p|P]kg:',
    r'^([cC]hg|[fF]ix|[nN]ew)\s*:\s*[d|D]ev:',
    r'^(.{3,3}\s*:)?\s*[fF]irst commit.?\s*$',
]
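As a quick illustration (not part of the configuration file itself), the sketch below applies these patterns to a few made-up commit summaries to show which ones would be dropped from the changelog; gitchangelog's own matching logic may differ in detail.

import re

ignore_regexps = [
    r'@minor', r'!minor',
    r'@cosmetic', r'!cosmetic',
    r'@refactor', r'!refactor',
    r'@wip', r'!wip',
    r'^([cC]hg|[fF]ix|[nN]ew)\s*:\s*[p|P]kg:',
    r'^([cC]hg|[fF]ix|[nN]ew)\s*:\s*[d|D]ev:',
    r'^(.{3,3}\s*:)?\s*[fF]irst commit.?\s*$',
]

samples = [
    "fix: typo in spelling my name in comment. !minor",  # dropped: !minor tag
    "chg: pkg: updated year of licence coverage.",       # dropped: pkg audience
    "new: usr: support of bazaar implemented",            # kept
]

for message in samples:
    ignored = any(re.search(pattern, message) for pattern in ignore_regexps)
    print("ignored" if ignored else "kept", "|", message)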
## ``section_regexps`` is a list of 2-tuples associating a string label and a
## list of regexps
##
## Commit messages will be classified in sections thanks to this. Section
## titles are the labels, and a commit is classified under a section if any
## of the associated regexps matches.
##
## Please note that ``section_regexps`` will only classify commits and won't
## make any changes to the contents. So you'll probably want to check
## ``subject_process`` (or ``body_process``) to apply changes to the subject
## whenever you are tweaking this variable.
##
section_regexps = [
    ('New', [
        r'^[nN]ew\s*:\s*((dev|use?r|pkg|test|doc|docs)\s*:\s*)?([^\n]*)$',
    ]),
    ('Changes', [
        r'^[cC]hg\s*:\s*((dev|use?r|pkg|test|doc|docs)\s*:\s*)?([^\n]*)$',
    ]),
    ('Fix', [
        r'^[fF]ix\s*:\s*((dev|use?r|pkg|test|doc|docs)\s*:\s*)?([^\n]*)$',
    ]),
    ('Other', None  ## Match all lines
    ),
]
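For illustration only (again, not part of the file), a commit summary ends up in the first section whose regexps match it, with ('Other', None) acting as a catch-all:

import re

section_regexps = [
    ('New', [r'^[nN]ew\s*:\s*((dev|use?r|pkg|test|doc|docs)\s*:\s*)?([^\n]*)$']),
    ('Changes', [r'^[cC]hg\s*:\s*((dev|use?r|pkg|test|doc|docs)\s*:\s*)?([^\n]*)$']),
    ('Fix', [r'^[fF]ix\s*:\s*((dev|use?r|pkg|test|doc|docs)\s*:\s*)?([^\n]*)$']),
    ('Other', None),  # matches everything that did not fit above
]

def classify(summary):
    # Return the label of the first section whose regexps match the summary.
    for label, regexps in section_regexps:
        if regexps is None or any(re.search(r, summary) for r in regexps):
            return label

print(classify("new: usr: support of bazaar implemented"))  # -> New
print(classify("Merge branch 'main' into new_module"))      # -> Other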
## ``body_process`` is a callable
##
## This callable will be given the original body and its result will
## be used in the changelog.
##
## Available constructs are:
##
## - any Python callable that takes one text argument and returns a text argument.
##
## - ReSub(pattern, replacement): will apply regexp substitution.
##
## - Indent(chars=" "): will indent the text with the given prefix.
## Please remember that template engines also get to modify the text and
## will usually indent the text themselves if needed.
##
## - Wrap(regexp=r"\n\n"): re-wrap text in separate paragraphs to fill 80 columns
##
## - noop: do nothing
##
## - ucfirst: ensure the first letter is uppercase.
## (usually used in the ``subject_process`` pipeline)
##
## - final_dot: ensure text finishes with a dot
## (usually used in the ``subject_process`` pipeline)
##
## - strip: remove any spaces before or after the content of the string
##
## - SetIfEmpty(msg="No commit message."): will set the text to the
## given ``msg`` if the current text is empty.
##
## Additionally, you can `pipe` the provided filters, for instance:
#body_process = Wrap(regexp=r'\n(?=\w+\s*:)') | Indent(chars=" ")
#body_process = Wrap(regexp=r'\n(?=\w+\s*:)')
#body_process = noop
body_process = ReSub(r'((^|\n)[A-Z]\w+(-\w+)*: .*(\n\s+.*)*)+$', r'') | strip
## ``subject_process`` is a callable
##
## This callable will be given the original subject and its result will
## be used in the changelog.
##
## Available constructs are those listed in ``body_process`` doc.
subject_process = (strip |
    ReSub(r'^([cC]hg|[fF]ix|[nN]ew)\s*:\s*((dev|use?r|pkg|test|doc|docs)\s*:\s*)?([^\n@]*)(@[a-z]+\s+)*$', r'\4') |
    SetIfEmpty("No commit message.") | ucfirst | final_dot)
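As a rough, plain-Python sketch of what that pipeline does (it does not use gitchangelog's own filter objects, just equivalent standard-library calls):

import re

def process_subject(subject):
    # strip, drop the action/audience prefix, provide a default message,
    # uppercase the first letter and make sure the subject ends with a dot.
    subject = subject.strip()
    subject = re.sub(
        r'^([cC]hg|[fF]ix|[nN]ew)\s*:\s*((dev|use?r|pkg|test|doc|docs)\s*:\s*)?([^\n@]*)(@[a-z]+\s+)*$',
        r'\4', subject)
    subject = subject or "No commit message."
    subject = subject[0].upper() + subject[1:]
    return subject if subject.endswith('.') else subject + '.'

print(process_subject("new: usr: support of bazaar implemented"))
# -> Support of bazaar implemented.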
## ``tag_filter_regexp`` is a regexp
##
## Tags that will be used for the changelog must match this regexp.
##
tag_filter_regexp = r'^v[0-9]+\.[0-9]+\.[0-9]+$'
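For example (illustration only, the tag names are made up), this pattern keeps plain ``vX.Y.Z`` release tags and rejects everything else:

import re

tag_filter_regexp = r'^v[0-9]+\.[0-9]+\.[0-9]+$'

for tag in ("v2.4.150", "2.4.150", "v2.4.150-rc1"):
    print(tag, "->", bool(re.match(tag_filter_regexp, tag)))
# v2.4.150 -> True, 2.4.150 -> False, v2.4.150-rc1 -> False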
## ``unreleased_version_label`` is a string or a callable that outputs a string
##
## This label will be used as the changelog title of the last set of changes
## between the last valid tag and HEAD, if any.
unreleased_version_label = "%%version%% (unreleased)"
## ``output_engine`` is a callable
##
## This will change the output format of the generated changelog file
##
## Available choices are:
##
## - rest_py
##
## Legacy pure-Python engine; outputs reStructuredText.
## This is the default.
##
## - mustache(<template_name>)
##
## Template name could be any of the available templates in
## ``templates/mustache/*.tpl``.
## Requires python package ``pystache``.
## Examples:
## - mustache("markdown")
## - mustache("restructuredtext")
##
## - makotemplate(<template_name>)
##
## Template name could be any of the available templates in
## ``templates/mako/*.tpl``.
## Requires python package ``mako``.
## Examples:
## - makotemplate("restructuredtext")
##
#output_engine = rest_py
#output_engine = mustache("restructuredtext")
output_engine = mustache("markdown")
#output_engine = makotemplate("restructuredtext")
## ``include_merge`` is a boolean
##
## This option tells git-log whether to include merge commits in the log.
## The default is to include them.
include_merge = True
## ``log_encoding`` is a string identifier
##
## This option tells gitchangelog what encoding is output by ``git log``.
## The default is to be clever about it: it checks ``git config`` for
## ``i18n.logOutputEncoding``, and if not found will default to git's own
## default: ``utf-8``.
#log_encoding = 'utf-8'
## ``publish`` is a callable
##
## Sets what ``gitchangelog`` should do with the output generated by
## the output engine. ``publish`` is a callable taking one argument
## that is an iterator on lines from the output engine.
##
## Some helper callables are provided:
##
## Available choices are:
##
## - stdout
##
## Outputs directly to standard output
## (This is the default)
##
## - FileInsertAtFirstRegexMatch(file, pattern, idx=lambda m: m.start())
##
## Creates a callable that will parse the given file for the given
## regex pattern and will insert the output in the file.
## ``idx`` is a callable that receives the match object and
## must return an integer index indicating where to insert
## the output in the file. The default is to return the position of
## the start of the matched string.
##
## - FileRegexSubst(file, pattern, replace, flags)
##
## Applies a replacement in place in the given file. Your regex pattern must
## take care of everything and might be more complex. Check the README
## for a complete copy-pastable example.
##
# publish = FileInsertIntoFirstRegexMatch(
# "CHANGELOG.rst",
# r'/(?P<rev>[0-9]+\.[0-9]+(\.[0-9]+)?)\s+\([0-9]+-[0-9]{2}-[0-9]{2}\)\n--+\n/',
# idx=lambda m: m.start(1)
# )
#publish = stdout
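For illustration, ``publish`` only needs to be a callable that consumes the iterator of generated lines; a minimal hand-rolled publisher (hypothetical, not one of the helpers shipped with gitchangelog) could look like this:

def publish_to_file(path="ChangeLog.md"):
    # Return a callable writing the generated changelog chunks to ``path``.
    def publish(lines):
        with open(path, "w", encoding="utf-8") as output:
            for line in lines:
                output.write(line)
    return publish

# publish = publish_to_file("ChangeLog.md")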
## ``revs`` is a list of callable or a list of string
##
## Callables will be called and resolved to strings, allowing dynamic
## computation of these values. The result will be used as revisions for
## gitchangelog (as if directly stated on the command line). This allows
## filtering exactly which commits will be read by gitchangelog.
##
## To get full documentation on the format of these strings, please
## refer to the ``git rev-list`` arguments. There are many examples.
##
## Using callables is especially useful, for instance, if you
## are using gitchangelog to generate your changelog incrementally.
##
## Some helpers are provided, you can use them::
##
## - FileFirstRegexMatch(file, pattern): will return a callable that will
## return the first string match for the given pattern in the given file.
## If you use named sub-patterns in your regex pattern, it'll output only
## the string matching the regex pattern named "rev".
##
## - Caret(rev): will return the rev prefixed by a "^", which is a
## way to remove the given revision and all its ancestors.
##
## Please note that if you provide a rev-list on the command line, it'll
## replace this value (which will then be ignored).
##
## If empty, then ``gitchangelog`` will act as if it had to generate a full
## changelog.
##
## The default is to use all commits to make the changelog.
#revs = ["^1.0.3", ]
#revs = [
# Caret(
# FileFirstRegexMatch(
# "CHANGELOG.rst",
# r"(?P<rev>[0-9]+\.[0-9]+(\.[0-9]+)?)\s+\([0-9]+-[0-9]{2}-[0-9]{2}\)\n--+\n")),
# "HEAD"
#]
revs = []

53
.github/workflows/python-package.yml vendored Normal file

@ -0,0 +1,53 @@
name: Python package

on:
  push:
    branches: [ main ]
  pull_request:
    branches: [ main ]

jobs:
  build:
    runs-on: ubuntu-latest
    strategy:
      fail-fast: false
      matrix:
        python-version: ["3.7", "3.8", "3.9", "3.10"]

    steps:
    - name: Install packages
      run: |
        sudo apt-get install libpoppler-cpp-dev libzbar0 tesseract-ocr
    - uses: actions/checkout@v2
    - name: Set up Python ${{ matrix.python-version }}
      uses: actions/setup-python@v2
      with:
        python-version: ${{ matrix.python-version }}
    - name: Cache Python dependencies
      uses: actions/cache@v2
      with:
        path: ~/.cache/pip
        key: ${{ runner.os }}-pip-${{ matrix.python-version }}-${{ hashFiles('REQUIREMENTS') }}
    - name: Install dependencies
      run: |
        python -m pip install --upgrade pip
        python -m pip install flake8 pytest
        # pyfaup must be installed manually (?)
        pip install -r REQUIREMENTS pyfaup
        pip install .
    - name: Lint with flake8
      run: |
        # stop the build if there are Python syntax errors or undefined names
        flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics
        # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide
        flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics
    - name: Test with pytest
      run: |
        # Run server in background
        misp-modules -l 127.0.0.1 -s &
        sleep 5
        # Check if modules are running
        curl -sS localhost:6666/modules
        # Run tests
        pytest tests

11
.gitignore vendored

@ -10,4 +10,13 @@ misp_modules.egg-info/
docs/expansion*
docs/import_mod*
docs/export_mod*
site*
site*
#pycharm env
.idea/*
#venv
venv*
#vscode
.vscode*

4
.gitmodules vendored Normal file

@ -0,0 +1,4 @@
[submodule "misp_modules/lib/misp-objects"]
path = misp_modules/lib/misp-objects
url = https://github.com/MISP/misp-objects.git
branch = main

.travis.yml

@ -11,13 +11,11 @@ python:
- "3.7-dev"
- "3.8-dev"
before_install:
- docker build -t misp-modules --build-arg BUILD_DATE=$(date -u +"%Y-%m-%d") docker/
install:
- sudo apt-get install libzbar0 libzbar-dev libpoppler-cpp-dev tesseract-ocr libfuzzy-dev libcaca-dev liblua5.3-dev
- pip install pipenv
- pipenv install --dev
- pip install -r REQUIREMENTS
# - pipenv install --dev
# install gtcaca
- git clone git://github.com/stricaud/gtcaca.git
- mkdir -p gtcaca/build
@ -37,20 +35,22 @@ install:
- popd
script:
- pipenv run coverage run -m --parallel-mode --source=misp_modules misp_modules.__init__ -l 127.0.0.1 &
- pip install coverage
- coverage run -m --parallel-mode --source=misp_modules misp_modules.__init__ -l 127.0.0.1 &
- pid=$!
- sleep 5
- pipenv run nosetests --with-coverage --cover-package=misp_modules
- nosetests --with-coverage --cover-package=misp_modules
- kill -s KILL $pid
- pushd ~/
- pipenv run coverage run -m --parallel-mode --source=misp_modules misp_modules.__init__ -s -l 127.0.0.1 &
- coverage run -m --parallel-mode --source=misp_modules misp_modules.__init__ -s -l 127.0.0.1 &
- pid=$!
- popd
- sleep 5
- pipenv run nosetests --with-coverage --cover-package=misp_modules
- nosetests --with-coverage --cover-package=misp_modules
- kill -s KILL $pid
- pipenv run flake8 --ignore=E501,W503,E226 misp_modules
- pip install flake8
- flake8 --ignore=E501,W503,E226,E126 misp_modules
after_success:
- pipenv run coverage combine .coverage*
- pipenv run codecov
- coverage combine .coverage*
- codecov

4602
ChangeLog.md Normal file

File diff suppressed because it is too large.

3
DOC-REQUIREMENTS Normal file

@ -0,0 +1,3 @@
mkdocs
pymdown-extensions
mkdocs-material

Makefile

@ -3,12 +3,15 @@
.PHONY: prepare_docs generate_docs ci_generate_docs test_docs
prepare_docs:
cd doc; python generate_documentation.py
cd documentation; python3 generate_documentation.py
mkdir -p docs/expansion/logos docs/export_mod/logos docs/import_mod/logos
cp -R doc/logos/* docs/expansion/logos
cp -R doc/logos/* docs/export_mod/logos
cp -R doc/logos/* docs/import_mod/logos
cp LICENSE docs/license.md
mkdir -p docs/logos
cd documentation; cp -R ./logos/* ../docs/logos
cd documentation; cp -R ./logos/* ../docs/expansion/logos
cd documentation; cp -R ./logos/* ../docs/export_mod/logos
cd documentation; cp -R ./logos/* ../docs/import_mod/logos
cp ./documentation/mkdocs/*.md ./docs
cp LICENSE ./docs/license.md
install_requirements:
pip install -r docs/REQUIREMENTS.txt

44
Pipfile

@ -11,58 +11,70 @@ flake8 = "*"
[packages]
dnspython = "*"
requests = {extras = ["security"],version = "*"}
requests = { extras = ["security"], version = "*" }
urlarchiver = "*"
passivetotal = "*"
pypdns = "*"
pypssl = "*"
pyeupi = "*"
uwhois = {editable = true,git = "https://github.com/Rafiot/uwhoisd.git",ref = "testing",subdirectory = "client"}
pymisp = {editable = true,extras = ["fileobjects,openioc,pdfexport"],git = "https://github.com/MISP/PyMISP.git"}
pyonyphe = {editable = true,git = "https://github.com/sebdraven/pyonyphe"}
pydnstrails = {editable = true,git = "https://github.com/sebdraven/pydnstrails"}
pymisp = { extras = ["fileobjects,openioc,pdfexport,email,url"], version = "*" }
pyonyphe = { git = "https://github.com/sebdraven/pyonyphe" }
pydnstrails = { git = "https://github.com/sebdraven/pydnstrails" }
pytesseract = "*"
pygeoip = "*"
beautifulsoup4 = "*"
oauth2 = "*"
yara-python = "==3.8.1"
sigmatools = "*"
stix2 = "*"
stix2-patterns = "*"
taxii2-client = "*"
maclookup = "*"
vulners = "*"
blockchain = "*"
reportlab = "*"
pyintel471 = {editable = true,git = "https://github.com/MISP/PyIntel471.git"}
pyintel471 = { git = "https://github.com/MISP/PyIntel471.git" }
shodan = "*"
Pillow = "*"
Pillow = ">=8.2.0"
Wand = "*"
SPARQLWrapper = "*"
domaintools_api = "*"
misp-modules = {editable = true,path = "."}
pybgpranking = {editable = true,git = "https://github.com/D4-project/BGP-Ranking.git/",subdirectory = "client"}
pyipasnhistory = {editable = true,git = "https://github.com/D4-project/IPASN-History.git/",subdirectory = "client"}
misp-modules = { path = "." }
pybgpranking = { git = "https://github.com/D4-project/BGP-Ranking.git/", subdirectory = "client", ref = "68de39f6c5196f796055c1ac34504054d688aa59" }
pyipasnhistory = { git = "https://github.com/D4-project/IPASN-History.git/", subdirectory = "client", ref = "a2853c39265cecdd0c0d16850bd34621c0551b87" }
backscatter = "*"
pyzbar = "*"
opencv-python = "*"
np = "*"
ODTReader = {editable = true,git = "https://github.com/cartertemm/ODTReader.git/"}
ODTReader = { git = "https://github.com/cartertemm/ODTReader.git/" }
python-pptx = "*"
python-docx = "*"
ezodf = "*"
pandas = "*"
pandas_ods_reader = "*"
pandas = "==1.3.5"
pandas_ods_reader = "==0.1.2"
pdftotext = "*"
lxml = "*"
xlrd = "*"
idna-ssl = {markers = "python_version < '3.7'"}
jbxapi = "*"
geoip2 = "*"
apiosintDS = "*"
assemblyline_client = "*"
vt-graph-api = "*"
trustar = "*"
trustar = { git = "https://github.com/SteveClement/trustar-python.git" }
markdownify = "==0.5.3"
socialscan = "*"
dnsdb2 = "*"
clamd = "*"
aiohttp = ">=3.7.4"
tau-clients = "*"
vt-py = ">=0.7.1"
crowdstrike-falconpy = "0.9.0"
censys = "2.0.9"
mwdblib = "3.4.1"
ndjson = "0.3.1"
Jinja2 = "3.1.2"
mattermostdriver = "7.3.2"
openpyxl = "*"
[requires]
python_version = "3"
python_version = "3.7"

1597
Pipfile.lock generated

File diff suppressed because it is too large.

README.md

@ -1,17 +1,14 @@
# MISP modules
[![Build Status](https://travis-ci.org/MISP/misp-modules.svg?branch=master)](https://travis-ci.org/MISP/misp-modules)
[![Coverage Status](https://coveralls.io/repos/github/MISP/misp-modules/badge.svg?branch=master)](https://coveralls.io/github/MISP/misp-modules?branch=master)
[![codecov](https://codecov.io/gh/MISP/misp-modules/branch/master/graph/badge.svg)](https://codecov.io/gh/MISP/misp-modules)
[![Python package](https://github.com/MISP/misp-modules/actions/workflows/python-package.yml/badge.svg)](https://github.com/MISP/misp-modules/actions/workflows/python-package.yml)[![Coverage Status](https://coveralls.io/repos/github/MISP/misp-modules/badge.svg?branch=main)](https://coveralls.io/github/MISP/misp-modules?branch=main)
[![codecov](https://codecov.io/gh/MISP/misp-modules/branch/main/graph/badge.svg)](https://codecov.io/gh/MISP/misp-modules)
MISP modules are autonomous modules that can be used for expansion and other services in [MISP](https://github.com/MISP/MISP).
MISP modules are autonomous modules that can be used to extend [MISP](https://github.com/MISP/MISP) for new services such as expansion, import and export.
The modules are written in Python 3 following a simple API interface. The objective is to ease the extension of MISP functionalities
without modifying core components. The API is available via a simple REST API which is independent from MISP installation or configuration.
MISP modules support is included in MISP starting from version 2.4.28.
For more information: [Extending MISP with Python modules](https://www.misp-project.org/misp-training/3.1-misp-modules.pdf) slides from MISP training.
For more information: [Extending MISP with Python modules](https://www.misp-project.org/misp-training/3.1-misp-modules.pdf) slides from [MISP training](https://github.com/MISP/misp-training).
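Once a misp-modules server is running (see the installation sections below), the REST API can be queried directly; a minimal sketch with the ``requests`` library, assuming the default listen address 127.0.0.1:6666 used elsewhere in this changeset and the ``dns`` expansion module:

import requests

BASE_URL = "http://127.0.0.1:6666"  # default misp-modules listen address/port

# List the modules exposed by the running server.
modules = requests.get(f"{BASE_URL}/modules").json()
print([module["name"] for module in modules])

# Query a single module; the expected payload depends on the module,
# here a hostname is expanded with the dns module.
response = requests.post(
    f"{BASE_URL}/query",
    json={"module": "dns", "hostname": "www.circl.lu"},
)
print(response.json())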
## Existing MISP modules
@ -50,6 +47,7 @@ For more information: [Extending MISP with Python modules](https://www.misp-proj
* [hashdd](misp_modules/modules/expansion/hashdd.py) - a hover module to check file hashes against [hashdd.com](http://www.hashdd.com) including NSLR dataset.
* [hibp](misp_modules/modules/expansion/hibp.py) - a hover module to lookup against Have I Been Pwned?
* [html_to_markdown](misp_modules/modules/expansion/html_to_markdown.py) - Simple HTML to markdown converter
* [HYAS Insight](misp_modules/modules/expansion/hyasinsight.py) - a hover and expansion module to get information from [HYAS Insight](https://www.hyas.com/hyas-insight).
* [intel471](misp_modules/modules/expansion/intel471.py) - an expansion module to get info from [Intel471](https://intel471.com).
* [IPASN](misp_modules/modules/expansion/ipasn.py) - a hover and expansion to get the BGP ASN of an IP address.
* [iprep](misp_modules/modules/expansion/iprep.py) - an expansion module to get IP reputation from packetmail.net.
@ -60,6 +58,8 @@ For more information: [Extending MISP with Python modules](https://www.misp-proj
* [macaddress.io](misp_modules/modules/expansion/macaddress_io.py) - a hover module to retrieve vendor details and other information regarding a given MAC address or an OUI from [MAC address Vendor Lookup](https://macaddress.io). See [integration tutorial here](https://macaddress.io/integrations/MISP-module).
* [macvendors](misp_modules/modules/expansion/macvendors.py) - a hover module to retrieve mac vendor information.
* [MALWAREbazaar](misp_modules/modules/expansion/malwarebazaar.py) - an expansion module to query MALWAREbazaar with some payload.
* [McAfee MVISION Insights](misp_modules/modules/expansion/mcafee_insights_enrich.py) - an expansion module to enrich IOCs with McAfee MVISION Insights.
* [Mmdb server lookup](misp_modules/modules/expansion/mmdb_lookup.py) - an expansion module to enrich an IP address with geolocation information from an MMDB server such as ip.circl.lu.
* [ocr-enrich](misp_modules/modules/expansion/ocr_enrich.py) - an enrichment module to get OCRized data from images into MISP.
* [ods-enrich](misp_modules/modules/expansion/ods_enrich.py) - an enrichment module to get text out of OpenOffice spreadsheet document into MISP (using free-text parser).
* [odt-enrich](misp_modules/modules/expansion/odt_enrich.py) - an enrichment module to get text out of OpenOffice document into MISP (using free-text parser).
@ -89,6 +89,7 @@ For more information: [Extending MISP with Python modules](https://www.misp-proj
* [virustotal](misp_modules/modules/expansion/virustotal.py) - an expansion module to query the [VirusTotal](https://www.virustotal.com/gui/home) API with a high request rate limit required. (More details about the API: [here](https://developers.virustotal.com/reference))
* [virustotal_public](misp_modules/modules/expansion/virustotal_public.py) - an expansion module to query the [VirusTotal](https://www.virustotal.com/gui/home) API with a public key and a low request rate limit. (More details about the API: [here](https://developers.virustotal.com/reference))
* [VMray](misp_modules/modules/expansion/vmray_submit.py) - a module to submit a sample to VMray.
* [VMware NSX](misp_modules/modules/expansion/vmware_nsx.py) - a module to enrich a file or URL with VMware NSX Defender.
* [VulnDB](misp_modules/modules/expansion/vulndb.py) - a module to query [VulnDB](https://www.riskbasedsecurity.com/).
* [Vulners](misp_modules/modules/expansion/vulners.py) - an expansion module to expand information about CVEs using Vulners API.
* [whois](misp_modules/modules/expansion/whois.py) - a module to query a local instance of [uwhois](https://github.com/rafiot/uwhoisd).
@ -127,12 +128,14 @@ For more information: [Extending MISP with Python modules](https://www.misp-proj
## How to install and start MISP modules in a Python virtualenv? (recommended)
***Be sure to run the latest version of `pip`***. To install the latest version of pip, `pip install --upgrade pip` will do the job.
~~~~bash
sudo apt-get install python3-dev python3-pip libpq5 libjpeg-dev tesseract-ocr libpoppler-cpp-dev imagemagick virtualenv libopencv-dev zbar-tools libzbar0 libzbar-dev libfuzzy-dev build-essential -y
sudo -u www-data virtualenv -p python3 /var/www/MISP/venv
cd /usr/local/src/
chown -R www-data .
sudo git clone https://github.com/MISP/misp-modules.git
sudo chown -R www-data: .
sudo -u www-data git clone https://github.com/MISP/misp-modules.git
cd misp-modules
sudo -u www-data /var/www/MISP/venv/bin/pip install -I -r REQUIREMENTS
sudo -u www-data /var/www/MISP/venv/bin/pip install .
@ -140,14 +143,15 @@ sudo -u www-data /var/www/MISP/venv/bin/pip install .
sudo cp etc/systemd/system/misp-modules.service /etc/systemd/system/
sudo systemctl daemon-reload
sudo systemctl enable --now misp-modules
/var/www/MISP/venv/bin/misp-modules -l 127.0.0.1 -s & #to start the modules
sudo service misp-modules start #or
/var/www/MISP/venv/bin/misp-modules -l 127.0.0.1 & #to start the modules
~~~~
## How to install and start MISP modules on RHEL-based distributions?
As of this writing, the official RHEL repositories only contain Ruby 2.0.0 and Ruby 2.1 or higher is required. As such, this guide installs Ruby 2.2 from the [SCL](https://access.redhat.com/documentation/en-us/red_hat_software_collections/3/html/3.2_release_notes/chap-installation#sect-Installation-Subscribe) repository.
~~~~bash
sudo yum install rh-ruby22
sudo yum install rh-python36 rh-ruby22
sudo yum install openjpeg-devel
sudo yum install rubygem-rouge rubygem-asciidoctor zbar-devel opencv-devel gcc-c++ pkgconfig poppler-cpp-devel python-devel redhat-rpm-config
cd /var/www/MISP
@ -168,7 +172,7 @@ After=misp-workers.service
Type=simple
User=apache
Group=apache
ExecStart=/usr/bin/scl enable rh-python36 rh-ruby22 '/var/www/MISP/venv/bin/misp-modules l 127.0.0.1 s'
ExecStart=/usr/bin/scl enable rh-python36 rh-ruby22 '/var/www/MISP/venv/bin/misp-modules -l 127.0.0.1'
Restart=always
RestartSec=10

REQUIREMENTS

@ -1,117 +1,181 @@
-i https://pypi.org/simple
-e .
-e git+https://github.com/D4-project/BGP-Ranking.git/@fd9c0e03af9b61d4bf0b67ac73c7208a55178a54#egg=pybgpranking&subdirectory=client
-e git+https://github.com/D4-project/IPASN-History.git/@fc5e48608afc113e101ca6421bf693b7b9753f9e#egg=pyipasnhistory&subdirectory=client
-e git+https://github.com/MISP/PyIntel471.git@0df8d51f1c1425de66714b3a5a45edb69b8cc2fc#egg=pyintel471
-e git+https://github.com/MISP/PyMISP.git@bacd4c78cd83d3bf45dcf55cd9ad3514747ac985#egg=pymisp[fileobjects,openioc,pdfexport]
-e git+https://github.com/Rafiot/uwhoisd.git@783bba09b5a6964f25566089826a1be4b13f2a22#egg=uwhois&subdirectory=client
-e git+https://github.com/cartertemm/ODTReader.git/@49d6938693f6faa3ff09998f86dba551ae3a996b#egg=odtreader
-e git+https://github.com/sebdraven/pydnstrails@48c1f740025c51289f43a24863d1845ff12fd21a#egg=pydnstrails
-e git+https://github.com/sebdraven/pyonyphe@1ce15581beebb13e841193a08a2eb6f967855fcb#egg=pyonyphe
aiohttp==3.6.2; python_full_version >= '3.5.3'
antlr4-python3-runtime==4.8; python_version >= '3'
aiohttp==3.8.3
aiosignal==1.2.0 ; python_version >= '3.6'
antlr4-python3-runtime==4.9.3
anyio==3.6.1 ; python_full_version >= '3.6.2'
apiosintds==1.8.3
appdirs==1.4.4
argparse==1.4.0
assemblyline-client==4.0.1
async-timeout==3.0.1; python_full_version >= '3.5.3'
attrs==20.2.0; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'
assemblyline-client==4.5.0
async-timeout==4.0.2 ; python_version >= '3.6'
asynctest==0.13.0 ; python_version < '3.8'
attrs==22.1.0 ; python_version >= '3.5'
backoff==2.1.2 ; python_version >= '3.7' and python_version < '4.0'
backports.zoneinfo==0.2.1 ; python_version < '3.9'
backscatter==0.2.4
beautifulsoup4==4.9.3
beautifulsoup4==4.11.1
bidict==0.22.0 ; python_version >= '3.7'
blockchain==1.4.4
certifi==2020.6.20
cffi==1.14.3
chardet==3.0.4
click-plugins==1.1.1
click==7.1.2; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'
colorama==0.4.3; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'
configparser==5.0.1; python_version >= '3.6'
cryptography==3.1.1
censys==2.1.8
certifi==2022.9.24 ; python_version >= '3.6'
cffi==1.15.1
chardet==5.0.0
charset-normalizer==2.1.1 ; python_full_version >= '3.6.0'
clamd==1.0.2
decorator==4.4.2
deprecated==1.2.10; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'
dnspython==2.0.0
domaintools-api==0.5.2
click==8.1.3 ; python_version >= '3.7'
click-plugins==1.1.1
colorama==0.4.5 ; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'
colorclass==2.2.2 ; python_version >= '2.6'
commonmark==0.9.1
compressed-rtf==1.0.6
configparser==5.3.0 ; python_version >= '3.7'
crowdstrike-falconpy==1.2.2
cryptography==38.0.1 ; python_version >= '3.6'
dateparser==1.1.1 ; python_version >= '3.5'
decorator==5.1.1 ; python_version >= '3.5'
deprecated==1.2.13 ; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'
dnsdb2==1.1.4
dnspython==2.2.1
domaintools-api==1.0.1
easygui==0.98.3
ebcdic==1.1.1
enum-compat==0.0.3
ez-setup==0.9
et-xmlfile==1.1.0 ; python_version >= '3.6'
extract-msg==0.36.3
ezodf==0.3.2
future==0.18.2; python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3'
futures==3.1.1
geoip2==4.1.0
httplib2==0.18.1
idna-ssl==1.1.0; python_version < '3.7'
idna==2.10; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'
isodate==0.6.0
jbxapi==3.11.0
json-log-formatter==0.3.0
jsonschema==3.2.0
lief==0.10.1
lxml==4.5.2
filelock==3.8.0 ; python_version >= '3.7'
frozenlist==1.3.1 ; python_version >= '3.7'
future==0.18.2 ; python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3'
geoip2==4.6.0
h11==0.12.0 ; python_version >= '3.6'
httpcore==0.15.0 ; python_version >= '3.7'
httplib2==0.20.4 ; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'
httpx==0.23.0 ; python_version >= '3.7'
idna==3.4 ; python_version >= '3.5'
imapclient==2.3.1
importlib-metadata==4.12.0 ; python_version < '3.8'
importlib-resources==5.9.0 ; python_version < '3.9'
isodate==0.6.1
itsdangerous==2.1.2 ; python_version >= '3.7'
jaraco.classes==3.2.3 ; python_version >= '3.7'
jbxapi==3.18.0
jeepney==0.8.0 ; sys_platform == 'linux'
jinja2==3.1.2
json-log-formatter==0.5.1
jsonschema==4.16.0 ; python_version >= '3.7'
keyring==23.9.3 ; python_version >= '3.7'
lark-parser==0.12.0
lief==0.12.1
lxml==4.9.1
maclookup==1.0.3
markdownify==0.5.3
maxminddb==2.0.2; python_version >= '3.6'
multidict==4.7.6; python_version >= '3.5'
markupsafe==2.1.1 ; python_version >= '3.7'
mattermostdriver==7.3.2
maxminddb==2.2.0 ; python_version >= '3.6'
.
more-itertools==8.14.0 ; python_version >= '3.5'
msoffcrypto-tool==5.0.0 ; python_version >= '3' and platform_python_implementation != 'PyPy' or (platform_system != 'Windows' and platform_system != 'Darwin')
multidict==6.0.2 ; python_version >= '3.7'
mwdblib==4.3.1
ndjson==0.3.1
np==1.0.2
numpy==1.19.2; python_version >= '3.6'
numpy==1.21.6 ; python_version < '3.10' and platform_machine == 'aarch64'
oauth2==1.9.0.post1
opencv-python==4.4.0.44
pandas-ods-reader==0.0.7
pandas==1.1.3
passivetotal==1.0.31
pdftotext==2.1.5
pillow==7.2.0
progressbar2==3.53.1
psutil==5.7.2; python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3'
pycparser==2.20; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'
pycryptodome==3.9.8; python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3'
pycryptodomex==3.9.8; python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3'
pydeep==0.4
git+https://github.com/cartertemm/ODTReader.git/@49d6938693f6faa3ff09998f86dba551ae3a996b#egg=odtreader
olefile==0.46 ; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'
oletools==0.60.1
opencv-python==4.6.0.66
openpyxl==3.0.10
packaging==21.3 ; python_version >= '3.6'
pandas==1.3.5
pandas-ods-reader==0.1.2
passivetotal==2.5.9
pcodedmp==1.2.6
pdftotext==2.2.2
pillow==9.2.0
pkgutil-resolve-name==1.3.10 ; python_version < '3.9'
progressbar2==4.0.0 ; python_full_version >= '3.7.0'
psutil==5.9.2 ; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'
publicsuffixlist==0.8.0 ; python_version >= '2.6'
git+https://github.com/D4-project/BGP-Ranking.git/@68de39f6c5196f796055c1ac34504054d688aa59#egg=pybgpranking&subdirectory=client
pycparser==2.21
pycryptodome==3.15.0 ; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'
pycryptodomex==3.15.0 ; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'
pydeep2==0.5.1
git+https://github.com/sebdraven/pydnstrails@48c1f740025c51289f43a24863d1845ff12fd21a#egg=pydnstrails
pyeupi==1.1
pyfaup==1.2
pygeoip==0.3.2
pyopenssl==19.1.0
pyparsing==2.4.7; python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3'
pypdns==1.5.1
pypssl==2.1
pyrsistent==0.17.3; python_version >= '3.5'
pytesseract==0.3.6
pygments==2.13.0 ; python_version >= '3.6'
git+https://github.com/MISP/PyIntel471.git@917272fafa8e12102329faca52173e90c5256968#egg=pyintel471
git+https://github.com/D4-project/IPASN-History.git/@a2853c39265cecdd0c0d16850bd34621c0551b87#egg=pyipasnhistory&subdirectory=client
pymisp[email,fileobjects,openioc,pdfexport,url]==2.4.162
git+https://github.com/sebdraven/pyonyphe@d1d6741f8ea4475f3bb77ff20c876f08839cabd1#egg=pyonyphe
pyparsing==2.4.7 ; python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3'
pypdns==1.5.2
pypssl==2.2
pyrsistent==0.18.1 ; python_version >= '3.7'
pytesseract==0.3.10
python-baseconv==1.2.2
python-dateutil==2.8.1; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'
python-docx==0.8.10
python-engineio==3.13.2
python-magic==0.4.18
python-pptx==0.6.18
python-socketio[client]==4.6.0
python-utils==2.4.0
python-dateutil==2.8.2 ; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'
python-docx==0.8.11
python-engineio==4.3.4 ; python_version >= '3.6'
python-magic==0.4.27
python-pptx==0.6.21
python-socketio[client]==5.7.1 ; python_version >= '3.6'
python-utils==3.3.3 ; python_version >= '3.7'
pytz==2019.3
pyyaml==5.3.1
pyzbar==0.1.8
pyzipper==0.3.3; python_version >= '3.5'
rdflib==5.0.0
redis==3.5.3; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'
reportlab==3.5.53
requests-cache==0.5.2
requests[security]==2.24.0
shodan==1.23.1
sigmatools==0.18.1
six==1.15.0; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'
pytz-deprecation-shim==0.1.0.post0 ; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5'
pyyaml==6.0 ; python_version >= '3.6'
pyzbar==0.1.9
pyzipper==0.3.6 ; python_version >= '3.5'
rdflib==6.2.0 ; python_version >= '3.7'
redis==4.3.4 ; python_version >= '3.6'
regex==2022.3.2 ; python_version >= '3.6'
reportlab==3.6.11
requests==2.28.1
requests-cache==0.6.4 ; python_version >= '3.6'
requests-file==1.5.1
rfc3986[idna2008]==1.5.0
rich==12.5.1 ; python_full_version >= '3.6.3' and python_full_version < '4.0.0'
rtfde==0.0.2
secretstorage==3.3.3 ; sys_platform == 'linux'
setuptools==65.4.0 ; python_version >= '3.7'
shodan==1.28.0
sigmatools==0.19.1
simplejson==3.17.6 ; python_version >= '2.5' and python_version not in '3.0, 3.1, 3.2, 3.3'
six==1.16.0 ; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'
sniffio==1.3.0 ; python_version >= '3.7'
socialscan==1.4.2
socketio-client==0.5.7.4
soupsieve==2.0.1; python_version >= '3.0'
sparqlwrapper==1.8.5
stix2-patterns==1.3.1
tabulate==0.8.7
tornado==6.0.4; python_version >= '3.5'
trustar==0.3.33
tzlocal==2.1
soupsieve==2.3.2.post1 ; python_version >= '3.6'
sparqlwrapper==2.0.0
stix2==3.0.1
stix2-patterns==2.0.0
tabulate==0.8.10 ; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'
tau-clients==0.2.9
taxii2-client==2.3.0
tldextract==3.3.1 ; python_version >= '3.7'
tornado==6.2 ; python_version >= '3.7'
tqdm==4.64.1 ; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'
git+https://github.com/SteveClement/trustar-python.git@6954eae38e0c77eaeef26084b6c5fd033925c1c7#egg=trustar
typing-extensions==4.3.0 ; python_version < '3.8'
tzdata==2022.4 ; python_version >= '3.6'
tzlocal==4.2 ; python_version >= '3.6'
unicodecsv==0.14.1
url-normalize==1.4.2; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5'
url-normalize==1.4.3 ; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5'
urlarchiver==0.2
urllib3==1.25.10; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4' and python_version < '4'
urllib3==1.26.12 ; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5' and python_version < '4'
validators==0.14.0
vt-graph-api==1.0.1
vulners==1.5.8
wand==0.6.3
websocket-client==0.57.0
wrapt==1.12.1
xlrd==1.2.0
xlsxwriter==1.3.6
vt-graph-api==2.2.0
vt-py==0.17.1
vulners==2.0.4
wand==0.6.10
websocket-client==1.4.1 ; python_version >= '3.7'
websockets==10.3 ; python_version >= '3.7'
wrapt==1.14.1 ; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'
xlrd==2.0.1
xlsxwriter==3.0.3 ; python_version >= '3.4'
yara-python==3.8.1
yarl==1.6.0; python_version >= '3.5'
yarl==1.8.1 ; python_version >= '3.7'
zipp==3.8.1 ; python_version >= '3.7'

docs/img/favicon.ico (new binary file, 1.1 KiB)

docs/img/misp.png (new binary file, 10 KiB)

120
docs/index.md Normal file

@ -0,0 +1,120 @@
# Home
[![Build Status](https://travis-ci.org/MISP/misp-modules.svg?branch=master)](https://travis-ci.org/MISP/misp-modules)
[![Coverage Status](https://coveralls.io/repos/github/MISP/misp-modules/badge.svg?branch=master)](https://coveralls.io/github/MISP/misp-modules?branch=master)
[![codecov](https://codecov.io/gh/MISP/misp-modules/branch/master/graph/badge.svg)](https://codecov.io/gh/MISP/misp-modules)
[![FOSSA Status](https://app.fossa.io/api/projects/git%2Bgithub.com%MISP%2Fmisp-modules.svg?type=shield)](https://app.fossa.io/projects/git%2Bgithub.com%2FMISP%2Fmisp-modules?ref=badge_shield)
MISP modules are autonomous modules that can be used for expansion and other services in [MISP](https://github.com/MISP/MISP).
The modules are written in Python 3 following a simple API interface. The objective is to ease the extension of MISP functionalities
without modifying core components. The API is available via a simple REST API which is independent from MISP installation or configuration.
MISP modules support is included in MISP starting from version `2.4.28`.
For more information: [Extending MISP with Python modules](https://www.circl.lu/assets/files/misp-training/switch2016/2-misp-modules.pdf) slides from MISP training.
## Existing MISP modules
### Expansion modules
* [Backscatter.io](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/expansion/backscatter_io.py) - a hover and expansion module to expand an IP address with mass-scanning observations.
* [BGP Ranking](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/expansion/bgpranking.py) - a hover and expansion module to expand an AS number with the ASN description, its history, and position in BGP Ranking.
* [BTC scam check](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/expansion/btc_scam_check.py) - An expansion hover module to instantly check if a BTC address has been abused.
* [BTC transactions](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/expansion/btc_steroids.py) - An expansion hover module to get a blockchain balance and the transactions from a BTC address in MISP.
* [CIRCL Passive DNS](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/expansion/circl_passivedns.py) - a hover and expansion module to expand hostname and IP addresses with passive DNS information.
* [CIRCL Passive SSL](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/expansion/circl_passivessl.py) - a hover and expansion module to expand IP addresses with the X.509 certificate seen.
* [countrycode](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/expansion/countrycode.py) - a hover module to tell you what country a URL belongs to.
* [CrowdStrike Falcon](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/expansion/crowdstrike_falcon.py) - an expansion module to expand using CrowdStrike Falcon Intel Indicator API.
* [CVE](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/expansion/cve.py) - a hover module to give more information about a vulnerability (CVE).
* [CVE advanced](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/expansion/cve_advanced.py) - An expansion module to query the CIRCL CVE search API for more information about a vulnerability (CVE).
* [Cuckoo submit](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/expansion/cuckoo_submit.py) - A hover module to submit malware sample, url, attachment, domain to Cuckoo Sandbox.
* [DBL Spamhaus](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/expansion/dbl_spamhaus.py) - a hover module to check Spamhaus DBL for a domain name.
* [DNS](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/expansion/dns.py) - a simple module to resolve MISP attributes like hostname and domain to expand IP addresses attributes.
* [docx-enrich](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/expansion/docx-enrich.py) - an enrichment module to get text out of Word document into MISP (using free-text parser).
* [DomainTools](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/expansion/domaintools.py) - a hover and expansion module to get information from [DomainTools](http://www.domaintools.com/) whois.
* [EUPI](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/expansion/eupi.py) - a hover and expansion module to get information about an URL from the [Phishing Initiative project](https://phishing-initiative.eu/?lang=en).
* [EQL](misp_modules/modules/expansion/eql.py) - an expansion module to generate event query language (EQL) from an attribute. [Event Query Language](https://eql.readthedocs.io/en/latest/)
* [Farsight DNSDB Passive DNS](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/expansion/farsight_passivedns.py) - a hover and expansion module to expand hostname and IP addresses with passive DNS information.
* [GeoIP](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/expansion/geoip_country.py) - a hover and expansion module to get GeoIP information from geolite/maxmind.
* [Greynoise](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/expansion/greynoise.py) - a hover to get information from greynoise.
* [hashdd](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/expansion/hashdd.py) - a hover module to check file hashes against [hashdd.com](http://www.hashdd.com) including NSLR dataset.
* [hibp](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/expansion/hibp.py) - a hover module to lookup against Have I Been Pwned?
* [intel471](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/expansion/intel471.py) - an expansion module to get info from [Intel471](https://intel471.com).
* [IPASN](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/expansion/ipasn.py) - a hover and expansion to get the BGP ASN of an IP address.
* [iprep](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/expansion/iprep.py) - an expansion module to get IP reputation from packetmail.net.
* [Joe Sandbox submit](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/expansion/joesandbox_submit.py) - Submit files and URLs to Joe Sandbox.
* [Joe Sandbox query](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/expansion/joesandbox_query.py) - Query Joe Sandbox with the link of an analysis and get the parsed data.
* [macaddress.io](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/expansion/macaddress_io.py) - a hover module to retrieve vendor details and other information regarding a given MAC address or an OUI from [MAC address Vendor Lookup](https://macaddress.io). See [integration tutorial here](https://macaddress.io/integrations/MISP-module).
* [macvendors](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/expansion/macvendors.py) - a hover module to retrieve mac vendor information.
* [ocr-enrich](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/expansion/ocr-enrich.py) - an enrichment module to get OCRized data from images into MISP.
* [ods-enrich](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/expansion/ods-enrich.py) - an enrichment module to get text out of OpenOffice spreadsheet document into MISP (using free-text parser).
* [odt-enrich](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/expansion/odt-enrich.py) - an enrichment module to get text out of OpenOffice document into MISP (using free-text parser).
* [onyphe](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/expansion/onyphe.py) - a module to process queries on Onyphe.
* [onyphe_full](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/expansion/onyphe_full.py) - a module to process full queries on Onyphe.
* [OTX](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/expansion/otx.py) - an expansion module for [OTX](https://otx.alienvault.com/).
* [passivetotal](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/expansion/passivetotal.py) - a [passivetotal](https://www.passivetotal.org/) module that queries a number of different PassiveTotal datasets.
* [pdf-enrich](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/expansion/pdf-enrich.py) - an enrichment module to extract text from PDF into MISP (using free-text parser).
* [pptx-enrich](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/expansion/pptx-enrich.py) - an enrichment module to get text out of PowerPoint document into MISP (using free-text parser).
* [qrcode](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/expansion/qrcode.py) - a module to decode QR codes, barcodes and similar codes from an image and enrich with the decoded values.
* [rbl](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/expansion/rbl.py) - a module to get RBL (Real-time Blackhole List) values from an attribute.
* [reversedns](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/expansion/reversedns.py) - Simple Reverse DNS expansion service to resolve reverse DNS from MISP attributes.
* [securitytrails](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/expansion/securitytrails.py) - an expansion module for [securitytrails](https://securitytrails.com/).
* [shodan](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/expansion/shodan.py) - a minimal [shodan](https://www.shodan.io/) expansion module.
* [Sigma queries](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/expansion/sigma_queries.py) - Experimental expansion module querying a sigma rule to convert it into all the available SIEM signatures.
* [Sigma syntax validator](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/expansion/sigma_syntax_validator.py) - Sigma syntax validator.
* [sourcecache](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/expansion/sourcecache.py) - a module to cache a specific link from a MISP instance.
* [STIX2 pattern syntax validator](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/expansion/stix2_pattern_syntax_validator.py) - a module to check a STIX2 pattern syntax.
* [ThreatCrowd](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/expansion/threatcrowd.py) - an expansion module for [ThreatCrowd](https://www.threatcrowd.org/).
* [threatminer](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/expansion/threatminer.py) - an expansion module to expand from [ThreatMiner](https://www.threatminer.org/).
* [urlhaus](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/expansion/urlhaus.py) - Query urlhaus to get additional data about a domain, hash, hostname, ip or url.
* [urlscan](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/expansion/urlscan.py) - an expansion module to query [urlscan.io](https://urlscan.io).
* [virustotal](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/expansion/virustotal.py) - an expansion module to query the [VirusTotal](https://www.virustotal.com/gui/home) API with a high request rate limit required. (More details about the API: [here](https://developers.virustotal.com/reference))
* [virustotal_public](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/expansion/virustotal_public.py) - an expansion module to query the [VirusTotal](https://www.virustotal.com/gui/home) API with a public key and a low request rate limit. (More details about the API: [here](https://developers.virustotal.com/reference))
* [VMray](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/expansion/vmray_submit.py) - a module to submit a sample to VMray.
* [VulnDB](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/expansion/vulndb.py) - a module to query [VulnDB](https://www.riskbasedsecurity.com/).
* [Vulners](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/expansion/vulners.py) - an expansion module to expand information about CVEs using Vulners API.
* [whois](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/expansion/whois.py) - a module to query a local instance of [uwhois](https://github.com/rafiot/uwhoisd).
* [wikidata](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/expansion/wiki.py) - a [wikidata](https://www.wikidata.org) expansion module.
* [xforce](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/expansion/xforceexchange.py) - an IBM X-Force Exchange expansion module.
* [xlsx-enrich](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/expansion/xlsx-enrich.py) - an enrichment module to get text out of an Excel document into MISP (using free-text parser).
* [YARA query](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/expansion/yara_query.py) - a module to create YARA rules from single hash attributes.
* [YARA syntax validator](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/expansion/yara_syntax_validator.py) - YARA syntax validator.
### Export modules
* [CEF](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/export_mod/cef_export.py) module to export Common Event Format (CEF).
* [Cisco FireSight Manager ACL rule](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/export_mod/cisco_firesight_manager_ACL_rule_export.py) module to export as rule for the Cisco FireSight manager ACL.
* [GoAML export](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/export_mod/goamlexport.py) module to export in [GoAML format](http://goaml.unodc.org/goaml/en/index.html).
* [Lite Export](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/export_mod/liteexport.py) module to export a lite event.
* [Mass EQL Export](misp_modules/modules/export_mod/mass_eql_export.py) module to export applicable attributes from an event to a mass EQL query.
* [PDF export](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/export_mod/pdfexport.py) module to export an event in PDF.
* [Nexthink query format](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/export_mod/nexthinkexport.py) module to export in Nexthink query format.
* [osquery](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/export_mod/osqueryexport.py) module to export in [osquery](https://osquery.io/) query format.
* [ThreatConnect](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/export_mod/threat_connect_export.py) module to export in ThreatConnect CSV format.
* [ThreatStream](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/export_mod/threatStream_misp_export.py) module to export in ThreatStream format.
### Import modules
* [CSV import](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/import_mod/csvimport.py) Customizable CSV import module.
* [Cuckoo JSON](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/import_mod/cuckooimport.py) Cuckoo JSON import.
* [Email Import](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/import_mod/email_import.py) Email import module for MISP to import basic metadata.
* [GoAML import](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/import_mod/goamlimport.py) Module to import [GoAML](http://goaml.unodc.org/goaml/en/index.html) XML format.
* [Joe Sandbox import](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/import_mod/joe_import.py) Parse data from a Joe Sandbox json report.
* [OCR](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/import_mod/ocr.py) Optical Character Recognition (OCR) module for MISP to import attributes from images, scans or faxes.
* [OpenIOC](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/import_mod/openiocimport.py) OpenIOC import based on PyMISP library.
* [ThreatAnalyzer](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/import_mod/threatanalyzer_import.py) - An import module to process ThreatAnalyzer archive.zip/analysis.json sandbox exports.
* [VMRay](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/import_mod/vmray_import.py) - An import module to process VMRay export.
## How to contribute your own module?
Fork the project, add your module, test it and make a pull request. Modules can also be private, as you can add a module to your own MISP installation.
For further information please see [Contribute](contribute/).
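To give an idea of what such a module looks like, here is a minimal expansion-module skeleton following the interface used by the modules listed above; the module description, attribute types and returned values are placeholders:

import json

misperrors = {'error': 'Error'}
mispattributes = {'input': ['hostname'], 'output': ['ip-src', 'ip-dst']}
moduleinfo = {'version': '0.1', 'author': 'Your Name',
              'description': 'Minimal example expansion module',
              'module-type': ['expansion', 'hover']}
moduleconfig = []


def handler(q=False):
    # Called for every enrichment request; ``q`` is the JSON query from MISP.
    if q is False:
        return False
    request = json.loads(q)
    hostname = request.get('hostname')
    if not hostname:
        misperrors['error'] = 'Unsupported attribute type'
        return misperrors
    # ... perform the actual lookup here ...
    return {'results': [{'types': ['ip-src'], 'values': ['127.0.0.1']}]}


def introspection():
    # Advertises which attribute types the module consumes and produces.
    return mispattributes


def version():
    moduleinfo['config'] = moduleconfig
    return moduleinfo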
## Licenses
[![FOSSA Status](https://app.fossa.io/api/projects/git%2Bgithub.com%MISP%2Fmisp-modules.svg?type=large)](https://app.fossa.io/projects/git%2Bgithub.com%2FMISP%2Fmisp-modules?ref=badge_large)
For further Information see also the [license file](license/).

(new binary file, 11 KiB)

(new binary file, 148 KiB)

docs/logos/misp-modules-full.svg Normal file

@ -0,0 +1,125 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<!-- Created with Inkscape (http://www.inkscape.org/) -->
<svg
xmlns:dc="http://purl.org/dc/elements/1.1/"
xmlns:cc="http://creativecommons.org/ns#"
xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
xmlns:svg="http://www.w3.org/2000/svg"
xmlns="http://www.w3.org/2000/svg"
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
width="200mm"
height="200mm"
viewBox="0 0 200 200"
version="1.1"
id="svg5004"
inkscape:export-filename="/home/adulau/git/misp-modules/docs/logos/misp-modules-full.png"
inkscape:export-xdpi="300"
inkscape:export-ydpi="300"
inkscape:version="0.92.5 (2060ec1f9f, 2020-04-08)"
sodipodi:docname="misp-modules-full.svg">
<defs
id="defs4998" />
<sodipodi:namedview
id="base"
pagecolor="#ffffff"
bordercolor="#666666"
borderopacity="1.0"
inkscape:pageopacity="0.0"
inkscape:pageshadow="2"
inkscape:zoom="0.35"
inkscape:cx="608.07786"
inkscape:cy="468.57143"
inkscape:document-units="mm"
inkscape:current-layer="layer1"
showgrid="false"
inkscape:window-width="1494"
inkscape:window-height="858"
inkscape:window-x="85"
inkscape:window-y="94"
inkscape:window-maximized="0" />
<metadata
id="metadata5001">
<rdf:RDF>
<cc:Work
rdf:about="">
<dc:format>image/svg+xml</dc:format>
<dc:type
rdf:resource="http://purl.org/dc/dcmitype/StillImage" />
<dc:title></dc:title>
</cc:Work>
</rdf:RDF>
</metadata>
<g
inkscape:label="Layer 1"
inkscape:groupmode="layer"
id="layer1"
transform="translate(0,-97)">
<path
id="path13429-79"
sodipodi:nodetypes="ccccc"
style="fill:none;stroke:#000000;stroke-width:3.43263125;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;opacity:1"
inkscape:connector-curvature="0"
d="m 164.77224,130.28857 -36.0861,12.64813 28.99649,24.92756 36.0861,-12.64812 z" />
<path
id="path13431-93"
sodipodi:nodetypes="ccccc"
style="fill:none;stroke:#000000;stroke-width:3.43263125;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;opacity:1"
inkscape:connector-curvature="0"
d="m 157.68263,167.86426 -7.08952,37.57568 -28.99649,-24.92756 7.08952,-37.57568 z" />
<path
id="path13433-2"
sodipodi:nodetypes="ccccc"
d="m 157.68263,167.86426 -7.08947,37.57566 36.08609,-12.64815 7.08954,-37.5756 z"
style="fill:none;stroke:#000000;stroke-width:3.43263125;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;opacity:1"
inkscape:connector-curvature="0" />
<path
id="path13429-1-3"
sodipodi:nodetypes="ccccc"
style="fill:none;stroke:#000000;stroke-width:3.43263125;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;opacity:1"
inkscape:connector-curvature="0"
d="m 73.247659,124.68112 -37.48957,-7.53084 12.222724,36.23233 37.48956,7.53084 z" />
<path
id="path13431-9-7"
sodipodi:nodetypes="ccccc"
style="fill:none;stroke:#000000;stroke-width:3.43263125;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;opacity:1"
inkscape:connector-curvature="0"
d="M 47.980813,153.38261 22.713972,182.08416 10.491268,145.85178 35.758089,117.15028 Z" />
<path
id="path13433-0-1"
sodipodi:nodetypes="ccccc"
d="m 47.980813,153.38261 -25.266857,28.70162 37.489568,7.53084 25.266907,-28.70153 z"
style="fill:none;stroke:#000000;stroke-width:3.43263125;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;opacity:1"
inkscape:connector-curvature="0" />
<path
id="path13429-9-2"
sodipodi:nodetypes="ccccc"
style="fill:none;stroke:#000000;stroke-width:3.43263125;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;opacity:1"
inkscape:connector-curvature="0"
d="m 108.76237,205.17588 -38.207108,1.54817 20.444152,32.31429 38.207146,-1.54817 z" />
<path
id="path13431-8-2"
sodipodi:nodetypes="ccccc"
style="fill:none;stroke:#000000;stroke-width:3.43263125;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;opacity:1"
inkscape:connector-curvature="0"
d="M 90.999414,239.03834 73.236473,272.90088 52.792296,240.5865 70.555262,206.72405 Z" />
<path
id="path13433-85-0"
sodipodi:nodetypes="ccccc"
d="m 90.999414,239.03834 -17.762941,33.86258 38.207127,-1.54817 17.76296,-33.86251 z"
style="fill:none;stroke:#000000;stroke-width:3.43263125;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;opacity:1"
inkscape:connector-curvature="0" />
<text
xml:space="preserve"
style="font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;font-size:31.40091705px;line-height:1.25;font-family:AnjaliOldLipi;-inkscape-font-specification:'AnjaliOldLipi, Normal';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;letter-spacing:0px;word-spacing:0px;writing-mode:lr-tb;text-anchor:start;fill:#000000;fill-opacity:1;stroke:none;stroke-width:0.78502285;"
x="1.889612"
y="292.74222"
id="text4996"><tspan
sodipodi:role="line"
id="tspan4994"
x="1.889612"
y="292.74222"
style="stroke-width:0.78502285;fill:#000000;">misp-modules</tspan></text>
</g>
</svg>

(rendered image, 5.7 KiB)

Binary file not shown. (new image, 7.8 KiB)

114 docs/logos/misp-modules.svg Normal file
View File

@ -0,0 +1,114 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<!-- Created with Inkscape (http://www.inkscape.org/) -->
<svg
xmlns:dc="http://purl.org/dc/elements/1.1/"
xmlns:cc="http://creativecommons.org/ns#"
xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
xmlns:svg="http://www.w3.org/2000/svg"
xmlns="http://www.w3.org/2000/svg"
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
width="200mm"
height="200mm"
viewBox="0 0 200 200"
version="1.1"
id="svg5004"
inkscape:export-filename="/home/adulau/misp-modules.png"
inkscape:export-xdpi="300"
inkscape:export-ydpi="300"
inkscape:version="0.92.5 (2060ec1f9f, 2020-04-08)"
sodipodi:docname="misp-modules.svg">
<defs
id="defs4998" />
<sodipodi:namedview
id="base"
pagecolor="#ffffff"
bordercolor="#666666"
borderopacity="1.0"
inkscape:pageopacity="0.0"
inkscape:pageshadow="2"
inkscape:zoom="0.35"
inkscape:cx="608.07786"
inkscape:cy="468.57143"
inkscape:document-units="mm"
inkscape:current-layer="layer1"
showgrid="false"
inkscape:window-width="1494"
inkscape:window-height="858"
inkscape:window-x="102"
inkscape:window-y="97"
inkscape:window-maximized="0" />
<metadata
id="metadata5001">
<rdf:RDF>
<cc:Work
rdf:about="">
<dc:format>image/svg+xml</dc:format>
<dc:type
rdf:resource="http://purl.org/dc/dcmitype/StillImage" />
<dc:title></dc:title>
</cc:Work>
</rdf:RDF>
</metadata>
<g
inkscape:label="Layer 1"
inkscape:groupmode="layer"
id="layer1"
transform="translate(0,-97)">
<path
id="path13429-79"
sodipodi:nodetypes="ccccc"
style="fill:none;stroke:#000000;stroke-width:3.43263125;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none"
inkscape:connector-curvature="0"
d="m 164.77224,130.28857 -36.0861,12.64813 28.99649,24.92756 36.0861,-12.64812 z" />
<path
id="path13431-93"
sodipodi:nodetypes="ccccc"
style="fill:none;stroke:#000000;stroke-width:3.43263125;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none"
inkscape:connector-curvature="0"
d="m 157.68263,167.86426 -7.08952,37.57568 -28.99649,-24.92756 7.08952,-37.57568 z" />
<path
id="path13433-2"
sodipodi:nodetypes="ccccc"
d="m 157.68263,167.86426 -7.08947,37.57566 36.08609,-12.64815 7.08954,-37.5756 z"
style="fill:none;stroke:#000000;stroke-width:3.43263125;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none"
inkscape:connector-curvature="0" />
<path
id="path13429-1-3"
sodipodi:nodetypes="ccccc"
style="fill:none;stroke:#000000;stroke-width:3.43263125;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none"
inkscape:connector-curvature="0"
d="m 73.247659,124.68112 -37.48957,-7.53084 12.222724,36.23233 37.48956,7.53084 z" />
<path
id="path13431-9-7"
sodipodi:nodetypes="ccccc"
style="fill:none;stroke:#000000;stroke-width:3.43263125;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none"
inkscape:connector-curvature="0"
d="M 47.980813,153.38261 22.713972,182.08416 10.491268,145.85178 35.758089,117.15028 Z" />
<path
id="path13433-0-1"
sodipodi:nodetypes="ccccc"
d="m 47.980813,153.38261 -25.266857,28.70162 37.489568,7.53084 25.266907,-28.70153 z"
style="fill:none;stroke:#000000;stroke-width:3.43263125;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none"
inkscape:connector-curvature="0" />
<path
id="path13429-9-2"
sodipodi:nodetypes="ccccc"
style="fill:none;stroke:#000000;stroke-width:3.43263125;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none"
inkscape:connector-curvature="0"
d="m 108.76237,205.17588 -38.207108,1.54817 20.444152,32.31429 38.207146,-1.54817 z" />
<path
id="path13431-8-2"
sodipodi:nodetypes="ccccc"
style="fill:none;stroke:#000000;stroke-width:3.43263125;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none"
inkscape:connector-curvature="0"
d="M 90.999414,239.03834 73.236473,272.90088 52.792296,240.5865 70.555262,206.72405 Z" />
<path
id="path13433-85-0"
sodipodi:nodetypes="ccccc"
d="m 90.999414,239.03834 -17.762941,33.86258 38.207127,-1.54817 17.76296,-33.86251 z"
style="fill:none;stroke:#000000;stroke-width:3.43263125;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none"
inkscape:connector-curvature="0" />
</g>
</svg>

(rendered image, 4.8 KiB)

View File

@ -606,18 +606,19 @@ Module to query a local copy of Maxmind's Geolite database.
<img src=logos/greynoise.png height=60>
Module to access GreyNoise.io API
Module to query IP and CVE information from GreyNoise
- **features**:
>The module takes an IP address as input and queries Greynoise for some additional information about it: basically it checks whether a given IP address is “Internet background noise”, or has been observed scanning or attacking devices across the Internet. The result is returned as text.
>This module supports: 1) Query an IP from GreyNoise to see if it is internet background noise or a common business service 2) Query a CVE from GreyNoise to see the total number of internet scanners looking for the CVE in the last 7 days.
- **input**:
>An IP address.
>An IP address or CVE ID
- **output**:
>Additional information about the IP fetched from Greynoise API.
>IP Lookup information or CVE scanning profile for past 7 days
- **references**:
> - https://greynoise.io/
> - https://github.com/GreyNoise-Intelligence/api.greynoise.io
> - https://docs.greynoise.io/
> - https://www.greynoise.io/viz/account/
- **requirements**:
>A Greynoise API key.
>A Greynoise API key. Both Enterprise (Paid) and Community (Free) API keys are supported, however Community API users will only be able to perform IP lookups.
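As a rough illustration of the lookup this module performs (not part of this changeset), the sketch below queries the GreyNoise Community API for a single IP; the `https://api.greynoise.io/v3/community/` endpoint and the `key` header are assumptions based on the public GreyNoise documentation and may differ from what the module actually calls.

import requests

GREYNOISE_COMMUNITY_URL = "https://api.greynoise.io/v3/community/{ip}"  # assumed endpoint

def greynoise_ip_lookup(ip: str, api_key: str) -> dict:
    """Return the GreyNoise record telling whether an IP is background noise or a common business service."""
    response = requests.get(GREYNOISE_COMMUNITY_URL.format(ip=ip), headers={"key": api_key}, timeout=10)
    response.raise_for_status()
    return response.json()

print(greynoise_ip_lookup("8.8.8.8", "YOUR_API_KEY"))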
-----
@ -635,6 +636,25 @@ A hover module to check hashes against hashdd.com including NSLR dataset.
-----
#### [hashlookup](https://github.com/MISP/misp-modules/tree/main/misp_modules/modules/expansion/hashlookup.py)
<img src=logos/circl.png height=60>
An expansion module to query the CIRCL hashlookup services to find out if a hash is part of a known set such as NSRL.
- **features**:
>The module takes file hashes as input, such as an MD5 or SHA1.
> It queries the public CIRCL.lu hashlookup service and returns all the hits if the hashes are known in an existing dataset. The module can be configured with a custom hashlookup URL if required.
> The module can be used as a hover module but also as an expansion module to add related MISP objects.
>
- **input**:
>File hashes (MD5, SHA1)
- **output**:
>Object with the filename associated hashes if the hash is part of a known set.
- **references**:
>https://www.circl.lu/services/hashlookup/
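For reference, a minimal sketch of the kind of request the module sends (not part of this changeset), assuming the public hashlookup.circl.lu REST endpoint `/lookup/sha1/<hash>`; a custom hashlookup URL can be substituted for the base URL.

import requests

def hashlookup_sha1(sha1: str, base_url: str = "https://hashlookup.circl.lu") -> dict:
    """Return the hashlookup record for a SHA1, or an empty dict if the hash is not in any known set."""
    response = requests.get(f"{base_url}/lookup/sha1/{sha1}", timeout=10)
    if response.status_code == 404:
        return {}  # hash is not part of a known dataset
    response.raise_for_status()
    return response.json()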
-----
#### [hibp](https://github.com/MISP/misp-modules/tree/main/misp_modules/modules/expansion/hibp.py)
<img src=logos/hibp.png height=60>
@ -734,6 +754,26 @@ Module to query an IP ASN history service (https://github.com/D4-project/IPASN-H
-----
#### [ipqs_fraud_and_risk_scoring](https://github.com/MISP/misp-modules/tree/main/misp_modules/modules/expansion/ipqs_fraud_and_risk_scoring.py)
<img src=logos/ipqualityscore.png height=60>
IPQualityScore MISP Expansion Module for IP reputation, Email Validation, Phone Number Validation, Malicious Domain and Malicious URL Scanner.
- **features**:
>This module takes the IP Address, Domain, URL, Email and Phone Number MISP attributes as input to query the IPQualityScore API.
> The results of the IPQualityScore API are then returned as an IPQS Fraud and Risk Scoring Object.
> The object contains a copy of the enriched attribute with added tags presenting the verdict based on fraud score, risk score and other attributes from IPQualityScore.
- **input**:
>A MISP attribute of type IP Address(ip-src, ip-dst), Domain(hostname, domain), URL(url, uri), Email Address(email, email-src, email-dst, target-email, whois-registrant-email) and Phone Number(phone-number, whois-registrant-phone).
- **output**:
>IPQualityScore object, resulting from the query on the IPQualityScore API.
- **references**:
>https://www.ipqualityscore.com/
- **requirements**:
>An IPQualityScore API Key.
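A minimal sketch of an IP lookup against IPQualityScore (not part of this changeset); the `/api/json/ip/<key>/<ip>` endpoint is an assumption based on the public IPQualityScore documentation, and other attribute types use sibling endpoints.

import requests

def ipqs_ip_lookup(ip: str, api_key: str) -> dict:
    """Return the IPQualityScore record (fraud_score and related fields) for one IP address."""
    url = f"https://www.ipqualityscore.com/api/json/ip/{api_key}/{ip}"  # assumed endpoint
    response = requests.get(url, timeout=10)
    response.raise_for_status()
    return response.json()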
-----
#### [iprep](https://github.com/MISP/misp-modules/tree/main/misp_modules/modules/expansion/iprep.py)
Module to query IPRep data for IP addresses.
@ -802,6 +842,8 @@ A module to submit files or URLs to Joe Sandbox for an advanced analysis, and re
<img src=logos/lastline.png height=60>
Deprecation notice: this module will be deprecated by December 2021, please use vmware_nsx module.
Query Lastline with an analysis link and parse the report into MISP attributes and objects.
The analysis link can also be retrieved from the output of the [lastline_submit](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/expansion/lastline_submit.py) expansion module.
- **features**:
@ -821,6 +863,8 @@ The analysis link can also be retrieved from the output of the [lastline_submit]
<img src=logos/lastline.png height=60>
Deprecation notice: this module will be deprecated by December 2021, please use vmware_nsx module.
Module to submit a file or URL to Lastline.
- **features**:
>The module requires a Lastline Analysis `api_token` and `key`.
@ -892,6 +936,39 @@ Query the MALWAREbazaar API to get additional information about the input hash a
-----
#### [mmdb_lookup](https://github.com/MISP/misp-modules/tree/main/misp_modules/modules/expansion/mmdb_lookup.py)
<img src=logos/circl.png height=60>
A hover and expansion module to enrich an ip with geolocation and ASN information from an mmdb server instance, such as CIRCL's ip.circl.lu.
- **features**:
>The module takes an IP address related attribute as input.
> It queries the public CIRCL.lu mmdb-server instance, available at ip.circl.lu, by default. The module can be configured with a custom mmdb server url if required.
> It is also possible to filter results on 1 db_source by configuring db_source_filter.
- **input**:
>An IP address attribute (for example ip-src or ip-src|port).
- **output**:
>Geolocation and ASN objects.
- **references**:
> - https://data.public.lu/fr/datasets/geo-open-ip-address-geolocation-per-country-in-mmdb-format/
> - https://github.com/adulau/mmdb-server
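A minimal sketch of the lookup (not part of this changeset), assuming the `/geolookup/<ip>` route exposed by adulau/mmdb-server and a `meta.db_source` field in each returned record; both are assumptions that may differ from the module's exact behaviour.

import requests

def mmdb_geolookup(ip: str, server: str = "https://ip.circl.lu", db_source_filter=None) -> list:
    """Return geolocation/ASN records for an IP, optionally filtered to a single db_source."""
    response = requests.get(f"{server}/geolookup/{ip}", timeout=10)
    response.raise_for_status()
    records = response.json()
    if db_source_filter:
        records = [r for r in records if r.get("meta", {}).get("db_source") == db_source_filter]
    return records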
-----
#### [mwdb](https://github.com/MISP/misp-modules/tree/main/misp_modules/modules/expansion/mwdb.py)
Module to push malware samples to a MWDB instance
- **features**:
>An expansion module to push malware samples to a MWDB (https://github.com/CERT-Polska/mwdb-core) instance. This module does not push samples to a sandbox; this can be achieved via Karton (connected to the MWDB). It does the following: * Upload of the attachment or malware sample to MWDB. * Tags of events and/or attributes are added to MWDB. * The comment of the MISP attribute is added to MWDB. * A link back to the MISP event is added to MWDB via the MWDB attribute. * A link to the MWDB attribute is added as an enriched attribute to the MISP event.
- **input**:
>Attachment or malware sample
- **output**:
>Link attribute that points to the sample on the MWDB instance
- **requirements**:
>* mwdblib installed (pip install mwdblib) ; * (optional) keys.py file to add tags of events/attributes to MWDB * (optional) MWDB attribute created for the link back to MISP (defined in mwdb_misp_attribute)
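A minimal sketch of what the module automates (not part of this changeset), using mwdblib directly; the `MWDB`, `upload_file`, `add_tag`, `add_comment` and `add_attribute` calls and the instance URL are assumptions based on the mwdblib documentation.

from mwdblib import MWDB

def push_sample(filename: str, payload: bytes, misp_event_url: str) -> str:
    mwdb = MWDB(api_url="https://mwdb.example.org/api/", api_key="YOUR_API_KEY")  # placeholder instance
    sample = mwdb.upload_file(filename, payload)      # upload the attachment or malware sample
    sample.add_tag("misp")                            # tags of the event and/or attribute
    sample.add_comment("Pushed from MISP")            # comment of the MISP attribute
    sample.add_attribute("misp", misp_event_url)      # link back to the MISP event (mwdb_misp_attribute)
    return sample.sha256                              # used to build the link attribute returned to MISP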
-----
#### [ocr_enrich](https://github.com/MISP/misp-modules/tree/main/misp_modules/modules/expansion/ocr_enrich.py)
Module to process some optical character recognition on pictures.
@ -1016,6 +1093,25 @@ Module to get information from AlienVault OTX.
-----
#### [passivessh](https://github.com/MISP/misp-modules/tree/main/misp_modules/modules/expansion/passivessh.py)
<img src=logos/passivessh.png height=60>
An expansion module to query the CIRCL Passive SSH.
- **features**:
>The module queries the Passive SSH service from CIRCL.
>
> The module can be used as a hover module but also as an expansion module to add related MISP objects.
>
- **input**:
>IP addresses or SSH fingerprints
- **output**:
>SSH key materials, complementary IP addresses with similar SSH key materials
- **references**:
>https://github.com/D4-project/passive-ssh
-----
#### [passivetotal](https://github.com/MISP/misp-modules/tree/main/misp_modules/modules/expansion/passivetotal.py)
<img src=logos/passivetotal.png height=60>
@ -1099,6 +1195,24 @@ Module to extract freetext from a .pptx document.
-----
#### [qintel_qsentry](https://github.com/MISP/misp-modules/tree/main/misp_modules/modules/expansion/qintel_qsentry.py)
<img src=logos/qintel.png height=60>
A hover and expansion module which queries Qintel QSentry for IP reputation data
- **features**:
>This module takes an IP address (ip-src or ip-dst) attribute as input, and queries the Qintel QSentry API to retrieve IP reputation data
- **input**:
>IP address attribute
- **output**:
>Objects containing the enriched IP, threat tags, last seen attributes and associated Autonomous System information
- **references**:
>https://www.qintel.com/products/qsentry/
- **requirements**:
>A Qintel API token
-----
#### [qrcode](https://github.com/MISP/misp-modules/tree/main/misp_modules/modules/expansion/qrcode.py)
Module to decode QR codes.
@ -1567,6 +1681,26 @@ Module to submit a sample to VMRay.
-----
#### [vmware_nsx](https://github.com/MISP/misp-modules/tree/main/misp_modules/modules/expansion/vmware_nsx.py)
<img src=logos/vmware_nsx.png height=60>
Module to enrich a file or URL with VMware NSX Defender.
- **features**:
>This module takes an IoC such as file hash, file attachment, malware-sample or url as input to query VMware NSX Defender.
>
>The IoC is then enriched with data from VMware NSX Defender.
- **input**:
>File hash, attachment or URL to be enriched with VMware NSX Defender.
- **output**:
>Objects and tags generated by VMware NSX Defender.
- **references**:
>https://www.vmware.com
- **requirements**:
>The module requires a VMware NSX Defender Analysis `api_token` and `key`.
-----
#### [vulndb](https://github.com/MISP/misp-modules/tree/main/misp_modules/modules/expansion/vulndb.py)
<img src=logos/vulndb.png height=60>
@ -1720,6 +1854,26 @@ An expansion hover module to perform a syntax check on if yara rules are valid o
-----
#### [yeti](https://github.com/MISP/misp-modules/tree/main/misp_modules/modules/expansion/yeti.py)
<img src=logos/yeti.png height=60>
Module to process a query on Yeti.
- **features**:
>This module adds context and links between observables using Yeti
- **input**:
>A domain, hostname, IP, sha256, sha1, md5 or url MISP attribute.
- **output**:
>MISP attributes and objects fetched from the Yeti instances.
- **references**:
> - https://github.com/yeti-platform/yeti
> - https://github.com/sebdraven/pyeti
- **requirements**:
> - pyeti
> - API key
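A rough sketch of the lookup the module performs through pyeti (not part of this changeset); the `YetiApi` constructor and `observable_search` call are assumptions about the pyeti client API and may not match it exactly.

import pyeti

def yeti_context(value: str) -> list:
    """Return Yeti observables (with their context and links) matching a value."""
    api = pyeti.YetiApi("https://yeti.example.org/api/", api_key="YOUR_API_KEY")  # placeholder instance
    return api.observable_search(value=value)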
-----
## Export Modules
#### [cef_export](https://github.com/MISP/misp-modules/tree/main/misp_modules/modules/export_mod/cef_export.py)
@ -1753,6 +1907,22 @@ Module to export malicious network activity attributes to Cisco fireSIGHT manage
-----
#### [defender_endpoint_export](https://github.com/MISP/misp-modules/tree/main/misp_modules/modules/export_mod/defender_endpoint_export.py)
<img src=logos/defender_endpoint.png height=60>
Defender for Endpoint KQL hunting query export module
- **features**:
>This module exports an event as Defender for Endpoint KQL queries that can then be used in your own Python 3 or PowerShell tooling. If you are using Microsoft Sentinel, you can directly connect your MISP instance to Sentinel and then create queries using the `ThreatIntelligenceIndicator` table to match events against imported IOCs.
- **input**:
>MISP Event attributes
- **output**:
>Defender for Endpoint KQL queries
- **references**:
>https://docs.microsoft.com/en-us/windows/security/threat-protection/microsoft-defender-atp/advanced-hunting-schema-reference
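As a toy illustration of the idea (not part of this changeset), the sketch below turns a list of IP attributes into one KQL hunting query; the `DeviceNetworkEvents` table and `RemoteIP` column come from the advanced hunting schema referenced above, but the module's generated queries may look different.

def kql_for_ips(ip_attributes: list) -> str:
    """Build a Defender for Endpoint KQL query matching any of the given IP addresses."""
    values = ", ".join(f'"{ip}"' for ip in ip_attributes)
    return f"DeviceNetworkEvents | where RemoteIP in ({values})"

print(kql_for_ips(["198.51.100.7", "203.0.113.9"]))
# DeviceNetworkEvents | where RemoteIP in ("198.51.100.7", "203.0.113.9")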
-----
#### [goamlexport](https://github.com/MISP/misp-modules/tree/main/misp_modules/modules/export_mod/goamlexport.py)
<img src=logos/goAML.jpg height=60>
@ -1857,7 +2027,7 @@ Simple export of a MISP event to PDF.
> 'Activate_galaxy_description' is a boolean (True or void) to activate the description of event-related galaxies.
> 'Activate_related_events' is a boolean (True or void) to activate the description of related events. Be aware this might leak information on confidential events linked to the current event!
> 'Activate_internationalization_fonts' is a boolean (True or void) to activate Noto fonts instead of the default fonts (Helvetica). This allows support of the CJK alphabets. Be sure to have followed the procedure to download the Noto fonts (~70 MB) to the right place (/tools/pdf_fonts/Noto_TTF), to allow PyMISP to find and use them during PDF generation.
> 'Custom_fonts_path' is a text (path or void) pointing to the TTF file of your choice, to create the PDF with it. Be aware the PDF won't support bold/italic/special styles anymore with this option.
- **input**:
>MISP Event
- **output**:
@ -1914,6 +2084,25 @@ Module to export a structured CSV file for uploading to ThreatConnect.
-----
#### [virustotal_collections](https://github.com/MISP/misp-modules/tree/main/misp_modules/modules/export_mod/virustotal_collections.py)
<img src=logos/virustotal.png height=60>
Creates a VT Collection from an event's IOCs.
- **features**:
>This export module takes advantage of a new endpoint in VT APIv3 to create VT Collections from IOCs contained in a MISP event. With this module, users are able to create a collection just by using the Download as... button.
- **input**:
>A domain, hash (md5, sha1, sha256 or sha512), hostname, url or IP address attribute.
- **output**:
>A VirusTotal collection in VT.
- **references**:
> - https://www.virustotal.com/
> - https://blog.virustotal.com/2021/11/introducing-virustotal-collections.html
- **requirements**:
>An access to the VirusTotal API (apikey).
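A hedged sketch of the underlying API call (not part of this changeset), assuming the VT APIv3 `POST /api/v3/collections` endpoint with the `x-apikey` header; the payload layout shown here (relationships grouped per IOC type) is an assumption for illustration only.

import requests

def create_vt_collection(name: str, domains: list, api_key: str) -> dict:
    payload = {
        "data": {
            "type": "collection",
            "attributes": {"name": name},
            "relationships": {
                "domains": {"data": [{"type": "domain", "id": d} for d in domains]}  # assumed layout
            },
        }
    }
    response = requests.post("https://www.virustotal.com/api/v3/collections",
                             headers={"x-apikey": api_key}, json=payload, timeout=30)
    response.raise_for_status()
    return response.json()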
-----
#### [vt_graph](https://github.com/MISP/misp-modules/tree/main/misp_modules/modules/export_mod/vt_graph.py)
<img src=logos/virustotal.png height=60>
@ -1936,6 +2125,22 @@ This module is used to create a VirusTotal Graph from a MISP event.
## Import Modules
#### [cof2misp](https://github.com/MISP/misp-modules/tree/main/misp_modules/modules/import_mod/cof2misp.py)
Passive DNS Common Output Format (COF) MISP importer
- **features**:
>Takes as input a valid COF file or the output of the dnsdbflex utility and creates MISP objects for the input.
- **input**:
>Passive DNS output in Common Output Format (COF)
- **output**:
>MISP objects
- **references**:
>https://tools.ietf.org/id/draft-dulaunoy-dnsop-passive-dns-cof-08.html
- **requirements**:
>PyMISP
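A minimal sketch of the input the importer consumes (not part of this changeset): newline-delimited COF records, validated here with the bundled helper added later in this commit; the `cof2misp.cof` import path is an assumption about where that helper lives.

import ndjson
from cof2misp.cof import validate_cof  # assumed import path for the bundled helper

cof_lines = '{"count": 1909, "rdata": ["cpa.circl.lu"], "rrname": "www.circl.lu", "rrtype": "CNAME", "time_first": "1315586409", "time_last": "1449566799"}'

for record in ndjson.loads(cof_lines):
    if validate_cof(record, strict=False):
        # each valid record becomes a passive-dns MISP object in the importer
        print(record["rrname"], record["rrtype"], record["rdata"])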
-----
#### [csvimport](https://github.com/MISP/misp-modules/tree/main/misp_modules/modules/import_mod/csvimport.py)
Module to import MISP attributes from a csv file.
@ -2028,6 +2233,8 @@ A module to import data from a Joe Sandbox analysis json report.
<img src=logos/lastline.png height=60>
Deprecation notice: this module will be deprecated by December 2021, please use vmware_nsx module.
Module to import and parse reports from Lastline analysis links.
- **features**:
>The module requires a Lastline Portal `username` and `password`.

View File

@ -1,6 +1,7 @@
# -*- coding: utf-8 -*-
import os
import json
import sys
module_types = ['expansion', 'export_mod', 'import_mod']
titles = ['Expansion Modules', 'Export Modules', 'Import Modules']
@ -17,6 +18,7 @@ def generate_doc(module_type, root_path, logo_path='logos'):
githubref = f'{githubpath}/{modulename}.py'
markdown.append(f'\n#### [{modulename}]({githubref})\n')
filename = os.path.join(current_path, filename)
print(f'Processing {filename}')
with open(filename, 'rt') as f:
definition = json.loads(f.read())
if 'logo' in definition:

Binary file not shown. (new image, 19 KiB)

Binary file not shown. (new image, 648 KiB)

Binary file not shown. (changed image, before 112 KiB, after 90 KiB)

Binary file not shown. (new image, 3.1 KiB)

Binary file not shown. (new image, 6.6 KiB)

Binary file not shown. (new image, 57 KiB)

Binary file not shown. (new image, 46 KiB)

Binary file not shown. (new image, 52 KiB)

Binary file not shown. (new image, 309 KiB)

View File

@ -14,7 +14,8 @@ sudo apt-get install -y \
zbar-tools \
libzbar0 \
libzbar-dev \
libfuzzy-dev
libfuzzy-dev \
libcaca-dev
# BEGIN with virtualenv:
$SUDO_WWW virtualenv -p python3 /var/www/MISP/venv

View File

@ -11,4 +11,4 @@
"https://docs.dnsdb.info/dnsdb-api/"
],
"features": "This module takes a domain, hostname or IP address MISP attribute as input to query the Farsight Passive DNS API.\n The results of rdata and rrset lookups are then returned and parsed into passive-dns objects.\n\nAn API key is required to submit queries to the API.\n It is also possible to define a custom server URL, and to set a limit of results to get.\n This limit is set for each lookup, which means we can have an up to the limit number of passive-dns objects resulting from an rdata query about an IP address, but an up to the limit number of passive-dns objects for each lookup queries about a domain or a hostname (== twice the limit)."
}
}

View File

@ -1,14 +1,15 @@
{
"description": "Module to access GreyNoise.io API",
"description": "Module to query IP and CVE information from GreyNoise",
"logo": "greynoise.png",
"requirements": [
"A Greynoise API key."
"A Greynoise API key. Both Enterprise (Paid) and Community (Free) API keys are supported, however Community API users will only be able to perform IP lookups."
],
"input": "An IP address.",
"output": "Additional information about the IP fetched from Greynoise API.",
"input": "An IP address or CVE ID",
"output": "IP Lookup information or CVE scanning profile for past 7 days",
"references": [
"https://greynoise.io/",
"https://github.com/GreyNoise-Intelligence/api.greynoise.io"
"https://docs.greyniose.io/",
"https://www.greynoise.io/viz/account/"
],
"features": "The module takes an IP address as input and queries Greynoise for some additional information about it: basically it checks whether a given IP address is \u201cInternet background noise\u201d, or has been observed scanning or attacking devices across the Internet. The result is returned as text."
"features": "This module supports: 1) Query an IP from GreyNoise to see if it is internet background noise or a common business service 2) Query a CVE from GreyNoise to see the total number of internet scanners looking for the CVE in the last 7 days."
}

View File

@ -0,0 +1,10 @@
{
"description": "An expansion module to query the CIRCL hashlookup services to find it if a hash is part of a known set such as NSRL.",
"logo": "circl.png",
"input": "File hashes (MD5, SHA1)",
"output": "Object with the filename associated hashes if the hash is part of a known set.",
"references": [
"https://www.circl.lu/services/hashlookup/"
],
"features": "The module takes file hashes as input such as a MD5 or SHA1.\n It queries the public CIRCL.lu hashlookup service and return all the hits if the hashes are known in an existing dataset. The module can be configured with a custom hashlookup url if required.\n The module can be used an hover module but also an expansion model to add related MISP objects.\n"
}

View File

@ -0,0 +1,13 @@
{
"description": "HYAS Insight integration to MISP provides direct, high volume access to HYAS Insight data. It enables investigators and analysts to understand and defend against cyber adversaries and their infrastructure.",
"logo": "hyas.png",
"requirements": [
"A HYAS Insight API Key."
],
"input": "A MISP attribute of type IP Address(ip-src, ip-dst), Domain(hostname, domain), Email Address(email, email-src, email-dst, target-email, whois-registrant-email), Phone Number(phone-number, whois-registrant-phone), MDS(md5, x509-fingerprint-md5, ja3-fingerprint-md5, hassh-md5, hasshserver-md5), SHA1(sha1, x509-fingerprint-sha1), SHA256(sha256, x509-fingerprint-sha256), SHA512(sha512)",
"output": "Hyas Insight objects, resulting from the query on the HYAS Insight API.",
"references": [
"https://www.hyas.com/hyas-insight/"
],
"features": "This Module takes the IP Address, Domain, URL, Email, Phone Number, MD5, SHA1, Sha256, SHA512 MISP Attributes as input to query the HYAS Insight API.\n The results of the HYAS Insight API are than are then returned and parsed into Hyas Insight Objects. \n\nAn API key is required to submit queries to the HYAS Insight API.\n"
}

View File

@ -0,0 +1,13 @@
{
"description": "IPQualityScore MISP Expansion Module for IP reputation, Email Validation, Phone Number Validation, Malicious Domain and Malicious URL Scanner.",
"logo": "ipqualityscore.png",
"requirements": [
"A IPQualityScore API Key."
],
"input": "A MISP attribute of type IP Address(ip-src, ip-dst), Domain(hostname, domain), URL(url, uri), Email Address(email, email-src, email-dst, target-email, whois-registrant-email) and Phone Number(phone-number, whois-registrant-phone).",
"output": "IPQualityScore object, resulting from the query on the IPQualityScore API.",
"references": [
"https://www.ipqualityscore.com/"
],
"features": "This Module takes the IP Address, Domain, URL, Email and Phone Number MISP Attributes as input to query the IPQualityScore API.\n The results of the IPQualityScore API are than returned as IPQS Fraud and Risk Scoring Object. \n The object contains a copy of the enriched attribute with added tags presenting the verdict based on fraud score,risk score and other attributes from IPQualityScore."
}

View File

@ -1,5 +1,5 @@
{
"description": "Query Lastline with an analysis link and parse the report into MISP attributes and objects.\nThe analysis link can also be retrieved from the output of the [lastline_submit](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/expansion/lastline_submit.py) expansion module.",
"description": "Deprecation notice: this module will be deprecated by December 2021, please use vmware_nsx module.\n\nQuery Lastline with an analysis link and parse the report into MISP attributes and objects.\nThe analysis link can also be retrieved from the output of the [lastline_submit](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/expansion/lastline_submit.py) expansion module.",
"logo": "lastline.png",
"requirements": [],
"input": "Link to a Lastline analysis.",

View File

@ -1,5 +1,5 @@
{
"description": "Module to submit a file or URL to Lastline.",
"description": "Deprecation notice: this module will be deprecated by December 2021, please use vmware_nsx module.\n\nModule to submit a file or URL to Lastline.",
"logo": "lastline.png",
"requirements": [],
"input": "File or URL to submit to Lastline.",

View File

@ -0,0 +1,11 @@
{
"description": "A hover and expansion module to enrich an ip with geolocation and ASN information from an mmdb server instance, such as CIRCL's ip.circl.lu.",
"logo": "circl.png",
"input": "An IP address attribute (for example ip-src or ip-src|port).",
"output": "Geolocation and asn objects.",
"references": [
"https://data.public.lu/fr/datasets/geo-open-ip-address-geolocation-per-country-in-mmdb-format/",
"https://github.com/adulau/mmdb-server"
],
"features": "The module takes an IP address related attribute as input.\n It queries the public CIRCL.lu mmdb-server instance, available at ip.circl.lu, by default. The module can be configured with a custom mmdb server url if required.\n It is also possible to filter results on 1 db_source by configuring db_source_filter."
}

View File

@ -0,0 +1,11 @@
{
"description": "Module to push malware samples to a MWDB instance",
"requirements": [
"* mwdblib installed (pip install mwdblib) ; * (optional) keys.py file to add tags of events/attributes to MWDB * (optional) MWDB attribute created for the link back to MISP (defined in mwdb_misp_attribute)"
],
"input": "Attachment or malware sample",
"output": "Link attribute that points to the sample at the MWDB instane",
"references": [
],
"features": "An expansion module to push malware samples to a MWDB (https://github.com/CERT-Polska/mwdb-core) instance. This module does not push samples to a sandbox. This can be achieved via Karton (connected to the MWDB). Does: * Upload of attachment or malware sample to MWDB * Tags of events and/or attributes are added to MWDB. * Comment of the MISP attribute is added to MWDB. * A link back to the MISP event is added to MWDB via the MWDB attribute. * A link to the MWDB attribute is added as an enrichted attribute to the MISP event."
}

View File

@ -0,0 +1,10 @@
{
"description": "An expansion module to query the CIRCL Passive SSH.",
"logo": "passivessh.png",
"input": "IP addresses or SSH fingerprints",
"output": "SSH key materials, complementary IP addresses with similar SSH key materials",
"references": [
"https://github.com/D4-project/passive-ssh"
],
"features": "The module queries the Passive SSH service from CIRCL.\n \n The module can be used an hover module but also an expansion model to add related MISP objects.\n"
}

View File

@ -0,0 +1,13 @@
{
"description": "A hover and expansion module which queries Qintel QSentry for ip reputation data",
"logo": "qintel.png",
"requirements": [
"A Qintel API token"
],
"input": "ip address attribute",
"ouput": "Objects containing the enriched IP, threat tags, last seen attributes and associated Autonomous System information",
"features": "This module takes an ip-address (ip-src or ip-dst) attribute as input, and queries the Qintel QSentry API to retrieve ip reputation data",
"references": [
"https://www.qintel.com/products/qsentry/"
]
}

View File

@ -0,0 +1,14 @@
{
"description": "Module to enrich a file or URL with VMware NSX Defender.",
"logo": "vmware_nsx.png",
"requirements": [
"The module requires a VMware NSX Defender Analysis `api_token` and `key`."
],
"input": "File hash, attachment or URL to be enriched with VMware NSX Defender.",
"output": "Objects and tags generated by VMware NSX Defender.",
"references": [
"https://www.vmware.com"
],
"features": "This module takes an IoC such as file hash, file attachment, malware-sample or url as input to query VMware NSX Defender.\n\nThe IoC is then enriched with data from VMware NSX Defender."
}

View File

@ -0,0 +1,9 @@
{
"description": "Module to process a query on Yeti.",
"logo": "yeti.png",
"requirements": ["pyeti", "API key "],
"input": "A domain, hostname,IP, sha256,sha1, md5, url of MISP attribute.",
"output": "MISP attributes and objects fetched from the Yeti instances.",
"references": ["https://github.com/yeti-platform/yeti", "https://github.com/sebdraven/pyeti"],
"features": "This module add context and links between observables using yeti"
}

View File

@ -0,0 +1,11 @@
{
"description": "Defender for Endpoint KQL hunting query export module",
"requirements": [],
"features": "This module export an event as Defender for Endpoint KQL queries that can then be used in your own python3 or Powershell tool. If you are using Microsoft Sentinel, you can directly connect your MISP instance to Sentinel and then create queries using the `ThreatIntelligenceIndicator` table to match events against imported IOC.",
"references": [
"https://docs.microsoft.com/en-us/windows/security/threat-protection/microsoft-defender-atp/advanced-hunting-schema-reference"
],
"input": "MISP Event attributes",
"output": "Defender for Endpoint KQL queries",
"logo": "defender_endpoint.png"
}

View File

@ -0,0 +1,14 @@
{
"description": "Creates a VT Collection from an event iocs.",
"logo": "virustotal.png",
"requirements": [
"An access to the VirusTotal API (apikey)."
],
"input": "A domain, hash (md5, sha1, sha256 or sha512), hostname, url or IP address attribute.",
"output": "A VirusTotal collection in VT.",
"references": [
"https://www.virustotal.com/",
"https://blog.virustotal.com/2021/11/introducing-virustotal-collections.html"
],
"features": "This export module which takes advantage of a new endpoint in VT APIv3 to create VT Collections from IOCs contained in a MISP event. With this module users will be able to create a collection just using the Download as... button."
}

View File

@ -0,0 +1,12 @@
{
"description": "Passive DNS Common Output Format (COF) MISP importer",
"requirements": [
"PyMISP"
],
"features": "Takes as input a valid COF file or the output of the dnsdbflex utility and creates MISP objects for the input.",
"references": [
"https://tools.ietf.org/id/draft-dulaunoy-dnsop-passive-dns-cof-08.html"
],
"input": "Passive DNS output in Common Output Format (COF)",
"output": "MISP objects"
}

View File

@ -1,5 +1,5 @@
{
"description": "Module to import and parse reports from Lastline analysis links.",
"description": "Deprecation notice: this module will be deprecated by December 2021, please use vmware_nsx module.\n\nModule to import and parse reports from Lastline analysis links.",
"logo": "lastline.png",
"requirements": [],
"input": "Link to a Lastline analysis.",

View File

@ -7,7 +7,7 @@ User=www-data
Group=www-data
WorkingDirectory=/usr/local/src/misp-modules
Environment="PATH=/var/www/MISP/venv/bin"
ExecStart=/var/www/MISP/venv/bin/misp-modules -l 127.0.0.1 -s
ExecStart=/var/www/MISP/venv/bin/misp-modules -l 127.0.0.1
[Install]
WantedBy=multi-user.target

View File

@ -41,14 +41,14 @@ try:
from .modules import * # noqa
HAS_PACKAGE_MODULES = True
except Exception as e:
print(e)
logging.exception(e)
HAS_PACKAGE_MODULES = False
try:
from .helpers import * # noqa
HAS_PACKAGE_HELPERS = True
except Exception as e:
print(e)
logging.exception(e)
HAS_PACKAGE_HELPERS = False
log = logging.getLogger('misp-modules')
@ -183,10 +183,9 @@ class QueryModule(tornado.web.RequestHandler):
executor = ThreadPoolExecutor(nb_threads)
@run_on_executor
def run_request(self, jsonpayload):
x = json.loads(jsonpayload)
def run_request(self, module, jsonpayload):
log.debug('MISP QueryModule request {0}'.format(jsonpayload))
response = mhandlers[x['module']].handler(q=jsonpayload)
response = mhandlers[module].handler(q=jsonpayload)
return json.dumps(response)
@tornado.gen.coroutine
@ -198,7 +197,7 @@ class QueryModule(tornado.web.RequestHandler):
timeout = datetime.timedelta(seconds=int(dict_payload.get('timeout')))
else:
timeout = datetime.timedelta(seconds=300)
response = yield tornado.gen.with_timeout(timeout, self.run_request(jsonpayload))
response = yield tornado.gen.with_timeout(timeout, self.run_request(dict_payload['module'], jsonpayload))
self.write(response)
except tornado.gen.TimeoutError:
log.warning('Timeout on {} '.format(dict_payload['module']))
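For context, a minimal client-side sketch of the request this handler serves (not part of the diff): the JSON payload carries the module name, which is now passed explicitly to run_request, plus the module-specific input and an optional timeout. The default port 6666 and the dns module's input key are assumptions used for illustration.

import requests

payload = {
    "module": "dns",             # read as dict_payload['module'] by the handler above
    "hostname": "www.circl.lu",  # module-specific input (assumed key for the dns module)
    "timeout": 60,               # optional, in seconds, as handled above
}
print(requests.post("http://127.0.0.1:6666/query", json=payload, timeout=70).json())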

View File

@ -1,3 +1,4 @@
import joe_mapping
from .vt_graph_parser import * # noqa
__all__ = ['joe_parser', 'lastline_api']
__all__ = ['joe_parser', 'lastline_api', 'cof2misp', 'qintel_helper']

File diff suppressed because it is too large.

View File

@ -0,0 +1,202 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

View File

@ -0,0 +1,165 @@
"""
Common Output Format for passive DNS library.
Copyright 2021: Farsight Security (https://www.farsightsecurity.com/)
Author: Aaron Kaplan <aaron@lo-res.org>
Released under the Apache 2.0 license.
See: https://www.apache.org/licenses/LICENSE-2.0.txt
"""
import ipaddress
import sys
import ndjson
def is_valid_ip(ip: str) -> bool:
"""Check if an IP address given as string would be convertible to
an ipaddress object (and thus if it is a valid IP).
Returns
--------
True on success, False on validation failure.
"""
try:
ipaddress.ip_address(ip)
except Exception as ex:
print("is_valid_ip(%s) returned False. Reason: %s" % (ip, str(ex)), file = sys.stderr)
return False
return True
def is_cof_valid_strict(d: dict) -> bool:
"""Check the COF - do the full JSON schema validation.
Returns
--------
True on success, False on validation failure.
"""
return True # FIXME
def is_cof_valid_simple(d: dict) -> bool:
"""Check MANDATORY fields according to COF - simple check, do not do the full JSON schema validation.
Returns
--------
True on success, False on validation failure.
"""
if "rrname" not in d:
print("Missing MANDATORY field 'rrname'", file = sys.stderr)
return False
if not isinstance(d['rrname'], str):
print("Type error: 'rrname' is not a JSON string", file = sys.stderr)
return False
if "rrtype" not in d:
print("Missing MANDATORY field 'rrtype'", file = sys.stderr)
return False
if not isinstance(d['rrtype'], str):
print("Type error: 'rrtype' is not a JSON string", file = sys.stderr)
return False
if "rdata" not in d:
print("Missing MANDATORY field 'rdata'", file = sys.stderr)
return False
if "rdata" not in d:
print("Missing MANDATORY field 'rdata'", file = sys.stderr)
return False
if not isinstance(d['rdata'], str) and not isinstance(d['rdata'], list):
print("'rdata' is not a list and not a string.", file = sys.stderr)
return False
if not ("time_first" in d and "time_last" in d) or ("zone_time_first" in d and "zone_time_last" in d):
print("We are missing EITHER ('first_seen' and 'last_seen') OR ('zone_time_first' and zone_time_last') fields",
file = sys.stderr)
return False
# currently we don't check the OPTIONAL fields. Sorry... to be done later.
return True
def validate_cof(d: dict, strict=True) -> bool:
"""Validate an input passive DNS COF (given as dict).
strict might be set to False in order to loosen the checking.
With strict==True, a full JSON Schema validation will happen.
Returns
--------
True on success, False on validation failure.
"""
if not strict:
return is_cof_valid_simple(d)
else:
return is_cof_valid_strict(d)
def validate_dnsdbflex(d: dict, strict=True) -> bool:
"""
    Validate if dict d is valid dnsdbflex. It should look like this:
{ "rrtype": <str>, "rrname": <str> }
"""
if "rrname" not in d:
print("Missing MANDATORY field 'rrname'", file = sys.stderr)
return False
if not isinstance(d['rrname'], str):
print("Type error: 'rrname' is not a JSON string", file = sys.stderr)
return False
if "rrtype" not in d:
print("Missing MANDATORY field 'rrtype'", file = sys.stderr)
return False
if not isinstance(d['rrtype'], str):
print("Type error: 'rrtype' is not a JSON string", file = sys.stderr)
return False
return True
if __name__ == "__main__":
# simple, poor man's unit tests.
print(80 * "=", file = sys.stderr)
print("Unit Tests:", file = sys.stderr)
assert not is_valid_ip("a.2.3.4")
assert is_valid_ip("99.88.77.6")
assert is_valid_ip("2a0c:88:77:6::1")
# COF validation
print(80 * "=", file = sys.stderr)
print("COF unit tests....", file = sys.stderr)
mock_input = """{"count":1909,"rdata":["cpa.circl.lu"],"rrname":"www.circl.lu","rrtype":"CNAME","time_first":"1315586409","time_last":"1449566799"}
{"count":2560,"rdata":["cpab.circl.lu"],"rrname":"www.circl.lu","rrtype":"CNAME","time_first":"1449584660","time_last":"1617676151"}"""
i = 0
for entry in ndjson.loads(mock_input):
retval = validate_cof(entry, strict = False)
assert retval
print("line %d is valid: %s" % (i, retval))
i += 1
test2 = '{"count": 2, "time_first": 1619556027, "time_last": 1619556034, "rrname": "westernunion.com.ph.unblock-all.com.beta.opera-mini.net.", "rrtype": "A", "bailiwick": "beta.opera-mini.net.", "rdata": ["185.26.181.253"]}'
for entry in ndjson.loads(test2):
assert validate_cof(entry)
# dnsdbflex validation
print(80 * "=", file = sys.stderr)
print("dnsdbflex unit tests....", file = sys.stderr)
mock_input = """{"rrname":"labs.deep-insights.ai.","rrtype":"A"}
{"rrname":"www.deep-insights.ca.","rrtype":"CNAME"}
{"rrname":"mail.deep-insights.ca.","rrtype":"CNAME"}
{"rrname":"cpanel.deep-insights.ca.","rrtype":"A"}
{"rrname":"webdisk.deep-insights.ca.","rrtype":"A"}
{"rrname":"webmail.deep-insights.ca.","rrtype":"A"}"""
i = 0
for entry in ndjson.loads(mock_input):
retval = validate_dnsdbflex(entry, strict = False)
assert retval
print("dnsdbflex line %d is valid: %s" % (i, retval))
i += 1
print(80 * "=", file = sys.stderr)
print("Unit Tests DONE", file = sys.stderr)

View File

@ -0,0 +1,114 @@
arch_type_mapping = {
'ANDROID': 'parse_apk',
'LINUX': 'parse_elf',
'WINDOWS': 'parse_pe'
}
domain_object_mapping = {
'@ip': {'type': 'ip-dst', 'object_relation': 'ip'},
'@name': {'type': 'domain', 'object_relation': 'domain'}
}
dropped_file_mapping = {
'@entropy': {'type': 'float', 'object_relation': 'entropy'},
'@file': {'type': 'filename', 'object_relation': 'filename'},
'@size': {'type': 'size-in-bytes', 'object_relation': 'size-in-bytes'},
'@type': {'type': 'mime-type', 'object_relation': 'mimetype'}
}
dropped_hash_mapping = {
'MD5': 'md5',
'SHA': 'sha1',
'SHA-256': 'sha256',
'SHA-512': 'sha512'
}
elf_object_mapping = {
'epaddr': 'entrypoint-address',
'machine': 'arch',
'osabi': 'os_abi'
}
elf_section_flags_mapping = {
'A': 'ALLOC',
'I': 'INFO_LINK',
'M': 'MERGE',
'S': 'STRINGS',
'T': 'TLS',
'W': 'WRITE',
'X': 'EXECINSTR'
}
file_object_fields = (
'filename',
'md5',
'sha1',
'sha256',
'sha512',
'ssdeep'
)
file_object_mapping = {
'entropy': {'type': 'float', 'object_relation': 'entropy'},
'filesize': {'type': 'size-in-bytes', 'object_relation': 'size-in-bytes'},
'filetype': {'type': 'mime-type', 'object_relation': 'mimetype'}
}
file_references_mapping = {
'fileCreated': 'creates',
'fileDeleted': 'deletes',
'fileMoved': 'moves',
'fileRead': 'reads',
'fileWritten': 'writes'
}
network_behavior_fields = ('srcip', 'dstip', 'srcport', 'dstport')
network_connection_object_mapping = {
'srcip': {'type': 'ip-src', 'object_relation': 'ip-src'},
'dstip': {'type': 'ip-dst', 'object_relation': 'ip-dst'},
'srcport': {'type': 'port', 'object_relation': 'src-port'},
'dstport': {'type': 'port', 'object_relation': 'dst-port'}
}
pe_object_fields = {
'entrypoint': {'type': 'text', 'object_relation': 'entrypoint-address'},
'imphash': {'type': 'imphash', 'object_relation': 'imphash'}
}
pe_object_mapping = {
'CompanyName': 'company-name',
'FileDescription': 'file-description',
'FileVersion': 'file-version',
'InternalName': 'internal-filename',
'LegalCopyright': 'legal-copyright',
'OriginalFilename': 'original-filename',
'ProductName': 'product-filename',
'ProductVersion': 'product-version',
'Translation': 'lang-id'
}
pe_section_object_mapping = {
'characteristics': {'type': 'text', 'object_relation': 'characteristic'},
'entropy': {'type': 'float', 'object_relation': 'entropy'},
'name': {'type': 'text', 'object_relation': 'name'},
'rawaddr': {'type': 'hex', 'object_relation': 'offset'},
'rawsize': {'type': 'size-in-bytes', 'object_relation': 'size-in-bytes'},
'virtaddr': {'type': 'hex', 'object_relation': 'virtual_address'},
'virtsize': {'type': 'size-in-bytes', 'object_relation': 'virtual_size'}
}
process_object_fields = {
'cmdline': 'command-line',
'name': 'name',
'parentpid': 'parent-pid',
'pid': 'pid',
'path': 'current-directory'
}
protocols = {
'tcp': 4,
'udp': 4,
'icmp': 3,
'http': 7,
'https': 7,
'ftp': 7
}
registry_references_mapping = {
'keyValueCreated': 'creates',
'keyValueModified': 'modifies'
}
regkey_object_mapping = {
'name': {'type': 'text', 'object_relation': 'name'},
'newdata': {'type': 'text', 'object_relation': 'data'},
'path': {'type': 'regkey', 'object_relation': 'key'}
}
signerinfo_object_mapping = {
'sigissuer': {'type': 'text', 'object_relation': 'issuer'},
'version': {'type': 'text', 'object_relation': 'version'}
}
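A minimal sketch (not part of the diff) of how the refactored parser consumes these mappings, mirroring the joe_parser.py hunk below: each mapping entry already carries 'type' and 'object_relation', so it can be merged straight into the keyword arguments of MISPObject.add_attribute().

from pymisp import MISPObject
from joe_mapping import dropped_file_mapping

dropped = {'@entropy': '7.9', '@file': 'payload.exe', '@size': '1024', '@type': 'application/x-dosexec'}
file_object = MISPObject('file')
for key, mapping in dropped_file_mapping.items():
    if dropped.get(key) is not None:
        attribute = {'value': dropped[key], 'to_ids': False}
        attribute.update(mapping)               # adds 'type' and 'object_relation'
        file_object.add_attribute(**attribute)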

View File

@ -1,53 +1,15 @@
# -*- coding: utf-8 -*-
import json
from collections import defaultdict
from datetime import datetime
from pymisp import MISPAttribute, MISPEvent, MISPObject
import json
arch_type_mapping = {'ANDROID': 'parse_apk', 'LINUX': 'parse_elf', 'WINDOWS': 'parse_pe'}
domain_object_mapping = {'@ip': ('ip-dst', 'ip'), '@name': ('domain', 'domain')}
dropped_file_mapping = {'@entropy': ('float', 'entropy'),
'@file': ('filename', 'filename'),
'@size': ('size-in-bytes', 'size-in-bytes'),
'@type': ('mime-type', 'mimetype')}
dropped_hash_mapping = {'MD5': 'md5', 'SHA': 'sha1', 'SHA-256': 'sha256', 'SHA-512': 'sha512'}
elf_object_mapping = {'epaddr': 'entrypoint-address', 'machine': 'arch', 'osabi': 'os_abi'}
elf_section_flags_mapping = {'A': 'ALLOC', 'I': 'INFO_LINK', 'M': 'MERGE',
'S': 'STRINGS', 'T': 'TLS', 'W': 'WRITE',
'X': 'EXECINSTR'}
file_object_fields = ['filename', 'md5', 'sha1', 'sha256', 'sha512', 'ssdeep']
file_object_mapping = {'entropy': ('float', 'entropy'),
'filesize': ('size-in-bytes', 'size-in-bytes'),
'filetype': ('mime-type', 'mimetype')}
file_references_mapping = {'fileCreated': 'creates', 'fileDeleted': 'deletes',
'fileMoved': 'moves', 'fileRead': 'reads', 'fileWritten': 'writes'}
network_behavior_fields = ('srcip', 'dstip', 'srcport', 'dstport')
network_connection_object_mapping = {'srcip': ('ip-src', 'ip-src'), 'dstip': ('ip-dst', 'ip-dst'),
'srcport': ('port', 'src-port'), 'dstport': ('port', 'dst-port')}
pe_object_fields = {'entrypoint': ('text', 'entrypoint-address'),
'imphash': ('imphash', 'imphash')}
pe_object_mapping = {'CompanyName': 'company-name', 'FileDescription': 'file-description',
'FileVersion': 'file-version', 'InternalName': 'internal-filename',
'LegalCopyright': 'legal-copyright', 'OriginalFilename': 'original-filename',
'ProductName': 'product-filename', 'ProductVersion': 'product-version',
'Translation': 'lang-id'}
pe_section_object_mapping = {'characteristics': ('text', 'characteristic'),
'entropy': ('float', 'entropy'),
'name': ('text', 'name'), 'rawaddr': ('hex', 'offset'),
'rawsize': ('size-in-bytes', 'size-in-bytes'),
'virtaddr': ('hex', 'virtual_address'),
'virtsize': ('size-in-bytes', 'virtual_size')}
process_object_fields = {'cmdline': 'command-line', 'name': 'name',
'parentpid': 'parent-pid', 'pid': 'pid',
'path': 'current-directory'}
protocols = {'tcp': 4, 'udp': 4, 'icmp': 3,
'http': 7, 'https': 7, 'ftp': 7}
registry_references_mapping = {'keyValueCreated': 'creates', 'keyValueModified': 'modifies'}
regkey_object_mapping = {'name': ('text', 'name'), 'newdata': ('text', 'data'),
'path': ('regkey', 'key')}
signerinfo_object_mapping = {'sigissuer': ('text', 'issuer'),
'version': ('text', 'version')}
from joe_mapping import (arch_type_mapping, domain_object_mapping,
dropped_file_mapping, dropped_hash_mapping, elf_object_mapping,
elf_section_flags_mapping, file_object_fields, file_object_mapping,
file_references_mapping, network_behavior_fields,
network_connection_object_mapping, pe_object_fields, pe_object_mapping,
pe_section_object_mapping, process_object_fields, protocols,
registry_references_mapping, regkey_object_mapping, signerinfo_object_mapping)
class JoeParser():
@ -57,7 +19,7 @@ class JoeParser():
self.attributes = defaultdict(lambda: defaultdict(set))
self.process_references = {}
self.import_pe = config["import_pe"]
self.import_executable = config["import_executable"]
self.create_mitre_attack = config["mitre_attack"]
def parse_data(self, data):
@ -101,26 +63,46 @@ class JoeParser():
for droppedfile in droppedinfo['hash']:
file_object = MISPObject('file')
for key, mapping in dropped_file_mapping.items():
attribute_type, object_relation = mapping
file_object.add_attribute(object_relation, **{'type': attribute_type, 'value': droppedfile[key], 'to_ids': False})
if droppedfile.get(key) is not None:
attribute = {'value': droppedfile[key], 'to_ids': False}
attribute.update(mapping)
file_object.add_attribute(**attribute)
if droppedfile['@malicious'] == 'true':
file_object.add_attribute('state', **{'type': 'text', 'value': 'Malicious', 'to_ids': False})
file_object.add_attribute(
**{
'type': 'text',
'object_relation': 'state',
'value': 'Malicious',
'to_ids': False
}
)
for h in droppedfile['value']:
hash_type = dropped_hash_mapping[h['@algo']]
file_object.add_attribute(hash_type, **{'type': hash_type, 'value': h['$'], 'to_ids': False})
self.misp_event.add_object(**file_object)
self.references[self.process_references[(int(droppedfile['@targetid']), droppedfile['@process'])]].append({
'referenced_uuid': file_object.uuid,
'relationship_type': 'drops'
})
file_object.add_attribute(
**{
'type': hash_type,
'object_relation': hash_type,
'value': h['$'],
'to_ids': False
}
)
self.misp_event.add_object(file_object)
reference_key = (int(droppedfile['@targetid']), droppedfile['@process'])
if reference_key in self.process_references:
self.references[self.process_references[reference_key]].append(
{
'referenced_uuid': file_object.uuid,
'relationship_type': 'drops'
}
)
def parse_mitre_attack(self):
mitreattack = self.data['mitreattack']
mitreattack = self.data.get('mitreattack', {})
if mitreattack:
for tactic in mitreattack['tactic']:
if tactic.get('technique'):
for technique in tactic['technique']:
self.misp_event.add_tag('misp-galaxy:mitre-attack-pattern="{} - {}"'.format(technique['name'], technique['id']))
self.misp_event.add_tag(f'misp-galaxy:mitre-attack-pattern="{technique["name"]} - {technique["id"]}"')
def parse_network_behavior(self):
network = self.data['behavior']['network']
@ -129,44 +111,74 @@ class JoeParser():
if network.get(protocol):
for packet in network[protocol]['packet']:
timestamp = datetime.strptime(self.parse_timestamp(packet['timestamp']), '%b %d, %Y %H:%M:%S.%f')
connections[tuple(packet[field] for field in network_behavior_fields)][protocol].add(timestamp)
connections[tuple(packet.get(field) for field in network_behavior_fields)][protocol].add(timestamp)
for connection, data in connections.items():
attributes = self.prefetch_attributes_data(connection)
if len(data.keys()) == len(set(protocols[protocol] for protocol in data.keys())):
network_connection_object = MISPObject('network-connection')
for object_relation, attribute in attributes.items():
network_connection_object.add_attribute(object_relation, **attribute)
network_connection_object.add_attribute('first-packet-seen',
**{'type': 'datetime',
'value': min(tuple(min(timestamp) for timestamp in data.values())),
'to_ids': False})
for attribute in attributes:
network_connection_object.add_attribute(**attribute)
network_connection_object.add_attribute(
**{
'type': 'datetime',
'object_relation': 'first-packet-seen',
'value': min(tuple(min(timestamp) for timestamp in data.values())),
'to_ids': False
}
)
for protocol in data.keys():
network_connection_object.add_attribute('layer{}-protocol'.format(protocols[protocol]),
**{'type': 'text', 'value': protocol, 'to_ids': False})
self.misp_event.add_object(**network_connection_object)
network_connection_object.add_attribute(
**{
'type': 'text',
'object_relation': f'layer{protocols[protocol]}-protocol',
'value': protocol,
'to_ids': False
}
)
self.misp_event.add_object(network_connection_object)
self.references[self.analysisinfo_uuid].append(dict(referenced_uuid=network_connection_object.uuid,
relationship_type='initiates'))
else:
for protocol, timestamps in data.items():
network_connection_object = MISPObject('network-connection')
for object_relation, attribute in attributes.items():
network_connection_object.add_attribute(object_relation, **attribute)
network_connection_object.add_attribute('first-packet-seen', **{'type': 'datetime', 'value': min(timestamps), 'to_ids': False})
network_connection_object.add_attribute('layer{}-protocol'.format(protocols[protocol]), **{'type': 'text', 'value': protocol, 'to_ids': False})
self.misp_event.add_object(**network_connection_object)
for attribute in attributes:
network_connection_object.add_attribute(**attribute)
network_connection_object.add_attribute(
**{
'type': 'datetime',
'object_relation': 'first-packet-seen',
'value': min(timestamps),
'to_ids': False
}
)
network_connection_object.add_attribute(
**{
'type': 'text',
'object_relation': f'layer{protocols[protocol]}-protocol',
'value': protocol,
'to_ids': False
}
)
self.misp_event.add_object(network_connection_object)
self.references[self.analysisinfo_uuid].append(dict(referenced_uuid=network_connection_object.uuid,
relationship_type='initiates'))
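
The branch above hinges on the `protocols` layer map: when every protocol observed for a connection sits on a distinct OSI layer (e.g. tcp at 4 and http at 7), the timestamps are merged into a single network-connection object carrying one layerN-protocol attribute per protocol; protocols sharing a layer (e.g. tcp and udp) get one object each. A small illustrative check of that condition, with made-up timestamp sets:

protocols = {'tcp': 4, 'udp': 4, 'icmp': 3, 'http': 7, 'https': 7, 'ftp': 7}

def single_object(data):
    # True -> one merged network-connection object for this connection
    return len(data.keys()) == len(set(protocols[p] for p in data.keys()))

print(single_object({'tcp': {1}, 'http': {2}}))  # True: layers {4, 7}
print(single_object({'tcp': {1}, 'udp': {2}}))   # False: both protocols on layer 4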
def parse_screenshot(self):
screenshotdata = self.data['behavior']['screenshotdata']
if screenshotdata:
screenshotdata = screenshotdata['interesting']['$']
attribute = {'type': 'attachment', 'value': 'screenshot.jpg',
'data': screenshotdata, 'disable_correlation': True,
'to_ids': False}
self.misp_event.add_attribute(**attribute)
if self.data['behavior'].get('screenshotdata', {}).get('interesting') is not None:
screenshotdata = self.data['behavior']['screenshotdata']['interesting']['$']
self.misp_event.add_attribute(
**{
'type': 'attachment',
'value': 'screenshot.jpg',
'data': screenshotdata,
'disable_correlation': True,
'to_ids': False
}
)
def parse_system_behavior(self):
if 'system' not in self.data['behavior']:
return
system = self.data['behavior']['system']
if system.get('processes'):
process_activities = {'fileactivities': self.parse_fileactivities,
@ -175,10 +187,24 @@ class JoeParser():
general = process['general']
process_object = MISPObject('process')
for feature, relation in process_object_fields.items():
process_object.add_attribute(relation, **{'type': 'text', 'value': general[feature], 'to_ids': False})
start_time = datetime.strptime('{} {}'.format(general['date'], general['time']), '%d/%m/%Y %H:%M:%S')
process_object.add_attribute('start-time', **{'type': 'datetime', 'value': start_time, 'to_ids': False})
self.misp_event.add_object(**process_object)
process_object.add_attribute(
**{
'type': 'text',
'object_relation': relation,
'value': general[feature],
'to_ids': False
}
)
start_time = datetime.strptime(f"{general['date']} {general['time']}", '%d/%m/%Y %H:%M:%S')
process_object.add_attribute(
**{
'type': 'datetime',
'object_relation': 'start-time',
'value': start_time,
'to_ids': False
}
)
self.misp_event.add_object(process_object)
for field, to_call in process_activities.items():
if process.get(field):
to_call(process_object.uuid, process[field])
@ -211,9 +237,15 @@ class JoeParser():
url_object = MISPObject("url")
self.analysisinfo_uuid = url_object.uuid
url_object.add_attribute("url", generalinfo["target"]["url"], to_ids=False)
self.misp_event.add_object(**url_object)
url_object.add_attribute(
**{
'type': 'url',
'object_relation': 'url',
'value': generalinfo["target"]["url"],
'to_ids': False
}
)
self.misp_event.add_object(url_object)
def parse_fileinfo(self):
fileinfo = self.data['fileinfo']
@ -222,20 +254,29 @@ class JoeParser():
self.analysisinfo_uuid = file_object.uuid
for field in file_object_fields:
file_object.add_attribute(field, **{'type': field, 'value': fileinfo[field], 'to_ids': False})
file_object.add_attribute(
**{
'type': field,
'object_relation': field,
'value': fileinfo[field],
'to_ids': False
}
)
for field, mapping in file_object_mapping.items():
attribute_type, object_relation = mapping
file_object.add_attribute(object_relation, **{'type': attribute_type, 'value': fileinfo[field], 'to_ids': False})
if fileinfo.get(field) is not None:
attribute = {'value': fileinfo[field], 'to_ids': False}
attribute.update(mapping)
file_object.add_attribute(**attribute)
arch = self.data['generalinfo']['arch']
if arch in arch_type_mapping:
if self.import_executable and arch in arch_type_mapping:
to_call = arch_type_mapping[arch]
getattr(self, to_call)(fileinfo, file_object)
else:
self.misp_event.add_object(**file_object)
self.misp_event.add_object(file_object)
def parse_apk(self, fileinfo, file_object):
apkinfo = fileinfo['apk']
self.misp_event.add_object(**file_object)
self.misp_event.add_object(file_object)
permission_lists = defaultdict(list)
for permission in apkinfo['requiredpermissions']['permission']:
permission = permission['@name'].split('.')
@ -243,16 +284,30 @@ class JoeParser():
attribute_type = 'text'
for comment, permissions in permission_lists.items():
permission_object = MISPObject('android-permission')
permission_object.add_attribute('comment', **dict(type=attribute_type, value=comment, to_ids=False))
permission_object.add_attribute(
**{
'type': attribute_type,
'object_relation': 'comment',
'value': comment,
'to_ids': False
}
)
for permission in permissions:
permission_object.add_attribute('permission', **dict(type=attribute_type, value=permission, to_ids=False))
self.misp_event.add_object(**permission_object)
permission_object.add_attribute(
**{
'type': attribute_type,
'object_relation': 'permission',
'value': permission,
'to_ids': False
}
)
self.misp_event.add_object(permission_object)
self.references[file_object.uuid].append(dict(referenced_uuid=permission_object.uuid,
relationship_type='grants'))
def parse_elf(self, fileinfo, file_object):
elfinfo = fileinfo['elf']
self.misp_event.add_object(**file_object)
self.misp_event.add_object(file_object)
attribute_type = 'text'
relationship = 'includes'
size = 'size-in-bytes'
@ -264,47 +319,96 @@ class JoeParser():
if elf.get('type'):
# Haven't seen anything but EXEC yet in the files I tested
attribute_value = "EXECUTABLE" if elf['type'] == "EXEC (Executable file)" else elf['type']
elf_object.add_attribute('type', **dict(type=attribute_type, value=attribute_value, to_ids=False))
elf_object.add_attribute(
**{
'type': attribute_type,
'object_relation': 'type',
'value': attribute_value,
'to_ids': False
}
)
for feature, relation in elf_object_mapping.items():
if elf.get(feature):
elf_object.add_attribute(relation, **dict(type=attribute_type, value=elf[feature], to_ids=False))
elf_object.add_attribute(
**{
'type': attribute_type,
'object_relation': relation,
'value': elf[feature],
'to_ids': False
}
)
sections_number = len(fileinfo['sections']['section'])
elf_object.add_attribute('number-sections', **{'type': 'counter', 'value': sections_number, 'to_ids': False})
self.misp_event.add_object(**elf_object)
elf_object.add_attribute(
**{
'type': 'counter',
'object_relation': 'number-sections',
'value': sections_number,
'to_ids': False
}
)
self.misp_event.add_object(elf_object)
for section in fileinfo['sections']['section']:
section_object = MISPObject('elf-section')
for feature in ('name', 'type'):
if section.get(feature):
section_object.add_attribute(feature, **dict(type=attribute_type, value=section[feature], to_ids=False))
section_object.add_attribute(
**{
'type': attribute_type,
'object_relation': feature,
'value': section[feature],
'to_ids': False
}
)
if section.get('size'):
section_object.add_attribute(size, **dict(type=size, value=int(section['size'], 16), to_ids=False))
section_object.add_attribute(
**{
'type': size,
'object_relation': size,
'value': int(section['size'], 16),
'to_ids': False
}
)
for flag in section['flagsdesc']:
try:
attribute_value = elf_section_flags_mapping[flag]
section_object.add_attribute('flag', **dict(type=attribute_type, value=attribute_value, to_ids=False))
section_object.add_attribute(
**{
'type': attribute_type,
'object_relation': 'flag',
'value': attribute_value,
'to_ids': False
}
)
except KeyError:
print(f'Unknown elf section flag: {flag}')
continue
self.misp_event.add_object(**section_object)
self.misp_event.add_object(section_object)
self.references[elf_object.uuid].append(dict(referenced_uuid=section_object.uuid,
relationship_type=relationship))
def parse_pe(self, fileinfo, file_object):
if not self.import_pe:
return
try:
peinfo = fileinfo['pe']
except KeyError:
self.misp_event.add_object(**file_object)
self.misp_event.add_object(file_object)
return
pe_object = MISPObject('pe')
relationship = 'includes'
file_object.add_reference(pe_object.uuid, relationship)
self.misp_event.add_object(**file_object)
self.misp_event.add_object(file_object)
for field, mapping in pe_object_fields.items():
attribute_type, object_relation = mapping
pe_object.add_attribute(object_relation, **{'type': attribute_type, 'value': peinfo[field], 'to_ids': False})
pe_object.add_attribute('compilation-timestamp', **{'type': 'datetime', 'value': int(peinfo['timestamp'].split()[0], 16), 'to_ids': False})
if peinfo.get(field) is not None:
attribute = {'value': peinfo[field], 'to_ids': False}
attribute.update(mapping)
pe_object.add_attribute(**attribute)
pe_object.add_attribute(
**{
'type': 'datetime',
'object_relation': 'compilation-timestamp',
'value': int(peinfo['timestamp'].split()[0], 16),
'to_ids': False
}
)
program_name = fileinfo['filename']
if peinfo['versions']:
for feature in peinfo['versions']['version']:
@ -312,33 +416,57 @@ class JoeParser():
if name == 'InternalName':
program_name = feature['value']
if name in pe_object_mapping:
pe_object.add_attribute(pe_object_mapping[name], **{'type': 'text', 'value': feature['value'], 'to_ids': False})
pe_object.add_attribute(
**{
'type': 'text',
'object_relation': pe_object_mapping[name],
'value': feature['value'],
'to_ids': False
}
)
sections_number = len(peinfo['sections']['section'])
pe_object.add_attribute('number-sections', **{'type': 'counter', 'value': sections_number, 'to_ids': False})
pe_object.add_attribute(
**{
'type': 'counter',
'object_relation': 'number-sections',
'value': sections_number,
'to_ids': False
}
)
signatureinfo = peinfo['signature']
if signatureinfo['signed']:
signerinfo_object = MISPObject('authenticode-signerinfo')
pe_object.add_reference(signerinfo_object.uuid, 'signed-by')
self.misp_event.add_object(**pe_object)
signerinfo_object.add_attribute('program-name', **{'type': 'text', 'value': program_name, 'to_ids': False})
self.misp_event.add_object(pe_object)
signerinfo_object.add_attribute(
**{
'type': 'text',
'object_relation': 'program-name',
'value': program_name,
'to_ids': False
}
)
for feature, mapping in signerinfo_object_mapping.items():
attribute_type, object_relation = mapping
signerinfo_object.add_attribute(object_relation, **{'type': attribute_type, 'value': signatureinfo[feature], 'to_ids': False})
self.misp_event.add_object(**signerinfo_object)
if signatureinfo.get(feature) is not None:
attribute = {'value': signatureinfo[feature], 'to_ids': False}
attribute.update(mapping)
signerinfo_object.add_attribute(**attribute)
self.misp_event.add_object(signerinfo_object)
else:
self.misp_event.add_object(**pe_object)
self.misp_event.add_object(pe_object)
for section in peinfo['sections']['section']:
section_object = self.parse_pe_section(section)
self.references[pe_object.uuid].append(dict(referenced_uuid=section_object.uuid,
relationship_type=relationship))
self.misp_event.add_object(**section_object)
self.misp_event.add_object(section_object)
def parse_pe_section(self, section):
section_object = MISPObject('pe-section')
for feature, mapping in pe_section_object_mapping.items():
if section.get(feature):
attribute_type, object_relation = mapping
section_object.add_attribute(object_relation, **{'type': attribute_type, 'value': section[feature], 'to_ids': False})
if section.get(feature) is not None:
attribute = {'value': section[feature], 'to_ids': False}
attribute.update(mapping)
section_object.add_attribute(**attribute)
return section_object
def parse_network_interactions(self):
@ -348,10 +476,11 @@ class JoeParser():
if domain['@ip'] != 'unknown':
domain_object = MISPObject('domain-ip')
for key, mapping in domain_object_mapping.items():
attribute_type, object_relation = mapping
domain_object.add_attribute(object_relation,
**{'type': attribute_type, 'value': domain[key], 'to_ids': False})
self.misp_event.add_object(**domain_object)
if domain.get(key) is not None:
attribute = {'value': domain[key], 'to_ids': False}
attribute.update(mapping)
domain_object.add_attribute(**attribute)
self.misp_event.add_object(domain_object)
reference = dict(referenced_uuid=domain_object.uuid, relationship_type='contacts')
self.add_process_reference(domain['@targetid'], domain['@currentpath'], reference)
else:
@ -394,10 +523,19 @@ class JoeParser():
for call in registryactivities[feature]['call']:
registry_key = MISPObject('registry-key')
for field, mapping in regkey_object_mapping.items():
attribute_type, object_relation = mapping
registry_key.add_attribute(object_relation, **{'type': attribute_type, 'value': call[field], 'to_ids': False})
registry_key.add_attribute('data-type', **{'type': 'text', 'value': 'REG_{}'.format(call['type'].upper()), 'to_ids': False})
self.misp_event.add_object(**registry_key)
if call.get(field) is not None:
attribute = {'value': call[field], 'to_ids': False}
attribute.update(mapping)
registry_key.add_attribute(**attribute)
registry_key.add_attribute(
**{
'type': 'text',
'object_relation': 'data-type',
'value': f"REG_{call['type'].upper()}",
'to_ids': False
}
)
self.misp_event.add_object(registry_key)
self.references[process_uuid].append(dict(referenced_uuid=registry_key.uuid,
relationship_type=relationship))
@ -427,8 +565,9 @@ class JoeParser():
@staticmethod
def prefetch_attributes_data(connection):
attributes = {}
attributes = []
for field, value in zip(network_behavior_fields, connection):
attribute_type, object_relation = network_connection_object_mapping[field]
attributes[object_relation] = {'type': attribute_type, 'value': value, 'to_ids': False}
attribute = {'value': value, 'to_ids': False}
attribute.update(network_connection_object_mapping[field])
attributes.append(attribute)
return attributes

@ -0,0 +1 @@
Subproject commit 9dc7e3578f2165e32a3b7cdd09e9e552f2d98d36

View File

@ -0,0 +1,263 @@
# Copyright (c) 2009-2021 Qintel, LLC
# Licensed under Apache 2.0 (https://www.apache.org/licenses/LICENSE-2.0.txt)
from urllib.request import Request, urlopen
from urllib.parse import urlencode
from urllib.error import HTTPError
from time import sleep
from json import loads
import os
from copy import deepcopy
from datetime import datetime, timedelta
from gzip import GzipFile
VERSION = '1.0.1'
USER_AGENT = 'integrations-helper'
MAX_RETRY_ATTEMPTS = 5
DEFAULT_HEADERS = {
'User-Agent': f'{USER_AGENT}/{VERSION}'
}
REMOTE_MAP = {
'pmi': 'https://api.pmi.qintel.com',
'qwatch': 'https://api.qwatch.qintel.com',
'qauth': 'https://api.qauth.qintel.com',
'qsentry_feed': 'https://qsentry.qintel.com',
'qsentry': 'https://api.qsentry.qintel.com'
}
ENDPOINT_MAP = {
'pmi': {
'ping': '/users/me',
'cve': 'cves'
},
'qsentry_feed': {
'anon': '/files/anonymization',
'mal_hosting': '/files/malicious_hosting'
},
'qsentry': {},
'qwatch': {
'ping': '/users/me',
'exposures': 'exposures'
},
'qauth': {}
}
def _get_request_wait_time(attempts):
""" Use Fibonacci numbers for determining the time to wait when rate limits
have been encountered.
"""
n = attempts + 3
a, b = 1, 0
for _ in range(n):
a, b = a + b, a
return a
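
As a rough sanity check on the helper above (not part of the file): starting the Fibonacci sequence three terms in makes the wait times grow as 5, 8, 13, 21, ... seconds for attempts 1, 2, 3, 4.

# illustrative only, assuming _get_request_wait_time as defined above
for attempt in range(1, 5):
    print(attempt, _get_request_wait_time(attempt))  # 1 -> 5, 2 -> 8, 3 -> 13, 4 -> 21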
def _search(**kwargs):
remote = kwargs.get('remote')
max_retries = int(kwargs.get('max_retries', MAX_RETRY_ATTEMPTS))
params = kwargs.get('params', {})
headers = _set_headers(**kwargs)
logger = kwargs.get('logger')
params = urlencode(params)
url = remote + "?" + params
req = Request(url, headers=headers)
request_attempts = 1
while request_attempts <= max_retries:
try:
return urlopen(req)
except HTTPError as e:
response = e
except Exception as e:
raise Exception('API connection error') from e
if response.code not in [429, 504]:
raise Exception(f'API connection error: {response}')
if request_attempts < max_retries:
wait_time = _get_request_wait_time(request_attempts)
if response.code == 429:
msg = f'rate limit reached on attempt {request_attempts}, ' \
f'waiting {wait_time} seconds'
if logger:
logger(msg)
else:
msg = f'connection timed out, retrying in {wait_time} seconds'
if logger:
logger(msg)
sleep(wait_time)
else:
raise Exception('Max API retries exceeded')
request_attempts += 1
def _set_headers(**kwargs):
headers = deepcopy(DEFAULT_HEADERS)
if kwargs.get('user_agent'):
headers['User-Agent'] = \
f"{kwargs['user_agent']}/{USER_AGENT}/{VERSION}"
# TODO: deprecate
if kwargs.get('client_id') or kwargs.get('client_secret'):
try:
headers['Cf-Access-Client-Id'] = kwargs['client_id']
headers['Cf-Access-Client-Secret'] = kwargs['client_secret']
except KeyError:
raise Exception('missing client_id or client_secret')
if kwargs.get('token'):
headers['x-api-key'] = kwargs['token']
return headers
def _set_remote(product, query_type, **kwargs):
remote = kwargs.get('remote')
endpoint = kwargs.get('endpoint', ENDPOINT_MAP[product].get(query_type))
if not remote:
remote = REMOTE_MAP[product]
if not endpoint:
raise Exception('invalid search type')
remote = remote.rstrip('/')
endpoint = endpoint.lstrip('/')
return f'{remote}/{endpoint}'
def _process_qsentry(resp):
if resp.getheader('Content-Encoding', '') == 'gzip':
with GzipFile(fileobj=resp) as file:
for line in file.readlines():
yield loads(line)
def search_pmi(search_term, query_type, **kwargs):
"""
Search PMI
:param str search_term: Search term
:param str query_type: Query type [cve|ping]
:param dict kwargs: extra client args [remote|token|params]
:return: API JSON response object
:rtype: dict
"""
kwargs['remote'] = _set_remote('pmi', query_type, **kwargs)
kwargs['token'] = kwargs.get('token', os.getenv('PMI_TOKEN'))
params = kwargs.get('params', {})
params.update({'identifier': search_term})
kwargs['params'] = params
return loads(_search(**kwargs).read())
def search_qwatch(search_term, search_type, query_type, **kwargs):
"""
Search QWatch for exposed credentials
:param str search_term: Search term
:param str search_type: Search term type [domain|email]
:param str query_type: Query type [exposures]
:param dict kwargs: extra client args [remote|token|params]
:return: API JSON response object
:rtype: dict
"""
kwargs['remote'] = _set_remote('qwatch', query_type, **kwargs)
kwargs['token'] = kwargs.get('token', os.getenv('QWATCH_TOKEN'))
params = kwargs.get('params', {})
if search_type:
params.update({search_type: search_term})
kwargs['params'] = params
return loads(_search(**kwargs).read())
def search_qauth(search_term, **kwargs):
"""
Search QAuth
:param str search_term: Search term
:param dict kwargs: extra client args [remote|token|params]
:return: API JSON response object
:rtype: dict
"""
if not kwargs.get('endpoint'):
kwargs['endpoint'] = '/'
kwargs['remote'] = _set_remote('qauth', None, **kwargs)
kwargs['token'] = kwargs.get('token', os.getenv('QAUTH_TOKEN'))
params = kwargs.get('params', {})
params.update({'q': search_term})
kwargs['params'] = params
return loads(_search(**kwargs).read())
def search_qsentry(search_term, **kwargs):
"""
Search QSentry
:param str search_term: Search term
:param dict kwargs: extra client args [remote|token|params]
:return: API JSON response object
:rtype: dict
"""
if not kwargs.get('endpoint'):
kwargs['endpoint'] = '/'
kwargs['remote'] = _set_remote('qsentry', None, **kwargs)
kwargs['token'] = kwargs.get('token', os.getenv('QSENTRY_TOKEN'))
params = kwargs.get('params', {})
params.update({'q': search_term})
kwargs['params'] = params
return loads(_search(**kwargs).read())
def qsentry_feed(query_type='anon', feed_date=datetime.today(), **kwargs):
"""
Fetch the most recent QSentry Feed
:param str query_type: Feed type [anon|mal_hosting]
:param dict kwargs: extra client args [remote|token|params]
:param datetime feed_date: feed date to fetch
:return: API JSON response object
:rtype: Iterator[dict]
"""
remote = _set_remote('qsentry_feed', query_type, **kwargs)
kwargs['token'] = kwargs.get('token', os.getenv('QSENTRY_TOKEN'))
feed_date = (feed_date - timedelta(days=1)).strftime('%Y%m%d')
kwargs['remote'] = f'{remote}/{feed_date}'
resp = _search(**kwargs)
for r in _process_qsentry(resp):
yield r
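
A hedged usage sketch of the client helpers above; the tokens, search term and user agent are placeholders, and the exact response shape depends on the Qintel APIs:

# minimal sketch, assuming valid Qintel tokens are available
result = search_pmi('CVE-2021-44228', 'cve', token='<pmi-token>', user_agent='my-misp-module')
print(result)

# QSentry feed entries are gzip-decoded and yielded line by line as dicts
for entry in qsentry_feed(query_type='anon', token='<qsentry-token>'):
    print(entry)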

File diff suppressed because it is too large

View File

@ -0,0 +1,460 @@
################################################################################
# ATTRIBUTES AND OBJECTS MAPPING #
################################################################################
attributes_mapping = {
'filename': '_parse_name',
'ip-src': '_parse_value',
'ip-dst': '_parse_value',
'hostname': '_parse_value',
'domain': '_parse_value',
'domain|ip': '_parse_domain_ip_attribute',
'email-src': '_parse_value',
'email-dst': '_parse_value',
'email-attachment': '_parse_name',
'url': '_parse_value',
'regkey': '_parse_regkey_attribute',
'regkey|value': '_parse_regkey_value',
'malware-sample': '_parse_malware_sample',
'mutex': '_parse_name',
'uri': '_parse_value',
'port': '_parse_port',
'ip-dst|port': '_parse_network_attribute',
'ip-src|port': '_parse_network_attribute',
'hostname|port': '_parse_network_attribute',
'email-reply-to': '_parse_email_reply_to',
'attachment': '_parse_attachment',
'mac-address': '_parse_value',
'AS': '_parse_number'
}
attributes_type_mapping = {
'md5': '_parse_hash',
'sha1': '_parse_hash',
'sha256': '_parse_hash',
'filename|md5': '_parse_filename_hash',
'filename|sha1': '_parse_filename_hash',
'filename|sha256': '_parse_filename_hash',
'email-subject': '_parse_email_message',
'email-body': '_parse_email_message',
'authentihash': '_parse_hash',
'ssdeep': '_parse_hash',
'imphash': '_parse_hash',
'pehash': '_parse_hash',
'impfuzzy': '_parse_hash',
'sha224': '_parse_hash',
'sha384': '_parse_hash',
'sha512': '_parse_hash',
'sha512/224': '_parse_hash',
'sha512/256': '_parse_hash',
'tlsh': '_parse_hash',
'cdhash': '_parse_hash',
'filename|authentihash': '_parse_filename_hash',
'filename|ssdeep': '_parse_filename_hash',
'filename|imphash': '_parse_filename_hash',
'filename|impfuzzy': '_parse_filename_hash',
'filename|pehash': '_parse_filename_hash',
'filename|sha224': '_parse_filename_hash',
'filename|sha384': '_parse_filename_hash',
'filename|sha512': '_parse_filename_hash',
'filename|sha512/224': '_parse_filename_hash',
'filename|sha512/256': '_parse_filename_hash',
'filename|tlsh': '_parse_filename_hash',
'x509-fingerprint-md5': '_parse_x509_attribute',
'x509-fingerprint-sha1': '_parse_x509_attribute',
'x509-fingerprint-sha256': '_parse_x509_attribute'
}
objects_mapping = {
'asn': {
'observable': 'parse_asn_observable',
'pattern': 'parse_asn_pattern'},
'credential': {
'observable': 'parse_credential_observable',
'pattern': 'parse_credential_pattern'},
'domain-ip': {
'observable': 'parse_domain_ip_observable',
'pattern': 'parse_domain_ip_pattern'},
'email': {
'observable': 'parse_email_observable',
'pattern': 'parse_email_pattern'},
'file': {
'observable': 'parse_file_observable',
'pattern': 'parse_file_pattern'},
'ip-port': {
'observable': 'parse_ip_port_observable',
'pattern': 'parse_ip_port_pattern'},
'network-connection': {
'observable': 'parse_network_connection_observable',
'pattern': 'parse_network_connection_pattern'},
'network-socket': {
'observable': 'parse_network_socket_observable',
'pattern': 'parse_network_socket_pattern'},
'process': {
'observable': 'parse_process_observable',
'pattern': 'parse_process_pattern'},
'registry-key': {
'observable': 'parse_regkey_observable',
'pattern': 'parse_regkey_pattern'},
'url': {
'observable': 'parse_url_observable',
'pattern': 'parse_url_pattern'},
'user-account': {
'observable': 'parse_user_account_observable',
'pattern': 'parse_user_account_pattern'},
'WindowsPEBinaryFile': {
'observable': 'parse_pe_observable',
'pattern': 'parse_pe_pattern'},
'x509': {
'observable': 'parse_x509_observable',
'pattern': 'parse_x509_pattern'}
}
observable_mapping = {
('artifact', 'file'): 'parse_file_observable',
('artifact', 'directory', 'file'): 'parse_file_observable',
('artifact', 'email-addr', 'email-message', 'file'): 'parse_email_observable',
('autonomous-system',): 'parse_asn_observable',
('autonomous-system', 'ipv4-addr'): 'parse_asn_observable',
('autonomous-system', 'ipv6-addr'): 'parse_asn_observable',
('autonomous-system', 'ipv4-addr', 'ipv6-addr'): 'parse_asn_observable',
('directory', 'file'): 'parse_file_observable',
('domain-name',): 'parse_domain_ip_observable',
('domain-name', 'ipv4-addr'): 'parse_domain_ip_observable',
('domain-name', 'ipv6-addr'): 'parse_domain_ip_observable',
('domain-name', 'ipv4-addr', 'ipv6-addr'): 'parse_domain_ip_observable',
('domain-name', 'ipv4-addr', 'network-traffic'): 'parse_domain_ip_network_traffic_observable',
('domain-name', 'ipv6-addr', 'network-traffic'): 'parse_domain_ip_network_traffic_observable',
('domain-name', 'ipv4-addr', 'ipv6-addr', 'network-traffic'): 'parse_domain_ip_network_traffic_observable',
('domain-name', 'network-traffic'): 'parse_domain_network_traffic_observable',
('domain-name', 'network-traffic', 'url'): 'parse_url_observable',
('email-addr',): 'parse_email_address_observable',
('email-addr', 'email-message'): 'parse_email_observable',
('email-addr', 'email-message', 'file'): 'parse_email_observable',
('email-message',): 'parse_email_observable',
('file',): 'parse_file_observable',
('file', 'process'): 'parse_process_observable',
('ipv4-addr',): 'parse_ip_address_observable',
('ipv6-addr',): 'parse_ip_address_observable',
('ipv4-addr', 'network-traffic'): 'parse_ip_network_traffic_observable',
('ipv6-addr', 'network-traffic'): 'parse_ip_network_traffic_observable',
('ipv4-addr', 'ipv6-addr', 'network-traffic'): 'parse_ip_network_traffic_observable',
('mac-addr',): 'parse_mac_address_observable',
('mutex',): 'parse_mutex_observable',
('process',): 'parse_process_observable',
('x509-certificate',): 'parse_x509_observable',
('url',): 'parse_url_observable',
('user-account',): 'parse_user_account_observable',
('windows-registry-key',): 'parse_regkey_observable'
}
pattern_mapping = {
('artifact', 'file'): 'parse_file_pattern',
('artifact', 'directory', 'file'): 'parse_file_pattern',
('autonomous-system', ): 'parse_as_pattern',
('autonomous-system', 'ipv4-addr'): 'parse_as_pattern',
('autonomous-system', 'ipv6-addr'): 'parse_as_pattern',
('autonomous-system', 'ipv4-addr', 'ipv6-addr'): 'parse_as_pattern',
('directory',): 'parse_file_pattern',
('directory', 'file'): 'parse_file_pattern',
('domain-name',): 'parse_domain_ip_port_pattern',
('domain-name', 'ipv4-addr'): 'parse_domain_ip_port_pattern',
('domain-name', 'ipv6-addr'): 'parse_domain_ip_port_pattern',
('domain-name', 'ipv4-addr', 'ipv6-addr'): 'parse_domain_ip_port_pattern',
('domain-name', 'ipv4-addr', 'url'): 'parse_url_pattern',
('domain-name', 'ipv6-addr', 'url'): 'parse_url_pattern',
('domain-name', 'ipv4-addr', 'ipv6-addr', 'url'): 'parse_url_pattern',
('domain-name', 'network-traffic'): 'parse_domain_ip_port_pattern',
('domain-name', 'network-traffic', 'url'): 'parse_url_pattern',
('email-addr',): 'parse_email_address_pattern',
('email-message',): 'parse_email_message_pattern',
('file',): 'parse_file_pattern',
('ipv4-addr',): 'parse_ip_address_pattern',
('ipv6-addr',): 'parse_ip_address_pattern',
('ipv4-addr', 'ipv6-addr'): 'parse_ip_address_pattern',
('mac-addr',): 'parse_mac_address_pattern',
('mutex',): 'parse_mutex_pattern',
('network-traffic',): 'parse_network_traffic_pattern',
('process',): 'parse_process_pattern',
('url',): 'parse_url_pattern',
('user-account',): 'parse_user_account_pattern',
('windows-registry-key',): 'parse_regkey_pattern',
('x509-certificate',): 'parse_x509_pattern'
}
pattern_forbidden_relations = (' LIKE ', ' FOLLOWEDBY ', ' MATCHES ', ' ISSUBSET ', ' ISSUPERSET ', ' REPEATS ')
single_attribute_fields = ('type', 'value', 'to_ids')
################################################################################
# OBSERVABLE OBJECTS AND PATTERNS MAPPING. #
################################################################################
address_family_attribute_mapping = {'type': 'text', 'object_relation': 'address-family'}
as_number_attribute_mapping = {'type': 'AS', 'object_relation': 'asn'}
description_attribute_mapping = {'type': 'text', 'object_relation': 'description'}
asn_subnet_attribute_mapping = {'type': 'ip-src', 'object_relation': 'subnet-announced'}
cc_attribute_mapping = {'type': 'email-dst', 'object_relation': 'cc'}
credential_attribute_mapping = {'type': 'text', 'object_relation': 'password'}
data_attribute_mapping = {'type': 'text', 'object_relation': 'data'}
data_type_attribute_mapping = {'type': 'text', 'object_relation': 'data-type'}
domain_attribute_mapping = {'type': 'domain', 'object_relation': 'domain'}
domain_family_attribute_mapping = {'type': 'text', 'object_relation': 'domain-family'}
dst_port_attribute_mapping = {'type': 'port', 'object_relation': 'dst-port'}
email_attachment_attribute_mapping = {'type': 'email-attachment', 'object_relation': 'attachment'}
email_date_attribute_mapping = {'type': 'datetime', 'object_relation': 'send-date'}
email_subject_attribute_mapping = {'type': 'email-subject', 'object_relation': 'subject'}
encoding_attribute_mapping = {'type': 'text', 'object_relation': 'file-encoding'}
end_datetime_attribute_mapping = {'type': 'datetime', 'object_relation': 'last-seen'}
entropy_mapping = {'type': 'float', 'object_relation': 'entropy'}
filename_attribute_mapping = {'type': 'filename', 'object_relation': 'filename'}
from_attribute_mapping = {'type': 'email-src', 'object_relation': 'from'}
imphash_mapping = {'type': 'imphash', 'object_relation': 'imphash'}
id_attribute_mapping = {'type': 'text', 'object_relation': 'id'}
ip_attribute_mapping = {'type': 'ip-dst', 'object_relation': 'ip'}
issuer_attribute_mapping = {'type': 'text', 'object_relation': 'issuer'}
key_attribute_mapping = {'type': 'regkey', 'object_relation': 'key'}
malware_sample_attribute_mapping = {'type': 'malware-sample', 'object_relation': 'malware-sample'}
mime_type_attribute_mapping = {'type': 'mime-type', 'object_relation': 'mimetype'}
modified_attribute_mapping = {'type': 'datetime', 'object_relation': 'last-modified'}
name_attribute_mapping = {'type': 'text', 'object_relation': 'name'}
network_traffic_ip = {'type': 'ip-{}', 'object_relation': 'ip-{}'}
number_sections_mapping = {'type': 'counter', 'object_relation': 'number-sections'}
password_mapping = {'type': 'text', 'object_relation': 'password'}
path_attribute_mapping = {'type': 'text', 'object_relation': 'path'}
pe_type_mapping = {'type': 'text', 'object_relation': 'type'}
pid_attribute_mapping = {'type': 'text', 'object_relation': 'pid'}
process_command_line_mapping = {'type': 'text', 'object_relation': 'command-line'}
process_creation_time_mapping = {'type': 'datetime', 'object_relation': 'creation-time'}
process_image_mapping = {'type': 'filename', 'object_relation': 'image'}
process_name_mapping = {'type': 'text', 'object_relation': 'name'}
regkey_name_attribute_mapping = {'type': 'text', 'object_relation': 'name'}
references_attribute_mapping = {'type': 'link', 'object_relation': 'references'}
reply_to_attribute_mapping = {'type': 'email-reply-to', 'object_relation': 'reply-to'}
screenshot_attribute_mapping = {'type': 'attachment', 'object_relation': 'screenshot'}
section_name_mapping = {'type': 'text', 'object_relation': 'name'}
serial_number_attribute_mapping = {'type': 'text', 'object_relation': 'serial-number'}
size_attribute_mapping = {'type': 'size-in-bytes', 'object_relation': 'size-in-bytes'}
src_port_attribute_mapping = {'type': 'port', 'object_relation': 'src-port'}
start_datetime_attribute_mapping = {'type': 'datetime', 'object_relation': 'first-seen'}
state_attribute_mapping = {'type': 'text', 'object_relation': 'state'}
summary_attribute_mapping = {'type': 'text', 'object_relation': 'summary'}
to_attribute_mapping = {'type': 'email-dst', 'object_relation': 'to'}
url_attribute_mapping = {'type': 'url', 'object_relation': 'url'}
url_port_attribute_mapping = {'type': 'port', 'object_relation': 'port'}
user_id_mapping = {'type': 'text', 'object_relation': 'username'}
x_mailer_attribute_mapping = {'type': 'email-x-mailer', 'object_relation': 'x-mailer'}
x509_md5_attribute_mapping = {'type': 'x509-fingerprint-md5', 'object_relation': 'x509-fingerprint-md5'}
x509_sha1_attribute_mapping = {'type': 'x509-fingerprint-sha1', 'object_relation': 'x509-fingerprint-sha1'}
x509_sha256_attribute_mapping = {'type': 'x509-fingerprint-sha256', 'object_relation': 'x509-fingerprint-sha256'}
x509_spka_attribute_mapping = {'type': 'text', 'object_relation': 'pubkey-info-algorithm'} # x509 subject public key algorithm
x509_spke_attribute_mapping = {'type': 'text', 'object_relation': 'pubkey-info-exponent'} # x509 subject public key exponent
x509_spkm_attribute_mapping = {'type': 'text', 'object_relation': 'pubkey-info-modulus'} # x509 subject public key modulus
x509_subject_attribute_mapping = {'type': 'text', 'object_relation': 'subject'}
x509_version_attribute_mapping = {'type': 'text', 'object_relation': 'version'}
x509_vna_attribute_mapping = {'type': 'datetime', 'object_relation': 'validity-not-after'} # x509 validity not after
x509_vnb_attribute_mapping = {'type': 'datetime', 'object_relation': 'validity-not-before'} # x509 validity not before
asn_mapping = {'number': as_number_attribute_mapping,
'autonomous-system:number': as_number_attribute_mapping,
'name': description_attribute_mapping,
'autonomous-system:name': description_attribute_mapping,
'ipv4-addr': asn_subnet_attribute_mapping,
'ipv6-addr': asn_subnet_attribute_mapping,
'ipv4-addr:value': asn_subnet_attribute_mapping,
'ipv6-addr:value': asn_subnet_attribute_mapping}
attack_pattern_mapping = {'name': name_attribute_mapping,
'description': summary_attribute_mapping}
attack_pattern_references_mapping = {'mitre-attack': references_attribute_mapping,
'capec': id_attribute_mapping}
course_of_action_mapping = {'description': description_attribute_mapping,
'name': name_attribute_mapping}
credential_mapping = {'credential': credential_attribute_mapping,
'user-account:credential': credential_attribute_mapping,
'user_id': user_id_mapping,
'user-account:user_id': user_id_mapping}
domain_ip_mapping = {'domain-name': domain_attribute_mapping,
'domain-name:value': domain_attribute_mapping,
'ipv4-addr': ip_attribute_mapping,
'ipv6-addr': ip_attribute_mapping,
'ipv4-addr:value': ip_attribute_mapping,
'ipv6-addr:value': ip_attribute_mapping,
'domain-name:resolves_to_refs[*].value': ip_attribute_mapping,
'network-traffic:dst_port': dst_port_attribute_mapping,
'network-traffic:src_port': src_port_attribute_mapping}
email_mapping = {'date': email_date_attribute_mapping,
'email-message:date': email_date_attribute_mapping,
'email-message:to_refs[*].value': to_attribute_mapping,
'email-message:cc_refs[*].value': cc_attribute_mapping,
'subject': email_subject_attribute_mapping,
'email-message:subject': email_subject_attribute_mapping,
'X-Mailer': x_mailer_attribute_mapping,
'email-message:additional_header_fields.x_mailer': x_mailer_attribute_mapping,
'Reply-To': reply_to_attribute_mapping,
'email-message:additional_header_fields.reply_to': reply_to_attribute_mapping,
'email-message:from_ref.value': from_attribute_mapping,
'email-addr:value': to_attribute_mapping}
email_references_mapping = {'attachment': email_attachment_attribute_mapping,
'cc_refs': cc_attribute_mapping,
'from_ref': from_attribute_mapping,
'screenshot': screenshot_attribute_mapping,
'to_refs': to_attribute_mapping}
file_mapping = {'artifact:mime_type': mime_type_attribute_mapping,
'file:content_ref.mime_type': mime_type_attribute_mapping,
'mime_type': mime_type_attribute_mapping,
'file:mime_type': mime_type_attribute_mapping,
'name': filename_attribute_mapping,
'file:name': filename_attribute_mapping,
'name_enc': encoding_attribute_mapping,
'file:name_enc': encoding_attribute_mapping,
'file:parent_directory_ref.path': path_attribute_mapping,
'directory:path': path_attribute_mapping,
'size': size_attribute_mapping,
'file:size': size_attribute_mapping}
network_traffic_mapping = {'dst_port': dst_port_attribute_mapping,
'src_port': src_port_attribute_mapping,
'network-traffic:dst_port': dst_port_attribute_mapping,
'network-traffic:src_port': src_port_attribute_mapping}
ip_port_mapping = {'value': domain_attribute_mapping,
'domain-name:value': domain_attribute_mapping,
'network-traffic:dst_ref.value': {'type': 'ip-dst', 'object_relation': 'ip-dst'},
'network-traffic:src_ref.value': {'type': 'ip-src', 'object_relation': 'ip-src'}}
ip_port_mapping.update(network_traffic_mapping)
ip_port_references_mapping = {'domain-name': domain_attribute_mapping,
'ipv4-addr': network_traffic_ip,
'ipv6-addr': network_traffic_ip}
network_socket_extension_mapping = {'address_family': address_family_attribute_mapping,
"network-traffic:extensions.'socket-ext'.address_family": address_family_attribute_mapping,
'protocol_family': domain_family_attribute_mapping,
"network-traffic:extensions.'socket-ext'.protocol_family": domain_family_attribute_mapping,
'is_blocking': state_attribute_mapping,
"network-traffic:extensions.'socket-ext'.is_blocking": state_attribute_mapping,
'is_listening': state_attribute_mapping,
"network-traffic:extensions.'socket-ext'.is_listening": state_attribute_mapping}
network_traffic_references_mapping = {'domain-name': {'type': 'hostname', 'object_relation': 'hostname-{}'},
'ipv4-addr': network_traffic_ip,
'ipv6-addr': network_traffic_ip}
pe_mapping = {'pe_type': pe_type_mapping, 'number_of_sections': number_sections_mapping, 'imphash': imphash_mapping}
pe_section_mapping = {'name': section_name_mapping, 'size': size_attribute_mapping, 'entropy': entropy_mapping}
hash_types = ('MD5', 'SHA-1', 'SHA-256', 'SHA-224', 'SHA-384', 'SHA-512', 'ssdeep', 'tlsh')
for hash_type in hash_types:
misp_hash_type = hash_type.replace('-', '').lower()
attribute = {'type': misp_hash_type, 'object_relation': misp_hash_type}
file_mapping[hash_type] = attribute
file_mapping.update({f"file:hashes.'{feature}'": attribute for feature in (hash_type, misp_hash_type)})
file_mapping.update({f"file:hashes.{feature}": attribute for feature in (hash_type, misp_hash_type)})
pe_section_mapping[hash_type] = attribute
pe_section_mapping[misp_hash_type] = attribute
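
The loop above fans each hash algorithm out into several lookup keys that all point at the same attribute dict. For 'SHA-256', for instance, it effectively registers the following entries (shown here only for illustration, not part of the file):

# illustrative expansion for hash_type = 'SHA-256'
attribute = {'type': 'sha256', 'object_relation': 'sha256'}
file_mapping['SHA-256'] = attribute
file_mapping["file:hashes.'SHA-256'"] = attribute
file_mapping["file:hashes.'sha256'"] = attribute
file_mapping['file:hashes.SHA-256'] = attribute
file_mapping['file:hashes.sha256'] = attribute
pe_section_mapping['SHA-256'] = attribute
pe_section_mapping['sha256'] = attribute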
process_mapping = {'name': process_name_mapping,
'process:name': process_name_mapping,
'pid': pid_attribute_mapping,
'process:pid': pid_attribute_mapping,
'created': process_creation_time_mapping,
'process:created': process_creation_time_mapping,
'command_line': process_command_line_mapping,
'process:command_line': process_command_line_mapping,
'process:parent_ref.pid': {'type': 'text', 'object_relation': 'parent-pid'},
'process:child_refs[*].pid': {'type': 'text', 'object_relation': 'child-pid'},
'process:binary_ref.name': process_image_mapping}
child_process_reference_mapping = {'pid': {'type': 'text', 'object_relation': 'child-pid'}}
parent_process_reference_mapping = {'command_line': {'type': 'text', 'object_relation': 'parent-command-line'},
'pid': {'type': 'text', 'object_relation': 'parent-pid'},
'process-name': {'type': 'text', 'object_relation': 'parent-process-name'}}
regkey_mapping = {'data': data_attribute_mapping,
'windows-registry-key:values.data': data_attribute_mapping,
'data_type': data_type_attribute_mapping,
'windows-registry-key:values.data_type': data_type_attribute_mapping,
'modified': modified_attribute_mapping,
'windows-registry-key:modified': modified_attribute_mapping,
'name': regkey_name_attribute_mapping,
'windows-registry-key:values.name': regkey_name_attribute_mapping,
'key': key_attribute_mapping,
'windows-registry-key:key': key_attribute_mapping,
'windows-registry-key:value': {'type': 'text', 'object_relation': 'hive'}
}
url_mapping = {'url': url_attribute_mapping,
'url:value': url_attribute_mapping,
'domain-name': domain_attribute_mapping,
'domain-name:value': domain_attribute_mapping,
'network-traffic': url_port_attribute_mapping,
'network-traffic:dst_port': url_port_attribute_mapping,
'ipv4-addr:value': ip_attribute_mapping,
'ipv6-addr:value': ip_attribute_mapping
}
user_account_mapping = {'account_created': {'type': 'datetime', 'object_relation': 'created'},
'account_expires': {'type': 'datetime', 'object_relation': 'expires'},
'account_first_login': {'type': 'datetime', 'object_relation': 'first_login'},
'account_last_login': {'type': 'datetime', 'object_relation': 'last_login'},
'account_login': user_id_mapping,
'account_type': {'type': 'text', 'object_relation': 'account-type'},
'can_escalate_privs': {'type': 'boolean', 'object_relation': 'can_escalate_privs'},
'credential': credential_attribute_mapping,
'credential_last_changed': {'type': 'datetime', 'object_relation': 'password_last_changed'},
'display_name': {'type': 'text', 'object_relation': 'display-name'},
'gid': {'type': 'text', 'object_relation': 'group-id'},
'home_dir': {'type': 'text', 'object_relation': 'home_dir'},
'is_disabled': {'type': 'boolean', 'object_relation': 'disabled'},
'is_privileged': {'type': 'boolean', 'object_relation': 'privileged'},
'is_service_account': {'type': 'boolean', 'object_relation': 'is_service_account'},
'shell': {'type': 'text', 'object_relation': 'shell'},
'user_id': {'type': 'text', 'object_relation': 'user-id'}}
vulnerability_mapping = {'name': id_attribute_mapping,
'description': summary_attribute_mapping}
x509_mapping = {'issuer': issuer_attribute_mapping,
'x509-certificate:issuer': issuer_attribute_mapping,
'serial_number': serial_number_attribute_mapping,
'x509-certificate:serial_number': serial_number_attribute_mapping,
'subject': x509_subject_attribute_mapping,
'x509-certificate:subject': x509_subject_attribute_mapping,
'subject_public_key_algorithm': x509_spka_attribute_mapping,
'x509-certificate:subject_public_key_algorithm': x509_spka_attribute_mapping,
'subject_public_key_exponent': x509_spke_attribute_mapping,
'x509-certificate:subject_public_key_exponent': x509_spke_attribute_mapping,
'subject_public_key_modulus': x509_spkm_attribute_mapping,
'x509-certificate:subject_public_key_modulus': x509_spkm_attribute_mapping,
'validity_not_before': x509_vnb_attribute_mapping,
'x509-certificate:validity_not_before': x509_vnb_attribute_mapping,
'validity_not_after': x509_vna_attribute_mapping,
'x509-certificate:validity_not_after': x509_vna_attribute_mapping,
'version': x509_version_attribute_mapping,
'x509-certificate:version': x509_version_attribute_mapping,
'SHA-1': x509_sha1_attribute_mapping,
"x509-certificate:hashes.'sha1'": x509_sha1_attribute_mapping,
'SHA-256': x509_sha256_attribute_mapping,
"x509-certificate:hashes.'sha256'": x509_sha256_attribute_mapping,
'MD5': x509_md5_attribute_mapping,
"x509-certificate:hashes.'md5'": x509_md5_attribute_mapping,
}
attachment_types = ('file:content_ref.name', 'file:content_ref.payload_bin',
'artifact:x_misp_text_name', 'artifact:payload_bin',
"file:hashes.'MD5'", "file:content_ref.hashes.'MD5'",
'file:name')
connection_protocols = {"IP": "3", "ICMP": "3", "ARP": "3",
"TCP": "4", "UDP": "4",
"HTTP": "7", "HTTPS": "7", "FTP": "7"}

File diff suppressed because one or more lines are too long

View File

@ -1,3 +1,4 @@
from .expansion import * # noqa
from .import_mod import * # noqa
from .export_mod import * # noqa
from .action_mod import * # noqa

View File

@ -0,0 +1 @@
__all__ = ['testaction', 'mattermost']

View File

@ -0,0 +1 @@

View File

@ -0,0 +1,70 @@
#!/usr/bin/env python
from jinja2.sandbox import SandboxedEnvironment
default_template = """
# Tutorial: How to use jinja2 templating
:warning: For these examples, we assume the module received data in the MISP core format
1. You can use the dot `.` notation or the subscript syntax `[]` to access attributes of a variable
- `{% raw %}{{ Event.info }}{% endraw %}` -> {{ Event.info }}
- `{% raw %}{{ Event['info'] }}{% endraw %}` -> {{ Event['info'] }}
2. Jinja2 allows you to easily create lists:
```{% raw %}
{% for attribute in Event.Attribute %}
- {{ attribute.value }}
{% endfor %}
{% endraw %}```
Gives:
{% for attribute in Event.Attribute %}
- {{ attribute.value }}
{% endfor %}
3. Jinja2 allows you to add logic
```{% raw %}
{% if "tlp:white" in Event.Tag %}
- This Event has the TLP:WHITE tag
{% else %}
- This Event doesn't have the TLP:WHITE tag
{% endif %}
{% endraw %}```
Gives:
{% if "tlp:white" in Event.Tag %}
- This Event has the TLP:WHITE tag
{% else %}
- This Event doesn't have the TLP:WHITE tag
{% endif %}
## Jinja2 allows you to modify variables by using filters
4. The `reverse` filter
- `{% raw %}{{ Event.info | reverse }}{% endraw %}` -> {{ Event.info | reverse }}
5. The `format` filter
- `{% raw %}{{ "%s :: %s" | format(Event.Attribute[0].type, Event.Attribute[0].value) }}{% endraw %}` -> {{ "%s :: %s" | format(Event.Attribute[0].type, Event.Attribute[0].value) }}
6. The `groupby` filter
```{% raw %}
{% for type, attributes in Event.Attribute|groupby("type") %}
- {{ type }}{% for attribute in attributes %}
- {{ attribute.value }}
{% endfor %}
{% endfor %}
{% endraw %}```
Gives:
{% for type, attributes in Event.Attribute|groupby("type") %}
- {{ type }}{% for attribute in attributes %}
- {{ attribute.value }}
{% endfor %}
{% endfor %}
"""
def renderTemplate(data, template=default_template):
env = SandboxedEnvironment()
return env.from_string(template).render(data)
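
A short, hedged usage sketch of the helper above; the event dict is a made-up stand-in for MISP core format data:

# minimal sketch, assuming the module data sits under the 'Event' key
data = {
    'Event': {
        'info': 'Test event',
        'Attribute': [{'type': 'ip-dst', 'value': '203.0.113.7'}],
        'Tag': ['tlp:white']
    }
}
print(renderTemplate(data))                       # renders the tutorial template above
print(renderTemplate(data, '{{ Event.info }}'))   # -> Test event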

View File

@ -0,0 +1,97 @@
import json
from mattermostdriver import Driver
from ._utils import utils
misperrors = {'error': 'Error'}
# config fields that your code expects from the site admin
moduleconfig = {
'params': {
'mattermost_hostname': {
'type': 'string',
'description': 'The Mattermost domain',
'value': 'example.mattermost.com',
},
'bot_access_token': {
'type': 'string',
'description': 'Access token generated when you created the bot account',
},
'channel_id': {
'type': 'string',
'description': 'The channel you added the bot to',
},
'message_template': {
'type': 'large_string',
'description': 'The template to be used to generate the message to be posted',
'value': 'The **template** will be rendered using *Jinja2*!',
},
},
# Blocking modules break the execution of the current action
'blocking': False,
# Indicates whether parts of the data passed to this module should be filtered. Filtered data can be found under the `filteredItems` key
'support_filters': True,
# Indicates whether the data passed to this module should be compliant with the MISP core format
'expect_misp_core_format': False,
}
# returns either "boolean" or "data"
# Boolean is used to simply signal that the execution has finished.
# For blocking modules the actual boolean value determines whether we break execution
returns = 'boolean'
moduleinfo = {'version': '0.1', 'author': 'Sami Mokaddem',
'description': 'Simplistic module to send a message to a Mattermost channel.',
'module-type': ['action']}
def createPost(request):
params = request['params']
mm = Driver({
'url': params['mattermost_hostname'],
'token': params['bot_access_token'],
'scheme': 'https',
'basepath': '/api/v4',
'port': 443,
})
mm.login()
data = {}
if 'matchingData' in request:
data = request['matchingData']
else:
data = request['data']
if params['message_template']:
message = utils.renderTemplate(data, params['message_template'])
else:
message = '```\n{}\n```'.format(json.dumps(data))
mm.posts.create_post(options={
'channel_id': params['channel_id'],
'message': message
})
return True
def handler(q=False):
if q is False:
return False
request = json.loads(q)
createPost(request)
r = {"data": True}
return r
def introspection():
modulesetup = {}
try:
modulesetup['config'] = moduleconfig
except NameError:
pass
return modulesetup
def version():
moduleinfo['config'] = moduleconfig
return moduleinfo
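
For context, a hedged sketch of the kind of payload the handler above expects; the hostname, token and channel id are placeholders, and the call would actually attempt to post to Mattermost:

import json

# illustrative request only; real values come from the MISP action workflow
request = {
    'params': {
        'mattermost_hostname': 'example.mattermost.com',
        'bot_access_token': '<bot-token>',
        'channel_id': '<channel-id>',
        'message_template': 'New event: {{ Event.info }}'
    },
    'data': {'Event': {'info': 'Test event'}}
}
print(handler(json.dumps(request)))  # -> {'data': True} once the post succeeds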

View File

@ -0,0 +1,59 @@
import json
from ._utils import utils
misperrors = {'error': 'Error'}
# config fields that your code expects from the site admin
moduleconfig = {
'params': {
'foo': {
'type': 'string',
'description': 'blablabla',
'value': 'xyz'
},
'Data extraction path': {
# Extracted data can be found under the `matchingData` key
'type': 'hash_path',
'description': 'Only post content extracted from this path',
'value': 'Attribute.{n}.AttributeTag.{n}.Tag.name',
},
},
# Blocking modules break the execution of the current action
'blocking': False,
# Indicates whether parts of the data passed to this module should be extracted. Extracted data can be found under the `filteredItems` key
'support_filters': False,
# Indicates whether the data passed to this module should be compliant with the MISP core format
'expect_misp_core_format': False,
}
# returns either "boolean" or "data"
# Boolean is used to simply signal that the execution has finished.
# For blocking modules the actual boolean value determines whether we break execution
returns = 'boolean'
moduleinfo = {'version': '0.1', 'author': 'Andras Iklody',
'description': 'This module is merely a test, always returning true. Triggers on event publishing.',
'module-type': ['action']}
def handler(q=False):
if q is False:
return False
request = json.loads(q) # noqa
success = True
r = {"data": success}
return r
def introspection():
modulesetup = {}
try:
modulesetup['config'] = moduleconfig
except NameError:
pass
return modulesetup
def version():
moduleinfo['config'] = moduleconfig
return moduleinfo

View File

@ -1,4 +1,3 @@
from . import _vmray # noqa
import os
import sys
@ -18,7 +17,8 @@ __all__ = ['cuckoo_submit', 'vmray_submit', 'bgpranking', 'circl_passivedns', 'c
'virustotal_public', 'apiosintds', 'urlscan', 'securitytrails', 'apivoid',
'assemblyline_submit', 'assemblyline_query', 'ransomcoindb', 'malwarebazaar',
'lastline_query', 'lastline_submit', 'sophoslabs_intelix', 'cytomic_orion', 'censys_enrich',
'trustar_enrich', 'recordedfuture', 'html_to_markdown', 'socialscan']
'trustar_enrich', 'recordedfuture', 'html_to_markdown', 'socialscan', 'passive-ssh',
'qintel_qsentry', 'mwdb', 'hashlookup', 'mmdb_lookup', 'ipqs_fraud_and_risk_scoring', 'clamav', 'jinja_template_rendering', 'hyasinsight']
minimum_required_fields = ('type', 'uuid', 'value')

View File

@ -1,148 +0,0 @@
#!/usr/bin/env python3
"""Python client library for VMRay REST API"""
import base64
import datetime
import os.path
import requests
import urllib.parse
# disable nasty certification warning
# pylint: disable=no-member
try:
requests.packages.urllib3.disable_warnings()
except AttributeError:
try:
import urllib3
try:
urllib3.disable_warnings()
except AttributeError:
pass
except ImportError:
pass
# pylint: disable=
class VMRayRESTAPIError(Exception):
"""Exception class that is used when API returns an error"""
def __init__(self, *args, **kwargs):
self.status_code = kwargs.pop("status_code", None)
Exception.__init__(self, *args, **kwargs)
def handle_rest_api_result(result):
"""Handle result of API request (check for errors)"""
if (result.status_code < 200) or (result.status_code > 299):
try:
json_result = result.json()
except ValueError:
raise VMRayRESTAPIError("API returned error %u: %s" % (result.status_code, result.text), status_code=result.status_code)
raise VMRayRESTAPIError(json_result.get("error_msg", "Unknown error"), status_code=result.status_code)
class VMRayRESTAPI(object):
"""VMRay REST API class"""
def __init__(self, server, api_key, verify_cert=True):
# split server URL into components
url_desc = urllib.parse.urlsplit(server)
# assume HTTPS if no scheme is specified
if url_desc.scheme == "":
server = "https://" + server
# save variables
self.server = server
self.api_key = api_key
self.verify_cert = verify_cert
def call(self, http_method, api_path, params=None, raw_data=False):
"""Call VMRay REST API"""
# get function of requests package
requests_func = getattr(requests, http_method.lower())
# parse parameters
req_params = {}
file_params = {}
if params is not None:
for key, value in params.items():
if isinstance(value, (datetime.date,
datetime.datetime,
float,
int)):
req_params[key] = str(value)
elif isinstance(value, str):
req_params[key] = str(value)
elif isinstance(value, dict):
filename = value["filename"]
sample = value["data"]
file_params[key] = (filename, sample, "application/octet-stream")
elif hasattr(value, "read"):
filename = os.path.split(value.name)[1]
# For the following block refer to DEV-1820
try:
filename.decode("ASCII")
except (UnicodeDecodeError, UnicodeEncodeError):
b64_key = key + "name_b64enc"
byte_value = filename.encode("utf-8")
b64_value = base64.b64encode(byte_value)
filename = "@param=%s" % b64_key
req_params[b64_key] = b64_value
file_params[key] = (filename, value, "application/octet-stream")
else:
raise VMRayRESTAPIError("Parameter \"%s\" has unknown type \"%s\"" % (key, type(value)))
# construct request
if file_params:
files = file_params
else:
files = None
# we need to adjust some stuff for POST requests
if http_method.lower() == "post":
req_data = req_params
req_params = None
else:
req_data = None
# do request
result = requests_func(self.server + api_path, data=req_data, params=req_params, headers={"Authorization": "api_key " + self.api_key}, files=files, verify=self.verify_cert, stream=raw_data)
handle_rest_api_result(result)
if raw_data:
return result.raw
# parse result
try:
json_result = result.json()
except ValueError:
raise ValueError("API returned invalid JSON: %s" % (result.text))
# if there are no cached elements then return the data
if "continuation_id" not in json_result:
return json_result.get("data", None)
data = json_result["data"]
# get cached results
while "continuation_id" in json_result:
# send request to server
result = requests.get("%s/rest/continuation/%u" % (self.server, json_result["continuation_id"]), headers={"Authorization": "api_key " + self.api_key}, verify=self.verify_cert)
handle_rest_api_result(result)
# parse result
try:
json_result = result.json()
except ValueError:
raise ValueError("API returned invalid JSON: %s" % (result.text))
data.extend(json_result["data"])
return data

View File

@ -4,8 +4,8 @@ from . import check_input_attribute, standard_error_message
from pymisp import MISPAttribute, MISPEvent, MISPObject
misperrors = {'error': 'Error'}
mispattributes = {'input': ['domain', 'hostname'], 'format': 'misp_standard'}
moduleinfo = {'version': '0.1', 'author': 'Christian Studer',
mispattributes = {'input': ['domain', 'hostname', 'email', 'email-src', 'email-dst', 'email-reply-to', 'dns-soa-email', 'target-email', 'whois-registrant-email'], 'format': 'misp_standard'}
moduleinfo = {'version': '0.2', 'author': 'Christian Studer',
'description': 'On demand query API for APIVoid.',
'module-type': ['expansion', 'hover']}
moduleconfig = ['apikey']
@ -43,6 +43,31 @@ class APIVoidParser():
ssl = requests.get(f'{self.url.format("sslinfo", apikey)}host={self.attribute.value}').json()
self._parse_ssl_certificate(ssl['data']['certificate'])
def handle_email(self, apikey):
feature = 'emailverify'
if requests.get(f'{self.url.format(feature, apikey)}stats').json()['credits_remained'] < 0.06:
self.result = {'error': 'You do not have enough APIVoid credits to process your request.'}
return
emaillookup = requests.get(f'{self.url.format(feature, apikey)}email={self.attribute.value}').json()
email_verification = MISPObject('apivoid-email-verification')
boolean_attributes = ['valid_format', 'suspicious_username', 'suspicious_email', 'dirty_words_username',
'suspicious_email', 'valid_tld', 'disposable', 'has_a_records', 'has_mx_records',
'has_spf_records', 'is_spoofable', 'dmarc_configured', 'dmarc_enforced', 'free_email',
'russian_free_email', 'china_free_email', 'suspicious_domain', 'dirty_words_domain',
'domain_popular', 'risky_tld', 'police_domain', 'government_domain', 'educational_domain',
'should_block']
for boolean_attribute in boolean_attributes:
email_verification.add_attribute(boolean_attribute,
**{'type': 'boolean', 'value': emaillookup['data'][boolean_attribute]})
email_verification.add_attribute('email', **{'type': 'email', 'value': emaillookup['data']['email']})
email_verification.add_attribute('username', **{'type': 'text', 'value': emaillookup['data']['username']})
email_verification.add_attribute('role_address',
**{'type': 'boolean', 'value': emaillookup['data']['role_address']})
email_verification.add_attribute('domain', **{'type': 'domain', 'value': emaillookup['data']['domain']})
email_verification.add_attribute('score', **{'type': 'float', 'value': emaillookup['data']['score']})
email_verification.add_reference(self.attribute['uuid'], 'related-to')
self.misp_event.add_object(email_verification)
def _handle_dns_record(self, item, record_type, relationship):
dns_record = MISPObject('dns-record')
dns_record.add_attribute('queried-domain', type='domain', value=item['host'])
@ -82,7 +107,10 @@ def handler(q=False):
return {'error': 'Unsupported attribute type.'}
apikey = request['config']['apikey']
apivoid_parser = APIVoidParser(attribute)
apivoid_parser.parse_domain(apikey)
if attribute['type'] in ['domain', 'hostname']:
apivoid_parser.parse_domain(apikey)
else:
apivoid_parser.handle_email(apikey)
return apivoid_parser.get_results()

View File

@ -11,7 +11,7 @@ mispattributes = {'input': ['link'], 'format': 'misp_standard'}
moduleinfo = {'version': '1', 'author': 'Christian Studer',
'description': 'Query AssemblyLine with a report URL to get the parsed data.',
'module-type': ['expansion']}
moduleconfig = ["apiurl", "user_id", "apikey", "password"]
moduleconfig = ["apiurl", "user_id", "apikey", "password", "verifyssl"]
class AssemblyLineParser():
@ -125,7 +125,7 @@ def parse_config(apiurl, user_id, config):
error = {"error": "Please provide your AssemblyLine API key or Password."}
if config.get('apikey'):
try:
return Client(apiurl, apikey=(user_id, config['apikey']))
return Client(apiurl, apikey=(user_id, config['apikey']), verify=config['verifyssl'])
except ClientError as e:
error['error'] = f'Error while initiating a connection with AssemblyLine: {e.__str__()}'
if config.get('password'):

View File

@ -7,7 +7,7 @@ from urllib.parse import urljoin
moduleinfo = {"version": 1, "author": "Christian Studer", "module-type": ["expansion"],
"description": "Submit files or URLs to AssemblyLine"}
moduleconfig = ["apiurl", "user_id", "apikey", "password"]
moduleconfig = ["apiurl", "user_id", "apikey", "password", "verifyssl"]
mispattributes = {"input": ["attachment", "malware-sample", "url"],
"output": ["link"]}
@ -16,12 +16,12 @@ def parse_config(apiurl, user_id, config):
error = {"error": "Please provide your AssemblyLine API key or Password."}
if config.get('apikey'):
try:
return Client(apiurl, apikey=(user_id, config['apikey']))
return Client(apiurl, apikey=(user_id, config['apikey']), verify=config['verifyssl'])
except ClientError as e:
error['error'] = f'Error while initiating a connection with AssemblyLine: {e.__str__()}'
if config.get('password'):
try:
return Client(apiurl, auth=(user_id, config['password']))
return Client(apiurl, auth=(user_id, config['password']), verify=config['verifyssl'])
except ClientError as e:
error['error'] = f'Error while initiating a connection with AssemblyLine: {e.__str__()}'
return error

View File

@ -1,15 +1,26 @@
# encoding: utf-8
import json
import configparser
import base64
import codecs
import censys.common.config
from dateutil.parser import isoparse
from . import check_input_attribute, standard_error_message
from pymisp import MISPAttribute, MISPEvent, MISPObject
try:
import censys.base
import censys.ipv4
import censys.websites
import censys.certificates
# needed in order to override the censys module's intent of creating config files in the home folder of the process owner
#--
def get_config_over() -> configparser.ConfigParser:
config = configparser.ConfigParser()
config[censys.common.config.DEFAULT] = censys.common.config.default_config
return config
censys.common.config.get_config = get_config_over
#--
from censys.search import CensysHosts
from censys.search import CensysCertificates
from censys.common.base import *
except ImportError:
print("Censys module not installed. Try 'pip install censys'")
@ -20,8 +31,11 @@ mispattributes = {'input': ['ip-src', 'ip-dst', 'domain', 'hostname', 'hostname|
moduleinfo = {'version': '0.1', 'author': 'Loïc Fortemps',
'description': 'Censys.io expansion module', 'module-type': ['expansion', 'hover']}
api_id = None
api_secret = None
def handler(q=False):
global api_id, api_secret
if q is False:
return False
request = json.loads(q)
@ -46,7 +60,6 @@ def handler(q=False):
attribute = MISPAttribute()
attribute.from_dict(**request['attribute'])
# Lists to accommodate multi-type attributes
conn = list()
types = list()
values = list()
results = list()
@ -65,26 +78,29 @@ def handler(q=False):
types.append(attribute.type)
values.append(attribute.value)
found = False
for t in types:
# ip, ip-src or ip-dst
if t[:2] == "ip":
conn.append(censys.ipv4.CensysIPv4(api_id=api_id, api_secret=api_secret))
elif t == 'domain' or t == "hostname":
conn.append(censys.websites.CensysWebsites(api_id=api_id, api_secret=api_secret))
elif 'x509-fingerprint' in t:
conn.append(censys.certificates.CensysCertificates(api_id=api_id, api_secret=api_secret))
found = True
for c in conn:
val = values.pop(0)
try:
r = c.view(val)
results.append(parse_response(r, attribute))
found = True
except censys.base.CensysNotFoundException:
found = False
except Exception:
misperrors['error'] = "Connection issue"
value = values.pop(0)
# ip, ip-src or ip-dst
if t[:2] == "ip":
r = CensysHosts(api_id, api_secret).view(value)
results.append(parse_response(r, attribute))
found = True
elif t == 'domain' or t == "hostname":
# get ips
endpoint = CensysHosts(api_id, api_secret)
for r_list in endpoint.search(query=value, per_page=5, pages=1):
for r in r_list:
results.append(parse_response(r, attribute))
found = True
elif 'x509-fingerprint-sha256' in t:
# use api_v1 as Certificates endpoint in api_v2 doesn't yet provide all the details
r = CensysCertificates(api_id, api_secret).view(value)
results.append(parse_response(r, attribute))
found = True
except CensysException as e:
misperrors['error'] = "ERROR: param {} / response: {}".format(value, e)
return misperrors
if not found:
@ -98,38 +114,43 @@ def parse_response(censys_output, attribute):
misp_event = MISPEvent()
misp_event.add_attribute(**attribute)
# Generic fields (for IP/Websites)
if "autonomous_system" in censys_output:
cen_as = censys_output['autonomous_system']
if censys_output.get('autonomous_system'):
cen_as = censys_output.get('autonomous_system')
asn_object = MISPObject('asn')
asn_object.add_attribute('asn', value=cen_as["asn"])
asn_object.add_attribute('description', value=cen_as['name'])
asn_object.add_attribute('subnet-announced', value=cen_as['routed_prefix'])
asn_object.add_attribute('country', value=cen_as['country_code'])
asn_object.add_attribute('asn', value=cen_as.get("asn"))
asn_object.add_attribute('description', value=cen_as.get('name'))
asn_object.add_attribute('subnet-announced', value=cen_as.get('routed_prefix'))
asn_object.add_attribute('country', value=cen_as.get('country_code'))
asn_object.add_reference(attribute.uuid, 'associated-to')
misp_event.add_object(**asn_object)
if "ip" in censys_output and "ports" in censys_output:
if censys_output.get('ip') and len(censys_output.get('services')): #"ports" in censys_output
ip_object = MISPObject('ip-port')
ip_object.add_attribute('ip', value=censys_output['ip'])
for p in censys_output['ports']:
ip_object.add_attribute('dst-port', value=p)
ip_object.add_attribute('ip', value=censys_output.get('ip'))
for serv in censys_output.get('services'):
if serv.get('port'):
ip_object.add_attribute('dst-port', value=serv.get('port'))
ip_object.add_reference(attribute.uuid, 'associated-to')
misp_event.add_object(**ip_object)
# We explore all services to find HTTPS or SSH services
for k in censys_output.keys():
if not isinstance(censys_output[k], dict):
for serv in censys_output.get('services', []):
if not isinstance(serv, dict):
continue
if 'https' in censys_output[k]:
if serv.get('service_name').lower() == 'http' and serv.get('certificate', None):
try:
cert = censys_output[k]['https']['tls']['certificate']
cert_obj = get_certificate_object(cert, attribute)
misp_event.add_object(**cert_obj)
cert = serv.get('certificate', None)
if cert:
# TODO switch to api_v2 once available
# use api_v1 as Certificates endpoint in api_v2 doesn't yet provide all the details
cert_details = CensysCertificates(api_id, api_secret).view(cert)
cert_obj = get_certificate_object(cert_details, attribute)
misp_event.add_object(**cert_obj)
except KeyError:
print("Error !")
if 'ssh' in censys_output[k]:
if serv.get('ssh') and serv.get('service_name').lower() == 'ssh':
try:
cert = censys_output[k]['ssh']['v2']['server_host_key']
cert = serv.get('ssh').get('server_host_key').get('fingerprint_sha256')
# TODO enable once the type is merged
# misp_event.add_attribute(type='hasshserver-sha256', value=cert['fingerprint_sha256'])
except KeyError:
@ -144,20 +165,20 @@ def parse_response(censys_output, attribute):
if "location" in censys_output:
loc_obj = MISPObject('geolocation')
loc = censys_output['location']
loc_obj.add_attribute('latitude', value=loc['latitude'])
loc_obj.add_attribute('longitude', value=loc['longitude'])
loc_obj.add_attribute('latitude', value=loc.get('coordinates', {}).get('latitude', None))
loc_obj.add_attribute('longitude', value=loc.get('coordinates', {}).get('longitude', None))
if 'city' in loc:
loc_obj.add_attribute('city', value=loc['city'])
loc_obj.add_attribute('country', value=loc['country'])
loc_obj.add_attribute('city', value=loc.get('city'))
loc_obj.add_attribute('country', value=loc.get('country'))
if 'postal_code' in loc:
loc_obj.add_attribute('zipcode', value=loc['postal_code'])
loc_obj.add_attribute('zipcode', value=loc.get('postal_code'))
if 'province' in loc:
loc_obj.add_attribute('region', value=loc['province'])
loc_obj.add_attribute('region', value=loc.get('province'))
loc_obj.add_reference(attribute.uuid, 'associated-to')
misp_event.add_object(**loc_obj)
event = json.loads(misp_event.to_json())
return {'Object': event['Object'], 'Attribute': event['Attribute']}
return {'Object': event.get('Object', []), 'Attribute': event.get('Attribute', [])}
# In case of multiple enrichment (ip and domain), we need to filter out similar objects
@ -166,24 +187,23 @@ def remove_duplicates(results):
# Only one enrichment was performed so no duplicate
if len(results) == 1:
return results[0]
elif len(results) == 2:
final_result = results[0]
obj_l2 = results[1]['Object']
for o2 in obj_l2:
if o2['name'] == "asn":
key = "asn"
elif o2['name'] == "ip-port":
key = "ip"
elif o2['name'] == "x509":
key = "x509-fingerprint-sha256"
elif o2['name'] == "geolocation":
key = "latitude"
if not check_if_present(o2, key, final_result['Object']):
final_result['Object'].append(o2)
return final_result
else:
return []
final_result = results[0]
for result in results[1:]:
obj_l = result.get('Object', [])
for o2 in obj_l:
if o2['name'] == "asn":
key = "asn"
elif o2['name'] == "ip-port":
key = "ip"
elif o2['name'] == "x509":
key = "x509-fingerprint-sha256"
elif o2['name'] == "geolocation":
key = "latitude"
if not check_if_present(o2, key, final_result.get('Object', [])):
final_result['Object'].append(o2)
return final_result
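# Hedged illustration (hypothetical data): if two enrichments both yield an 'asn'
# object for AS64500, check_if_present() matches them on the 'asn' attribute and
# only the first copy ends up in final_result['Object'].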
def check_if_present(object, attribute_name, list_objects):
@ -253,4 +273,4 @@ def introspection():
def version():
moduleinfo['config'] = moduleconfig
return moduleinfo
return moduleinfo

View File

@ -1,42 +1,44 @@
import json
import requests
from . import check_input_attribute, standard_error_message
from falconpy import Intel
from pymisp import MISPAttribute, MISPEvent
moduleinfo = {'version': '0.1',
moduleinfo = {'version': '0.2',
'author': 'Christophe Vandeplas',
'description': 'Module to query CrowdStrike Falcon.',
'module-type': ['expansion']}
'module-type': ['expansion', 'hover']}
moduleconfig = ['api_id', 'apikey']
misperrors = {'error': 'Error'}
misp_types_in = ['domain', 'email-attachment', 'email-dst', 'email-reply-to', 'email-src', 'email-subject',
misp_type_in = ['domain', 'email-attachment', 'email-dst', 'email-reply-to', 'email-src', 'email-subject',
'filename', 'hostname', 'ip', 'ip-src', 'ip-dst', 'md5', 'mutex', 'regkey', 'sha1', 'sha256', 'uri', 'url',
'user-agent', 'whois-registrant-email', 'x509-fingerprint-md5']
mapping_out = { # mapping between the MISP attributes types and the compatible CrowdStrike indicator types.
'domain': {'types': 'hostname', 'to_ids': True},
'email_address': {'types': 'email-src', 'to_ids': True},
'email_subject': {'types': 'email-subject', 'to_ids': True},
'file_name': {'types': 'filename', 'to_ids': True},
'hash_md5': {'types': 'md5', 'to_ids': True},
'hash_sha1': {'types': 'sha1', 'to_ids': True},
'hash_sha256': {'types': 'sha256', 'to_ids': True},
'ip_address': {'types': 'ip-dst', 'to_ids': True},
'ip_address_block': {'types': 'ip-dst', 'to_ids': True},
'mutex_name': {'types': 'mutex', 'to_ids': True},
'registry': {'types': 'regkey', 'to_ids': True},
'url': {'types': 'url', 'to_ids': True},
'user_agent': {'types': 'user-agent', 'to_ids': True},
'x509_serial': {'types': 'x509-fingerprint-md5', 'to_ids': True},
mapping_out = { # mapping between the MISP attributes type and the compatible CrowdStrike indicator types.
'domain': {'type': 'hostname', 'to_ids': True},
'email_address': {'type': 'email-src', 'to_ids': True},
'email_subject': {'type': 'email-subject', 'to_ids': True},
'file_name': {'type': 'filename', 'to_ids': True},
'hash_md5': {'type': 'md5', 'to_ids': True},
'hash_sha1': {'type': 'sha1', 'to_ids': True},
'hash_sha256': {'type': 'sha256', 'to_ids': True},
'ip_address': {'type': 'ip-dst', 'to_ids': True},
'ip_address_block': {'type': 'ip-dst', 'to_ids': True},
'mutex_name': {'type': 'mutex', 'to_ids': True},
'registry': {'type': 'regkey', 'to_ids': True},
'url': {'type': 'url', 'to_ids': True},
'user_agent': {'type': 'user-agent', 'to_ids': True},
'x509_serial': {'type': 'x509-fingerprint-md5', 'to_ids': True},
'actors': {'types': 'threat-actor'},
'malware_families': {'types': 'text', 'categories': 'Attribution'}
'actors': {'type': 'threat-actor', 'category': 'Attribution'},
'malware_families': {'type': 'text', 'category': 'Attribution'}
}
misp_types_out = [item['types'] for item in mapping_out.values()]
mispattributes = {'input': misp_types_in, 'output': misp_types_out}
misp_type_out = [item['type'] for item in mapping_out.values()]
mispattributes = {'input': misp_type_in, 'format': 'misp_standard'}
def handler(q=False):
if q is False:
return False
request = json.loads(q)
#validate CrowdStrike params
if (request.get('config')):
if (request['config'].get('apikey') is None):
misperrors['error'] = 'CrowdStrike apikey is missing'
@ -44,41 +46,64 @@ def handler(q=False):
if (request['config'].get('api_id') is None):
misperrors['error'] = 'CrowdStrike api_id is missing'
return misperrors
#validate attribute
if not request.get('attribute') or not check_input_attribute(request['attribute']):
return {'error': f'{standard_error_message}, which should contain at least a type, a value and an uuid.'}
attribute = request.get('attribute')
if not any(input_type == attribute.get('type') for input_type in misp_type_in):
return {'error': 'Unsupported attribute type.'}
client = CSIntelAPI(request['config']['api_id'], request['config']['apikey'])
attribute = MISPAttribute()
attribute.from_dict(**request.get('attribute') )
r = {"results": []}
valid_type = False
for k in misp_types_in:
if request.get(k):
# map the MISP type to the CrowdStrike type
for item in lookup_indicator(client, request[k]):
r['results'].append(item)
valid_type = True
try:
for k in misp_type_in:
if attribute.type == k:
# map the MISP type to the CrowdStrike type
r['results'].append(lookup_indicator(client, attribute))
valid_type = True
except Exception as e:
return {'error': f"{e}"}
if not valid_type:
misperrors['error'] = "Unsupported attributes type"
return misperrors
return r
return {'results': r.get('results').pop()}
def lookup_indicator(client, item):
result = client.search_indicator(item)
for item in result:
for relation in item['relations']:
if mapping_out.get(relation['type']):
r = mapping_out[relation['type']].copy()
r['values'] = relation['indicator']
yield(r)
for actor in item['actors']:
r = mapping_out['actors'].copy()
r['values'] = actor
yield(r)
for malware_family in item['malware_families']:
r = mapping_out['malware_families'].copy()
r['values'] = malware_family
yield(r)
def lookup_indicator(client, ref_attribute):
result = client.search_indicator(ref_attribute.value)
misp_event = MISPEvent()
misp_event.add_attribute(**ref_attribute)
for item in result.get('resources', []):
for relation in item.get('relations'):
if mapping_out.get(relation.get('type')):
r = mapping_out[relation.get('type')].copy()
r['value'] = relation.get('indicator')
attribute = MISPAttribute()
attribute.from_dict(**r)
misp_event.add_attribute(**attribute)
for actor in item.get('actors'):
r = mapping_out.get('actors').copy()
r['value'] = actor
attribute = MISPAttribute()
attribute.from_dict(**r)
misp_event.add_attribute(**attribute)
if item.get('malware_families'):
r = mapping_out.get('malware_families').copy()
r['value'] = f"malware_families: {' | '.join(item.get('malware_families'))}"
attribute = MISPAttribute()
attribute.from_dict(**r)
misp_event.add_attribute(**attribute)
event = json.loads(misp_event.to_json())
return {'Object': event.get('Object', []), 'Attribute': event.get('Attribute', [])}
def introspection():
return mispattributes
@ -90,39 +115,25 @@ def version():
class CSIntelAPI():
def __init__(self, custid=None, custkey=None, perpage=100, page=1, baseurl="https://intelapi.crowdstrike.com/indicator/v2/search/"):
def __init__(self, custid=None, custkey=None):
# customer id and key should be passed when obj is created
self.custid = custid
self.custkey = custkey
self.falcon = Intel(client_id=custid, client_secret=custkey)
self.baseurl = baseurl
self.perpage = perpage
self.page = page
def request(self, query):
headers = {'X-CSIX-CUSTID': self.custid,
'X-CSIX-CUSTKEY': self.custkey,
'Content-Type': 'application/json'}
full_query = self.baseurl + query
r = requests.get(full_query, headers=headers)
def search_indicator(self, query):
r = self.falcon.query_indicator_entities(q=query)
# 400 - bad request
if r.status_code == 400:
if r.get('status_code') == 400:
raise Exception('HTTP Error 400 - Bad request.')
# 404 - oh shit
if r.status_code == 404:
if r.get('status_code') == 404:
raise Exception('HTTP Error 404 - awww snap.')
# catch all?
if r.status_code != 200:
raise Exception('HTTP Error: ' + str(r.status_code))
if r.get('status_code') != 200:
raise Exception('HTTP Error: ' + str(r.get('status_code')))
if r.text:
return r
if len(r.get('body').get('errors')):
raise Exception('API Error: ' + ' | '.join(r.get('body').get('errors')))
def search_indicator(self, item):
query = 'indicator?match=' + item
r = self.request(query)
return json.loads(r.text)
return r.get('body', {})

View File

@ -23,11 +23,11 @@ class VulnerabilityParser():
self.references = defaultdict(list)
self.capec_features = ('id', 'name', 'summary', 'prerequisites', 'solutions')
self.vulnerability_mapping = {
'id': ('vulnerability', 'id'), 'summary': ('text', 'summary'),
'vulnerable_configuration': ('cpe', 'vulnerable_configuration'),
'vulnerable_configuration_cpe_2_2': ('cpe', 'vulnerable_configuration'),
'Modified': ('datetime', 'modified'), 'Published': ('datetime', 'published'),
'references': ('link', 'references'), 'cvss': ('float', 'cvss-score')}
'id': 'id', 'summary': 'summary',
'vulnerable_configuration': 'vulnerable-configuration',
'vulnerable_configuration_cpe_2_2': 'vulnerable-configuration',
'Modified': 'modified', 'Published': 'published',
'references': 'references', 'cvss': 'cvss-score'}
self.weakness_mapping = {'name': 'name', 'description_summary': 'description',
'status': 'status', 'weaknessabs': 'weakness-abs'}
@ -43,18 +43,17 @@ class VulnerabilityParser():
for feature in ('id', 'summary', 'Modified', 'cvss'):
value = self.vulnerability.get(feature)
if value:
attribute_type, relation = self.vulnerability_mapping[feature]
vulnerability_object.add_attribute(relation, **{'type': attribute_type, 'value': value})
vulnerability_object.add_attribute(self.vulnerability_mapping[feature], value)
if 'Published' in self.vulnerability:
vulnerability_object.add_attribute('published', **{'type': 'datetime', 'value': self.vulnerability['Published']})
vulnerability_object.add_attribute('state', **{'type': 'text', 'value': 'Published'})
vulnerability_object.add_attribute('published', self.vulnerability['Published'])
vulnerability_object.add_attribute('state', 'Published')
for feature in ('references', 'vulnerable_configuration', 'vulnerable_configuration_cpe_2_2'):
if feature in self.vulnerability:
attribute_type, relation = self.vulnerability_mapping[feature]
relation = self.vulnerability_mapping[feature]
for value in self.vulnerability[feature]:
if isinstance(value, dict):
value = value['title']
vulnerability_object.add_attribute(relation, **{'type': attribute_type, 'value': value})
vulnerability_object.add_attribute(relation, value)
vulnerability_object.add_reference(self.attribute['uuid'], 'related-to')
self.misp_event.add_object(vulnerability_object)
if 'cwe' in self.vulnerability and self.vulnerability['cwe'] not in ('Unknown', 'NVD-CWE-noinfo'):
@ -74,10 +73,9 @@ class VulnerabilityParser():
for capec in self.vulnerability['capec']:
capec_object = MISPObject('attack-pattern')
for feature in self.capec_features:
capec_object.add_attribute(feature, **{'type': 'text', 'value': capec[feature]})
capec_object.add_attribute(feature, capec[feature])
for related_weakness in capec['related_weakness']:
attribute = {'type': 'weakness', 'value': f"CWE-{related_weakness}"}
capec_object.add_attribute('related-weakness', **attribute)
capec_object.add_attribute('related-weakness', f"CWE-{related_weakness}")
self.misp_event.add_object(capec_object)
self.references[vulnerability_uuid].append(
{
@ -87,16 +85,16 @@ class VulnerabilityParser():
)
def __parse_weakness(self, vulnerability_uuid):
cwe_string, cwe_id = self.vulnerability['cwe'].split('-')
cwe_string, cwe_id = self.vulnerability['cwe'].split('-')[:2]
cwes = requests.get(self.api_url.replace('/cve/', '/cwe'))
if cwes.status_code == 200:
for cwe in cwes.json():
if cwe['id'] == cwe_id:
weakness_object = MISPObject('weakness')
weakness_object.add_attribute('id', {'type': 'weakness', 'value': f'{cwe_string}-{cwe_id}'})
weakness_object.add_attribute('id', f'{cwe_string}-{cwe_id}')
for feature, relation in self.weakness_mapping.items():
if cwe.get(feature):
weakness_object.add_attribute(relation, **{'type': 'text', 'value': cwe[feature]})
weakness_object.add_attribute(relation, cwe[feature])
self.misp_event.add_object(weakness_object)
self.references[vulnerability_uuid].append(
{

View File

@ -1,3 +1,7 @@
# This module does not appear to be actively maintained.
# Please see https://github.com/DomainTools/domaintools_misp
# for the official DomainTools-supported MISP app
import json
import logging
import sys

View File

@ -1,22 +1,82 @@
import dnsdb2
import json
from ._dnsdb_query.dnsdb_query import DEFAULT_DNSDB_SERVER, DnsdbClient, QueryError
from . import check_input_attribute, standard_error_message
from pymisp import MISPEvent, MISPObject
from datetime import datetime
from pymisp import MISPEvent, MISPObject, Distribution
misperrors = {'error': 'Error'}
standard_query_input = [
'hostname',
'domain',
'ip-src',
'ip-dst'
]
flex_query_input = [
'btc',
'dkim',
'email',
'email-src',
'email-dst',
'domain|ip',
'hex',
'mac-address',
'mac-eui-64',
'other',
'pattern-filename',
'target-email',
'text',
'uri',
'url',
'whois-registrant-email',
]
mispattributes = {
'input': ['hostname', 'domain', 'ip-src', 'ip-dst'],
'input': standard_query_input + flex_query_input,
'format': 'misp_standard'
}
moduleinfo = {
'version': '0.2',
'version': '0.5',
'author': 'Christophe Vandeplas',
'description': 'Module to access Farsight DNSDB Passive DNS',
'module-type': ['expansion', 'hover']
}
moduleconfig = ['apikey', 'server', 'limit']
moduleconfig = ['apikey', 'server', 'limit', 'flex_queries']
DEFAULT_DNSDB_SERVER = 'https://api.dnsdb.info'
DEFAULT_LIMIT = 10
DEFAULT_DISTRIBUTION_SETTING = Distribution.your_organisation_only.value
TYPE_TO_FEATURE = {
"btc": "Bitcoin address",
"dkim": "domainkeys identified mail",
"domain": "domain name",
"domain|ip": "domain name / IP address",
"hex": "value in hexadecimal format",
"hostname": "hostname",
"mac-address": "MAC address",
"mac-eui-64": "MAC EUI-64 address",
"pattern-filename": "pattern in the name of a file",
"target-email": "attack target email",
"uri": "Uniform Resource Identifier",
"url": "Uniform Resource Locator",
"whois-registrant-email": "email of a domain's registrant"
}
TYPE_TO_FEATURE.update(
dict.fromkeys(
("ip-src", "ip-dst"),
"IP address"
)
)
TYPE_TO_FEATURE.update(
dict.fromkeys(
("email", "email-src", "email-dst"),
"email address"
)
)
TYPE_TO_FEATURE.update(
dict.fromkeys(
("other", "text"),
"text"
)
)
class FarsightDnsdbParser():
@ -25,8 +85,9 @@ class FarsightDnsdbParser():
self.misp_event = MISPEvent()
self.misp_event.add_attribute(**attribute)
self.passivedns_mapping = {
'bailiwick': {'type': 'text', 'object_relation': 'bailiwick'},
'bailiwick': {'type': 'domain', 'object_relation': 'bailiwick'},
'count': {'type': 'counter', 'object_relation': 'count'},
'raw_rdata': {'type': 'text', 'object_relation': 'raw_rdata'},
'rdata': {'type': 'text', 'object_relation': 'rdata'},
'rrname': {'type': 'text', 'object_relation': 'rrname'},
'rrtype': {'type': 'text', 'object_relation': 'rrtype'},
@ -35,37 +96,23 @@ class FarsightDnsdbParser():
'zone_time_first': {'type': 'datetime', 'object_relation': 'zone_time_first'},
'zone_time_last': {'type': 'datetime', 'object_relation': 'zone_time_last'}
}
self.type_to_feature = {
'domain': 'domain name',
'hostname': 'hostname',
'ip-src': 'IP address',
'ip-dst': 'IP address'
}
self.comment = 'Result from an %s lookup on DNSDB about the %s: %s'
self.comment = 'Result from a %s lookup on DNSDB about the %s: %s'
def parse_passivedns_results(self, query_response):
default_fields = ('count', 'rrname', 'rrtype')
optional_fields = (
'bailiwick',
'time_first',
'time_last',
'zone_time_first',
'zone_time_last'
)
for query_type, results in query_response.items():
comment = self.comment % (query_type, self.type_to_feature[self.attribute['type']], self.attribute['value'])
comment = self.comment % (query_type, TYPE_TO_FEATURE[self.attribute['type']], self.attribute['value'])
for result in results:
passivedns_object = MISPObject('passive-dns')
for feature in default_fields:
passivedns_object.add_attribute(**self._parse_attribute(comment, feature, result[feature]))
for feature in optional_fields:
if result.get(feature):
passivedns_object.add_attribute(**self._parse_attribute(comment, feature, result[feature]))
if isinstance(result['rdata'], list):
for rdata in result['rdata']:
passivedns_object.distribution = DEFAULT_DISTRIBUTION_SETTING
if result.get('rdata') and isinstance(result['rdata'], list):
for rdata in result.pop('rdata'):
passivedns_object.add_attribute(**self._parse_attribute(comment, 'rdata', rdata))
else:
passivedns_object.add_attribute(**self._parse_attribute(comment, 'rdata', result['rdata']))
for feature, value in result.items():
passivedns_object.add_attribute(**self._parse_attribute(comment, feature, value))
if result.get('time_first'):
passivedns_object.first_seen = result['time_first']
if result.get('time_last'):
passivedns_object.last_seen = result['time_last']
passivedns_object.add_reference(self.attribute['uuid'], 'related-to')
self.misp_event.add_object(passivedns_object)
@ -75,7 +122,7 @@ class FarsightDnsdbParser():
return {'results': results}
def _parse_attribute(self, comment, feature, value):
attribute = {'value': value, 'comment': comment}
attribute = {'value': value, 'comment': comment, 'distribution': DEFAULT_DISTRIBUTION_SETTING}
attribute.update(self.passivedns_mapping[feature])
return attribute
@ -93,40 +140,90 @@ def handler(q=False):
if attribute['type'] not in mispattributes['input']:
return {'error': 'Unsupported attributes type'}
config = request['config']
args = {'apikey': config['apikey']}
for feature, default in zip(('server', 'limit'), (DEFAULT_DNSDB_SERVER, DEFAULT_LIMIT)):
args[feature] = config[feature] if config.get(feature) else default
client = DnsdbClient(**args)
to_query = lookup_ip if attribute['type'] in ('ip-src', 'ip-dst') else lookup_name
response = to_query(client, attribute['value'])
if not config.get('server'):
config['server'] = DEFAULT_DNSDB_SERVER
client_args = {feature: config[feature] for feature in ('apikey', 'server')}
client = dnsdb2.Client(**client_args)
to_query, args = parse_input(attribute, config)
try:
response = to_query(client, *args)
except dnsdb2.DnsdbException as e:
return {'error': e.__str__()}
except dnsdb2.exceptions.QueryError:
return {'error': 'A communication error occurred while executing the query, or the server reported an error due to invalid arguments.'}
if not response:
return {'error': f"Empty results on Farsight DNSDB for the queries {attribute['type']}: {attribute['value']}."}
return {'error': f"Empty results on Farsight DNSDB for the {TYPE_TO_FEATURE[attribute['type']]}: {attribute['value']}."}
parser = FarsightDnsdbParser(attribute)
parser.parse_passivedns_results(response)
return parser.get_results()
def lookup_name(client, name):
def parse_input(attribute, config):
lookup_args = {
'limit': config['limit'] if config.get('limit') else DEFAULT_LIMIT,
'offset': 0,
'ignore_limited': True,
'humantime': True
}
if attribute.get('first_seen'):
lookup_args['time_first_after'] = parse_timestamp(attribute['first_seen'])
attribute_type = attribute['type']
if attribute_type in flex_query_input:
return flex_queries, (lookup_args, attribute['value'])
flex = add_flex_queries(config.get('flex_queries'))
to_query = lookup_ip if 'ip-' in attribute_type else lookup_name
return to_query, (lookup_args, attribute['value'], flex)
def parse_timestamp(str_date):
datetime_date = datetime.strptime(str_date, '%Y-%m-%dT%H:%M:%S.%f%z')
return str(int(datetime_date.timestamp()))
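# e.g. (hedged example) parse_timestamp('2021-06-01T10:00:00.000000+00:00') -> '1622541600'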
def add_flex_queries(flex):
if not flex:
return False
if flex in ('True', 'true', True, '1', 1):
return True
return False
def flex_queries(client, lookup_args, name):
response = {}
try:
res = client.query_rrset(name) # RRSET = entries in the left-hand side of the domain name related labels
response['rrset'] = list(res)
except QueryError:
pass
try:
res = client.query_rdata_name(name) # RDATA = entries on the right-hand side of the domain name related labels
response['rdata'] = list(res)
except QueryError:
pass
name = name.replace('@', '.')
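# flex regex queries run against DNS record text, so an e-mail style value such
# as 'user@example.com' is rewritten to 'user.example.com' before querying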
for feature in ('rdata', 'rrnames'):
to_call = getattr(client, f'flex_{feature}_regex')
results = list(to_call(name, **lookup_args))
for result in list(to_call(name.replace('.', '\\.'), **lookup_args)):
if result not in results:
results.append(result)
if results:
response[f'flex_{feature}'] = results
return response
def lookup_ip(client, ip):
try:
res = client.query_rdata_ip(ip)
response = {'rdata': list(res)}
except QueryError:
response = {}
def lookup_name(client, lookup_args, name, flex):
response = {}
# RRSET = entries in the left-hand side of the domain name related labels
rrset_response = list(client.lookup_rrset(name, **lookup_args))
if rrset_response:
response['rrset'] = rrset_response
# RDATA = entries on the right-hand side of the domain name related labels
rdata_response = list(client.lookup_rdata_name(name, **lookup_args))
if rdata_response:
response['rdata'] = rdata_response
if flex:
response.update(flex_queries(client, lookup_args, name))
return response
def lookup_ip(client, lookup_args, ip, flex):
response = {}
res = list(client.lookup_rdata_ip(ip, **lookup_args))
if res:
response['rdata'] = res
if flex:
response.update(flex_queries(client, lookup_args, ip))
return response

View File

@ -1,6 +1,8 @@
import json
import random
import time
try:
from google import google
from googleapi import google
except ImportError:
print("GoogleAPI not installed. Command : pip install git+https://github.com/abenassi/Google-Search-API")
@ -10,6 +12,10 @@ moduleinfo = {'author': 'Oun & Gindt', 'module-type': ['hover'],
'description': 'An expansion hover module to expand google search information about an URL'}
def sleep(retry):
time.sleep(random.uniform(0, min(40, 0.01 * 2 ** retry)))
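# (hedged note) this is exponential backoff with jitter: retries 0, 1, 2 wait at
# most 0.01 s, 0.02 s and 0.04 s respectively, with the delay capped at 40 s.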
def handler(q=False):
if q is False:
return False
@ -18,10 +24,16 @@ def handler(q=False):
return {'error': "Unsupported attributes type"}
num_page = 1
res = ""
search_results = google.search(request['url'], num_page)
for i in range(3):
# The googleapi module sets a random useragent. The output depends on the useragent.
# It's better to retry 3 times.
for retry in range(3):
search_results = google.search(request['url'], num_page)
if len(search_results) > 0:
break
sleep(retry)
for i, search_result in enumerate(search_results):
res += "("+str(i+1)+")" + '\t'
res += json.dumps(search_results[i].description, ensure_ascii=False)
res += json.dumps(search_result.description, ensure_ascii=False)
res += '\n\n'
return {'results': [{'types': mispattributes['output'], 'values':res}]}

View File

@ -1,61 +1,254 @@
import requests
import json
misperrors = {'error': 'Error'}
mispattributes = {'input': ['ip-dst', 'ip-src'], 'output': ['text']}
import requests
from pymisp import MISPEvent, MISPObject
misperrors = {"error": "Error"}
mispattributes = {"input": ["ip-dst", "ip-src", "vulnerability"], "output": ["text"]}
moduleinfo = {
'version': '0.2',
'author': 'Aurélien Schwab <aurelien.schwab+dev@gmail.com>',
'description': 'Module to access GreyNoise.io API.',
'module-type': ['hover']
"version": "1.1",
"author": "Brad Chiappetta <brad@greynoise.io>",
"description": "Module to access GreyNoise.io API.",
"module-type": ["hover"],
}
moduleconfig = ['api_key']
greynoise_api_url = 'https://api.greynoise.io/v2/noise/quick/'
moduleconfig = ["api_key", "api_type"]
codes_mapping = {
'0x00': 'The IP has never been observed scanning the Internet',
'0x01': 'The IP has been observed by the GreyNoise sensor network',
'0x02': 'The IP has been observed scanning the GreyNoise sensor network, but has not completed a full connection, meaning this can be spoofed',
'0x03': 'The IP is adjacent to another host that has been directly observed by the GreyNoise sensor network',
'0x04': 'Reserved',
'0x05': 'This IP is commonly spoofed in Internet-scan activity',
'0x06': 'This IP has been observed as noise, but this host belongs to a cloud provider where IPs can be cycled frequently',
'0x07': 'This IP is invalid',
'0x08': 'This IP was classified as noise, but has not been observed engaging in Internet-wide scans or attacks in over 60 days'
"0x00": "The IP has never been observed scanning the Internet",
"0x01": "The IP has been observed by the GreyNoise sensor network",
"0x02": "The IP has been observed scanning the GreyNoise sensor network, "
"but has not completed a full connection, meaning this can be spoofed",
"0x03": "The IP is adjacent to another host that has been directly observed by the GreyNoise sensor network",
"0x04": "Reserved",
"0x05": "This IP is commonly spoofed in Internet-scan activity",
"0x06": "This IP has been observed as noise, but this host belongs to a cloud provider where IPs can be "
"cycled frequently",
"0x07": "This IP is invalid",
"0x08": "This IP was classified as noise, but has not been observed engaging in Internet-wide scans or "
"attacks in over 90 days",
"0x09": "IP was found in RIOT",
"0x10": "IP has been observed by the GreyNoise sensor network and is in RIOT",
}
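# Hedged example (hypothetical IP): the v2 "quick" endpoint returns a short code
# that is translated through codes_mapping above, e.g.
#   GET https://api.greynoise.io/v2/noise/quick/203.0.113.7
#   -> {"ip": "203.0.113.7", "noise": true, "riot": false, "code": "0x01"}
#   -> "The IP has been observed by the GreyNoise sensor network"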
vulnerability_mapping = {
"id": ("vulnerability", "CVE #"),
"details": ("text", "Details"),
"count": ("text", "Total Scanner Count"),
}
enterprise_context_basic_mapping = {"ip": ("text", "IP Address"), "code_message": ("text", "Code Message")}
enterprise_context_advanced_mapping = {
"noise": ("text", "Is Internet Background Noise"),
"link": ("link", "Visualizer Link"),
"classification": ("text", "Classification"),
"actor": ("text", "Actor"),
"tags": ("text", "Tags"),
"cve": ("text", "CVEs"),
"first_seen": ("text", "First Seen Scanning"),
"last_seen": ("text", "Last Seen Scanning"),
"vpn": ("text", "Known VPN Service"),
"vpn_service": ("text", "VPN Service Name"),
"bot": ("text", "Known BOT"),
}
enterprise_context_advanced_metadata_mapping = {
"asn": ("text", "ASN"),
"rdns": ("text", "rDNS"),
"category": ("text", "Category"),
"tor": ("text", "Known Tor Exit Node"),
"region": ("text", "Region"),
"city": ("text", "City"),
"country": ("text", "Country"),
"country_code": ("text", "Country Code"),
"organization": ("text", "Organization"),
}
enterprise_riot_mapping = {
"riot": ("text", "Is Common Business Service"),
"link": ("link", "Visualizer Link"),
"category": ("text", "RIOT Category"),
"name": ("text", "Provider Name"),
"trust_level": ("text", "RIOT Trust Level"),
"last_updated": ("text", "Last Updated"),
}
community_found_mapping = {
"ip": ("text", "IP Address"),
"noise": ("text", "Is Internet Background Noise"),
"riot": ("text", "Is Common Business Service"),
"classification": ("text", "Classification"),
"last_seen": ("text", "Last Seen"),
"name": ("text", "Name"),
"link": ("link", "Visualizer Link"),
}
community_not_found_mapping = {
"ip": ("text", "IP Address"),
"noise": ("text", "Is Internet Background Noise"),
"riot": ("text", "Is Common Business Service"),
"message": ("text", "Message"),
}
misp_event = MISPEvent()
def handler(q=False):
def handler(q=False): # noqa: C901
if q is False:
return False
request = json.loads(q)
if not request.get('config') or not request['config'].get('api_key'):
return {'error': 'Missing Greynoise API key.'}
if not request.get("config") or not request["config"].get("api_key"):
return {"error": "Missing Greynoise API key."}
headers = {
'Accept': 'application/json',
'key': request['config']['api_key']
"Accept": "application/json",
"key": request["config"]["api_key"],
"User-Agent": "greynoise-misp-module-{}".format(moduleinfo["version"]),
}
for input_type in mispattributes['input']:
if input_type in request:
ip = request[input_type]
break
else:
misperrors['error'] = "Unsupported attributes type."
if not (request.get("vulnerability") or request.get("ip-dst") or request.get("ip-src")):
misperrors["error"] = "Vulnerability id missing"
return misperrors
response = requests.get(f'{greynoise_api_url}{ip}', headers=headers) # Real request
if response.status_code == 200: # OK (record found)
return {'results': [{'types': mispattributes['output'], 'values': codes_mapping[response.json()['code']]}]}
ip = ""
vulnerability = ""
if request.get("ip-dst"):
ip = request.get("ip-dst")
elif request.get("ip-src"):
ip = request.get("ip-src")
else:
vulnerability = request.get("vulnerability")
if ip:
if request["config"]["api_type"] and request["config"]["api_type"] == "enterprise":
greynoise_api_url = "https://api.greynoise.io/v2/noise/quick/"
else:
greynoise_api_url = "https://api.greynoise.io/v3/community/"
response = requests.get(f"{greynoise_api_url}{ip}", headers=headers) # Real request for IP Query
if response.status_code == 200:
if request["config"]["api_type"] == "enterprise":
response = response.json()
enterprise_context_object = MISPObject("greynoise-ip-context")
for feature in ("ip", "code_message"):
if feature == "code_message":
value = codes_mapping[response.get("code")]
else:
value = response.get(feature)
if value:
attribute_type, relation = enterprise_context_basic_mapping[feature]
enterprise_context_object.add_attribute(relation, **{"type": attribute_type, "value": value})
if response["noise"]:
greynoise_api_url = "https://api.greynoise.io/v2/noise/context/"
context_response = requests.get(f"{greynoise_api_url}{ip}", headers=headers)
context_response = context_response.json()
context_response["link"] = "https://www.greynoise.io/viz/ip/" + ip
if "tags" in context_response:
context_response["tags"] = ",".join(context_response["tags"])
if "cve" in context_response:
context_response["cve"] = ",".join(context_response["cve"])
for feature in enterprise_context_advanced_mapping.keys():
value = context_response.get(feature)
if value:
attribute_type, relation = enterprise_context_advanced_mapping[feature]
enterprise_context_object.add_attribute(
relation, **{"type": attribute_type, "value": value}
)
for feature in enterprise_context_advanced_metadata_mapping.keys():
value = context_response["metadata"].get(feature)
if value:
attribute_type, relation = enterprise_context_advanced_metadata_mapping[feature]
enterprise_context_object.add_attribute(
relation, **{"type": attribute_type, "value": value}
)
if response["riot"]:
greynoise_api_url = "https://api.greynoise.io/v2/riot/"
riot_response = requests.get(f"{greynoise_api_url}{ip}", headers=headers)
riot_response = riot_response.json()
riot_response["link"] = "https://www.greynoise.io/viz/riot/" + ip
for feature in enterprise_riot_mapping.keys():
value = riot_response.get(feature)
if value:
attribute_type, relation = enterprise_riot_mapping[feature]
enterprise_context_object.add_attribute(
relation, **{"type": attribute_type, "value": value}
)
misp_event.add_object(enterprise_context_object)
event = json.loads(misp_event.to_json())
results = {key: event[key] for key in ("Attribute", "Object") if (key in event and event[key])}
return {"results": results}
else:
response = response.json()
community_context_object = MISPObject("greynoise-community-ip-context")
for feature in community_found_mapping.keys():
value = response.get(feature)
if value:
attribute_type, relation = community_found_mapping[feature]
community_context_object.add_attribute(relation, **{"type": attribute_type, "value": value})
misp_event.add_object(community_context_object)
event = json.loads(misp_event.to_json())
results = {key: event[key] for key in ("Attribute", "Object") if (key in event and event[key])}
return {"results": results}
if response.status_code == 404 and request["config"]["api_type"] != "enterprise":
response = response.json()
community_context_object = MISPObject("greynoise-community-ip-context")
for feature in community_not_found_mapping.keys():
value = response.get(feature)
if value:
attribute_type, relation = community_not_found_mapping[feature]
community_context_object.add_attribute(relation, **{"type": attribute_type, "value": value})
misp_event.add_object(community_context_object)
event = json.loads(misp_event.to_json())
results = {key: event[key] for key in ("Attribute", "Object") if (key in event and event[key])}
return {"results": results}
if vulnerability:
if request["config"]["api_type"] and request["config"]["api_type"] == "enterprise":
greynoise_api_url = "https://api.greynoise.io/v2/experimental/gnql/stats"
querystring = {"query": f"last_seen:1w cve:{vulnerability}"}
else:
misperrors["error"] = "Vulnerability Not Supported with Community API Key"
return misperrors
response = requests.get(f"{greynoise_api_url}", headers=headers, params=querystring) # Real request
if response.status_code == 200:
response = response.json()
vulnerability_object = MISPObject("greynoise-vuln-info")
response["details"] = (
"The IP count below reflects the number of IPs seen "
"by GreyNoise in the last 7 days scanning for this CVE."
)
response["id"] = vulnerability
for feature in ("id", "details", "count"):
value = response.get(feature)
if value:
attribute_type, relation = vulnerability_mapping[feature]
vulnerability_object.add_attribute(relation, **{"type": attribute_type, "value": value})
classifications = response["stats"].get("classifications")
for item in classifications:
if item["classification"] == "benign":
value = item["count"]
attribute_type, relation = ("text", "Benign Scanner Count")
vulnerability_object.add_attribute(relation, **{"type": attribute_type, "value": value})
if item["classification"] == "unknown":
value = item["count"]
attribute_type, relation = ("text", "Unknown Scanner Count")
vulnerability_object.add_attribute(relation, **{"type": attribute_type, "value": value})
if item["classification"] == "malicious":
value = item["count"]
attribute_type, relation = ("text", "Malicious Scanner Count")
vulnerability_object.add_attribute(relation, **{"type": attribute_type, "value": value})
misp_event.add_object(vulnerability_object)
event = json.loads(misp_event.to_json())
results = {key: event[key] for key in ("Attribute", "Object") if (key in event and event[key])}
return {"results": results}
# There is an error
errors = {
400: "Bad request.",
404: "IP not observed scanning the internet or contained in RIOT data set.",
401: "Unauthorized. Please check your API key.",
429: "Too many requests. You've hit the rate-limit."
429: "Too many requests. You've hit the rate-limit.",
}
try:
misperrors['error'] = errors[response.status_code]
misperrors["error"] = errors[response.status_code]
except KeyError:
misperrors['error'] = f'GreyNoise API not accessible (HTTP {response.status_code})'
return misperrors['error']
misperrors["error"] = f"GreyNoise API not accessible (HTTP {response.status_code})"
return misperrors
def introspection():
@ -63,5 +256,5 @@ def introspection():
def version():
moduleinfo['config'] = moduleconfig
moduleinfo["config"] = moduleconfig
return moduleinfo

View File

@ -2,10 +2,10 @@ import json
import requests
misperrors = {'error': 'Error'}
mispattributes = {'input': ['md5', 'sha1', 'sha256'], 'output': ['text']}
mispattributes = {'input': ['md5'], 'output': ['text']}
moduleinfo = {'version': '0.2', 'author': 'Alexandre Dulaunoy', 'description': 'An expansion module to check hashes against hashdd.com including NSLR dataset.', 'module-type': ['hover']}
moduleconfig = []
hashddapi_url = 'https://api.hashdd.com/'
hashddapi_url = 'https://api.hashdd.com/v1/knownlevel/nsrl/'
def handler(q=False):
@ -20,10 +20,10 @@ def handler(q=False):
if v is None:
misperrors['error'] = 'Hash value is missing.'
return misperrors
r = requests.post(hashddapi_url, data={'hash': v})
r = requests.get(hashddapi_url + v)
if r.status_code == 200:
state = json.loads(r.text)
summary = state[v]['known_level'] if state and state.get(v) else 'Unknown hash'
summary = state['knownlevel'] if state and state['result'] == "SUCCESS" else state['message']
else:
misperrors['error'] = '{} API not accessible'.format(hashddapi_url)
return misperrors['error']

View File

@ -0,0 +1,108 @@
import json
import requests
from . import check_input_attribute, standard_error_message
from collections import defaultdict
from pymisp import MISPEvent, MISPObject
misperrors = {'error': 'Error'}
mispattributes = {'input': ['md5', 'sha1', 'sha256'], 'format': 'misp_standard'}
moduleinfo = {'version': '2', 'author': 'Alexandre Dulaunoy',
'description': 'An expansion module to enrich a file hash with hashlookup.circl.lu services (NSRL and other sources)',
'module-type': ['expansion', 'hover']}
moduleconfig = ["custom_API"]
hashlookup_url = 'https://hashlookup.circl.lu/'
class HashlookupParser():
def __init__(self, attribute, hashlookupresult, api_url):
self.attribute = attribute
self.hashlookupresult = hashlookupresult
self.api_url = api_url
self.misp_event = MISPEvent()
self.misp_event.add_attribute(**attribute)
self.references = defaultdict(list)
def get_result(self):
if self.references:
self.__build_references()
event = json.loads(self.misp_event.to_json())
results = {key: event[key] for key in ('Attribute', 'Object') if (key in event and event[key])}
return {'results': results}
def parse_hashlookup_information(self):
hashlookup_object = MISPObject('hashlookup')
if 'source' in self.hashlookupresult:
hashlookup_object.add_attribute('source', **{'type': 'text', 'value': self.hashlookupresult['source']})
if 'KnownMalicious' in self.hashlookupresult:
hashlookup_object.add_attribute('KnownMalicious', **{'type': 'text', 'value': self.hashlookupresult['KnownMalicious']})
if 'MD5' in self.hashlookupresult:
hashlookup_object.add_attribute('MD5', **{'type': 'md5', 'value': self.hashlookupresult['MD5']})
# SHA-1 is the default value in hashlookup, so it must always be present
hashlookup_object.add_attribute('SHA-1', **{'type': 'sha1', 'value': self.hashlookupresult['SHA-1']})
if 'SHA-256' in self.hashlookupresult:
hashlookup_object.add_attribute('SHA-256', **{'type': 'sha256', 'value': self.hashlookupresult['SHA-256']})
if 'SSDEEP' in self.hashlookupresult:
hashlookup_object.add_attribute('SSDEEP', **{'type': 'ssdeep', 'value': self.hashlookupresult['SSDEEP']})
if 'TLSH' in self.hashlookupresult:
hashlookup_object.add_attribute('TLSH', **{'type': 'tlsh', 'value': self.hashlookupresult['TLSH']})
if 'FileName' in self.hashlookupresult:
hashlookup_object.add_attribute('FileName', **{'type': 'filename', 'value': self.hashlookupresult['FileName']})
if 'FileSize' in self.hashlookupresult:
hashlookup_object.add_attribute('FileSize', **{'type': 'size-in-bytes', 'value': self.hashlookupresult['FileSize']})
hashlookup_object.add_reference(self.attribute['uuid'], 'related-to')
self.misp_event.add_object(hashlookup_object)
def __build_references(self):
for object_uuid, references in self.references.items():
for misp_object in self.misp_event.objects:
if misp_object.uuid == object_uuid:
for reference in references:
misp_object.add_reference(**reference)
break
def check_url(url):
return "{}/".format(url) if not url.endswith('/') else url
def handler(q=False):
if q is False:
return False
request = json.loads(q)
if not request.get('attribute') or not check_input_attribute(request['attribute']):
return {'error': f'{standard_error_message}, which should contain at least a type, a value and an uuid.'}
attribute = request['attribute']
if attribute.get('type') not in ('md5', 'sha1', 'sha256'):
misperrors['error'] = 'md5 or sha1 or sha256 is missing.'
return misperrors
api_url = check_url(request['config']['custom_API']) if request['config'].get('custom_API') else hashlookup_url
r = requests.get("{}/lookup/{}/{}".format(api_url, attribute.get('type'), attribute['value']))
if r.status_code == 200:
hashlookupresult = r.json()
if not hashlookupresult:
misperrors['error'] = 'Empty result'
return misperrors
elif r.status_code == 404:
misperrors['error'] = 'Non existing hash'
return misperrors
else:
misperrors['error'] = 'API not accessible'
return misperrors
parser = HashlookupParser(attribute, hashlookupresult, api_url)
parser.parse_hashlookup_information()
result = parser.get_result()
return result
def introspection():
return mispattributes
def version():
moduleinfo['config'] = moduleconfig
return moduleinfo
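# Hedged invocation sketch (hash value and uuid are hypothetical placeholders):
#
#   query = json.dumps({
#       'attribute': {'type': 'md5', 'uuid': '0e4b...', 'value': '8a9b...'},
#       'config': {}
#   })
#   handler(query)   # -> {'results': {'Attribute': [...], 'Object': [...]}}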

View File

@ -1,13 +1,14 @@
# -*- coding: utf-8 -*-
import requests
import json
misperrors = {'error': 'Error'}
mispattributes = {'input': ['email-dst', 'email-src'], 'output': ['text']} # All mails as input
moduleinfo = {'version': '0.1', 'author': 'Aurélien Schwab', 'description': 'Module to access haveibeenpwned.com API.', 'module-type': ['hover']}
moduleconfig = ['user-agent'] # TODO take this into account in the code
mispattributes = {'input': ['email-dst', 'email-src'], 'output': ['text']}
moduleinfo = {'version': '0.2', 'author': 'Corsin Camichel, Aurélien Schwab', 'description': 'Module to access haveibeenpwned.com API (v3).', 'module-type': ['hover']}
moduleconfig = ['api_key']
haveibeenpwned_api_url = 'https://api.haveibeenpwned.com/api/v2/breachedaccount/'
default_user_agent = 'MISP-Module'  # User agent (must be set, required by API)
haveibeenpwned_api_url = 'https://haveibeenpwned.com/api/v3/breachedaccount/'
API_KEY = "" # details at https://www.troyhunt.com/authentication-and-the-have-i-been-pwned-api/
def handler(q=False):
@ -22,15 +23,21 @@ def handler(q=False):
misperrors['error'] = "Unsupported attributes type"
return misperrors
r = requests.get(haveibeenpwned_api_url + email, headers={'user-agent': default_user_agent}) # Real request
if r.status_code == 200: # OK (record found)
if request.get('config') is None or request['config'].get('api_key') is None:
misperrors['error'] = 'Have I Been Pwned authentication is incomplete (no API key)'
return misperrors
else:
API_KEY = request['config'].get('api_key')
r = requests.get(haveibeenpwned_api_url + email, headers={'hibp-api-key': API_KEY})
if r.status_code == 200:
breaches = json.loads(r.text)
if breaches:
return {'results': [{'types': mispattributes['output'], 'values': breaches}]}
elif r.status_code == 404: # Not found (not an error)
elif r.status_code == 404:
return {'results': [{'types': mispattributes['output'], 'values': 'OK (Not Found)'}]}
else: # Real error
misperrors['error'] = 'haveibeenpwned.com API not accessible (HTTP ' + str(r.status_code) + ')'
else:
misperrors['error'] = f'haveibeenpwned.com API not accessible (HTTP {str(r.status_code)})'
return misperrors['error']

View File

@ -0,0 +1,873 @@
import json
import logging
from typing import Dict, List, Any
import requests
import re
from requests.exceptions import (
HTTPError,
ProxyError,
InvalidURL,
ConnectTimeout
)
from . import check_input_attribute, standard_error_message
from pymisp import MISPEvent, MISPObject, Distribution
ip_query_input_type = [
'ip-src',
'ip-dst'
]
domain_query_input_type = [
'hostname',
'domain'
]
email_query_input_type = [
'email',
'email-src',
'email-dst',
'target-email',
'whois-registrant-email'
]
phone_query_input_type = [
'phone-number',
'whois-registrant-phone'
]
md5_query_input_type = [
'md5',
'x509-fingerprint-md5',
'ja3-fingerprint-md5',
'hassh-md5',
'hasshserver-md5'
]
sha1_query_input_type = [
'sha1',
'x509-fingerprint-sha1'
]
sha256_query_input_type = [
'sha256',
'x509-fingerprint-sha256'
]
sha512_query_input_type = [
'sha512'
]
misperrors = {
'error': 'Error'
}
mispattributes = {
'input': ip_query_input_type + domain_query_input_type + email_query_input_type + phone_query_input_type
+ md5_query_input_type + sha1_query_input_type + sha256_query_input_type + sha512_query_input_type,
'format': 'misp_standard'
}
moduleinfo = {
'version': '0.1',
'author': 'Mike Champ',
'description': 'HYAS Insight expansion and hover module to enrich MISP attributes with HYAS Insight data.',
'module-type': ['expansion', 'hover']
}
moduleconfig = ['apikey']
TIMEOUT = 60
logger = logging.getLogger('hyasinsight')
logger.setLevel(logging.DEBUG)
HYAS_API_BASE_URL = 'https://insight.hyas.com/api/ext/'
WHOIS_CURRENT_BASE_URL = 'https://api.hyas.com/'
DEFAULT_DISTRIBUTION_SETTING = Distribution.your_organisation_only.value
IPV4_REGEX = r'\b((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\b([^\/]|$)'
IPV6_REGEX = r'\b(?:(?:[0-9a-fA-F]{1,4}:){7,7}[0-9a-fA-F]{1,4}|(?:[0-9a-fA-F]{1,4}:){1,7}:|(?:[0-9a-fA-F]{1,4}:){1,6}:[0-9a-fA-F]{1,4}|(?:[0-9a-fA-F]{1,4}:){1,5}(:[0-9a-fA-F]{1,4}){1,2}|(?:[0-9a-fA-F]{1,4}:){1,4}(:[0-9a-fA-F]{1,4}){1,3}|(?:[0-9a-fA-F]{1,4}:){1,3}(:[0-9a-fA-F]{1,4}){1,4}|(?:[0-9a-fA-F]{1,4}:){1,2}(:[0-9a-fA-F]{1,4}){1,5}|[0-9a-fA-F]{1,4}:((:[0-9a-fA-F]{1,4}){1,6})|:(?:(:[0-9a-fA-F]{1,4}){1,7}|:)|fe80:(:[0-9a-fA-F]{0,4}){0,4}%[0-9a-zA-Z]{1,}|::(ffff(:0{1,4}){0,1}:){0,1}((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])|([0-9a-fA-F]{1,4}:){1,4}:((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9]))\b' # noqa: E501
# Enrichment Types
# HYAS API endpoints
PASSIVE_DNS_ENDPOINT = 'passivedns'
DYNAMIC_DNS_ENDPOINT = 'dynamicdns'
PASSIVE_HASH_ENDPOINT = 'passivehash'
SINKHOLE_ENDPOINT = 'sinkhole'
SSL_CERTIFICATE_ENDPOINT = 'ssl_certificate'
DEVICE_GEO_ENDPOINT = 'device_geo'
WHOIS_HISTORIC_ENDPOINT = 'whois'
WHOIS_CURRENT_ENDPOINT = 'whois/v1'
MALWARE_RECORDS_ENDPOINT = 'sample'
MALWARE_INFORMATION_ENDPOINT = 'sample/information'
C2ATTRIBUTION_ENDPOINT = 'c2attribution'
OPEN_SOURCE_INDICATORS_ENDPOINT = 'os_indicators'
# HYAS API endpoint params
DOMAIN_PARAM = 'domain'
IP_PARAM = 'ip'
IPV4_PARAM = 'ipv4'
IPV6_PARAM = 'ipv6'
EMAIL_PARAM = 'email'
PHONE_PARAM = 'phone'
MD5_PARAM = 'md5'
SHA256_PARAM = 'sha256'
SHA512_PARAM = 'sha512'
HASH_PARAM = 'hash'
SHA1_PARAM = 'sha1'
HYAS_IP_ENRICHMENT_ENDPOINTS_LIST = [DYNAMIC_DNS_ENDPOINT, PASSIVE_DNS_ENDPOINT, PASSIVE_HASH_ENDPOINT,
SINKHOLE_ENDPOINT,
SSL_CERTIFICATE_ENDPOINT, DEVICE_GEO_ENDPOINT, C2ATTRIBUTION_ENDPOINT,
MALWARE_RECORDS_ENDPOINT, OPEN_SOURCE_INDICATORS_ENDPOINT]
HYAS_DOMAIN_ENRICHMENT_ENDPOINTS_LIST = [PASSIVE_DNS_ENDPOINT, DYNAMIC_DNS_ENDPOINT, WHOIS_HISTORIC_ENDPOINT,
MALWARE_RECORDS_ENDPOINT, WHOIS_CURRENT_ENDPOINT, PASSIVE_HASH_ENDPOINT,
C2ATTRIBUTION_ENDPOINT, SSL_CERTIFICATE_ENDPOINT,
OPEN_SOURCE_INDICATORS_ENDPOINT]
HYAS_EMAIL_ENRICHMENT_ENDPOINTS_LIST = [DYNAMIC_DNS_ENDPOINT, WHOIS_HISTORIC_ENDPOINT, C2ATTRIBUTION_ENDPOINT]
HYAS_PHONE_ENRICHMENT_ENDPOINTS_LIST = [WHOIS_HISTORIC_ENDPOINT]
HYAS_SHA1_ENRICHMENT_ENDPOINTS_LIST = [SSL_CERTIFICATE_ENDPOINT, MALWARE_INFORMATION_ENDPOINT,
OPEN_SOURCE_INDICATORS_ENDPOINT]
HYAS_SHA256_ENRICHMENT_ENDPOINTS_LIST = [C2ATTRIBUTION_ENDPOINT, MALWARE_INFORMATION_ENDPOINT,
OPEN_SOURCE_INDICATORS_ENDPOINT]
HYAS_SHA512_ENRICHMENT_ENDPOINTS_LIST = [MALWARE_INFORMATION_ENDPOINT]
HYAS_MD5_ENRICHMENT_ENDPOINTS_LIST = [MALWARE_RECORDS_ENDPOINT, MALWARE_INFORMATION_ENDPOINT,
OPEN_SOURCE_INDICATORS_ENDPOINT]
HYAS_OBJECT_NAMES = {
DYNAMIC_DNS_ENDPOINT: "Dynamic DNS Information",
PASSIVE_HASH_ENDPOINT: "Passive Hash Information",
SINKHOLE_ENDPOINT: "Sinkhole Information",
SSL_CERTIFICATE_ENDPOINT: "SSL Certificate Information",
DEVICE_GEO_ENDPOINT: "Mobile Geolocation Information",
C2ATTRIBUTION_ENDPOINT: "C2 Attribution Information",
PASSIVE_DNS_ENDPOINT: "Passive DNS Information",
WHOIS_HISTORIC_ENDPOINT: "Whois Related Information",
WHOIS_CURRENT_ENDPOINT: "Whois Current Related Information",
MALWARE_INFORMATION_ENDPOINT: "Malware Sample Information",
OPEN_SOURCE_INDICATORS_ENDPOINT: "Open Source Intel for malware, ssl certificates and other indicators Information",
MALWARE_RECORDS_ENDPOINT: "Malware Sample Records Information"
}
def parse_attribute(comment, feature, value):
"""Generic Method for parsing the attributes in the object"""
attribute = {
'type': 'text',
'value': value,
'comment': comment,
'distribution': DEFAULT_DISTRIBUTION_SETTING,
'object_relation': feature
}
return attribute
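# e.g. parse_attribute('HYAS INSIGHT ...', 'C2 Domain', 'example.com') returns a text attribute
# with object_relation 'C2 Domain' and the module's default distribution setting.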
def misp_object(endpoint, attribute_value):
object_name = HYAS_OBJECT_NAMES[endpoint]
hyas_object = MISPObject(object_name)
hyas_object.distribution = DEFAULT_DISTRIBUTION_SETTING
hyas_object.template_uuid = "d69d3d15-7b4d-49b1-9e0a-bb29f3d421d9"
hyas_object.template_id = "1"
hyas_object.description = "HYAS INSIGHT " + object_name
hyas_object.comment = "HYAS INSIGHT " + object_name + " for " + attribute_value
setattr(hyas_object, 'meta-category', 'network')
description = (
"An object containing the enriched attribute and "
"related entities from HYAS Insight."
)
hyas_object.from_dict(
**{"meta-category": "misc", "description": description,
"distribution": DEFAULT_DISTRIBUTION_SETTING}
)
return hyas_object
def flatten_json(y: Dict) -> Dict[str, Any]:
"""
:param y: raw_response from HYAS api
:return: Flatten json response
"""
out = {}
def flatten(x, name=''):
# If the Nested key-value
# pair is of dict type
if type(x) is dict:
for a in x:
flatten(x[a], name + a + '_')
else:
out[name[:-1]] = x
flatten(y)
return out
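# Example: flatten_json({'ip': {'geo': {'city': 'Paris'}}}) -> {'ip_geo_city': 'Paris'}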
def get_flatten_json_response(raw_api_response: List[Dict]) -> List[Dict]:
"""
:param raw_api_response: raw_api response from the API
:return: Flatten Json response
"""
flatten_json_response = []
if raw_api_response:
for obj in raw_api_response:
flatten_json_response.append(flatten_json(obj))
return flatten_json_response
def request_body(query_input, query_param, current):
"""
This Method returns the request body for specific endpoint.
"""
if current:
return {
"applied_filters": {
query_input: query_param,
"current": True
}
}
else:
return {
"applied_filters": {
query_input: query_param
}
}
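# Example: request_body('domain', 'example.com', current=False)
# -> {"applied_filters": {"domain": "example.com"}}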
def malware_info_lookup_to_markdown(results: Dict) -> list:
scan_results = results.get('scan_results', [])
out = []
if scan_results:
for res in scan_results:
malware_info_data = {
"avscan_score": results.get(
"avscan_score", ''),
"md5": results.get("md5", ''),
'av_name': res.get(
"av_name", ''),
'def_time': res.get(
"def_time", ''),
'threat_found': res.get(
'threat_found', ''),
'scan_time': results.get("scan_time", ''),
'sha1': results.get('sha1', ''),
'sha256': results.get('sha256', ''),
'sha512': results.get('sha512', '')
}
out.append(malware_info_data)
else:
malware_info_data = {
"avscan_score": results.get("avscan_score", ''),
"md5": results.get("md5", ''),
'av_name': '',
'def_time': '',
'threat_found': '',
'scan_time': results.get("scan_time", ''),
'sha1': results.get('sha1', ''),
'sha256': results.get('sha256', ''),
'sha512': results.get('sha512', '')
}
out.append(malware_info_data)
return out
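# Note: each scan_results entry above yields one row combining the per-engine fields (av_name, def_time,
# threat_found) with the sample-level hashes and scores; without scan results a single row with empty AV fields is kept.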
class RequestHandler:
"""A class for handling any outbound requests from this module."""
def __init__(self, apikey):
self.session = requests.Session()
self.api_key = apikey
def get(self, url: str, headers: dict = None, req_body=None) -> Any:
"""Send a POST request to HYAS Insight and return the parsed JSON response."""
response = []
try:
response = self.session.post(
url, headers=headers, json=req_body
)
if response:
response = response.json()
except (ConnectTimeout, ProxyError, InvalidURL) as error:
msg = "Error connecting with the HYAS Insight."
logger.error(f"{msg} Error: {error}")
misperrors["error"] = msg
return response
def hyas_lookup(self, end_point: str, query_input, query_param, current=False) -> requests.Response:
"""Do a lookup call."""
# Building the request
if current:
url = f'{WHOIS_CURRENT_BASE_URL}{WHOIS_CURRENT_ENDPOINT}'
else:
url = f'{HYAS_API_BASE_URL}{end_point}'
headers = {
'Content-type': 'application/json',
'X-API-Key': self.api_key,
}
req_body = request_body(query_input, query_param, current)
try:
response = self.get(url, headers, req_body)
except HTTPError as error:
msg = f"Error when requesting data from HYAS Insight. {error.response}: {error.response.reason}"
logger.error(msg)
misperrors["error"] = msg
raise
return response
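# Illustrative call (placeholder API key): RequestHandler('my-api-key').hyas_lookup('passivedns', 'domain', 'example.com')
# posts {"applied_filters": {"domain": "example.com"}} to https://insight.hyas.com/api/ext/passivedns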
class HyasInsightParser:
"""A class for handling the enrichment objects"""
def __init__(self, attribute):
self.attribute = attribute
self.misp_event = MISPEvent()
self.misp_event.add_attribute(**attribute)
self.c2_attribution_data_items = [
'actor_ipv4',
'c2_domain',
'c2_ip',
'c2_url',
'datetime',
'email',
'email_domain',
'referrer_domain',
'referrer_ipv4',
'referrer_url',
'sha256'
]
self.c2_attribution_data_items_friendly_names = {
'actor_ipv4': 'Actor IPv4',
'c2_domain': 'C2 Domain',
'c2_ip': 'C2 IP',
'c2_url': 'C2 URL',
'datetime': 'DateTime',
'email': 'Email',
'email_domain': 'Email Domain',
'referrer_domain': 'Referrer Domain',
'referrer_ipv4': 'Referrer IPv4',
'referrer_url': 'Referrer URL',
'sha256': 'SHA256'
}
self.device_geo_data_items = [
'datetime',
'device_user_agent',
'geo_country_alpha_2',
'geo_horizontal_accuracy',
'ipv4',
'ipv6',
'latitude',
'longitude',
'wifi_bssid'
]
self.device_geo_data_items_friendly_names = {
'datetime': 'DateTime',
'device_user_agent': 'Device User Agent',
'geo_country_alpha_2': 'Alpha-2 Code',
'geo_horizontal_accuracy': 'GPS Horizontal Accuracy',
'ipv4': 'IPv4 Address',
'ipv6': 'IPv6 Address',
'latitude': 'Latitude',
'longitude': 'Longitude',
'wifi_bssid': 'WIFI BSSID'
}
self.dynamic_dns_data_items = [
'a_record',
'account',
'created',
'created_ip',
'domain',
'domain_creator_ip',
'email',
]
self.dynamic_dns_data_items_friendly_names = {
'a_record': 'A Record',
'account': 'Account Holder',
'created': 'Created Date',
'created_ip': 'Account Holder IP Address',
'domain': 'Domain',
'domain_creator_ip': 'Domain Creator IP Address',
'email': 'Email Address',
}
self.os_indicators_data_items = [
'context',
'datetime',
'domain',
'domain_2tld',
'first_seen',
'ipv4',
'ipv6',
'last_seen',
'md5',
'sha1',
'sha256',
'source_name',
'source_url',
'url'
]
self.os_indicators_data_items_friendly_names = {
'context': 'Context',
'datetime': 'DateTime',
'domain': 'Domain',
'domain_2tld': 'Domain 2TLD',
'first_seen': 'First Seen',
'ipv4': 'IPv4 Address',
'ipv6': 'IPv6 Address',
'last_seen': 'Last Seen',
'md5': 'MD5',
'sha1': 'SHA1',
'sha256': 'SHA256',
'source_name': 'Source Name',
'source_url': 'Source URL',
'url': 'URL'
}
self.passive_dns_data_items = [
'cert_name',
'count',
'domain',
'first_seen',
'ip_geo_city_name',
'ip_geo_country_iso_code',
'ip_geo_country_name',
'ip_geo_location_latitude',
'ip_geo_location_longitude',
'ip_geo_postal_code',
'ip_ip',
'ip_isp_autonomous_system_number',
'ip_isp_autonomous_system_organization',
'ip_isp_ip_address',
'ip_isp_isp',
'ip_isp_organization',
'ipv4',
'ipv6',
'last_seen'
]
self.passive_dns_data_items_friendly_names = {
'cert_name': 'Certificate Provider Name',
'count': 'Passive DNS Count',
'domain': 'Domain',
'first_seen': 'First Seen',
'ip_geo_city_name': 'IP Organization City',
'ip_geo_country_iso_code': 'IP Organization Country ISO Code',
'ip_geo_country_name': 'IP Organization Country Name',
'ip_geo_location_latitude': 'IP Organization Latitude',
'ip_geo_location_longitude': 'IP Organization Longitude',
'ip_geo_postal_code': 'IP Organization Postal Code',
'ip_ip': 'IP Address',
'ip_isp_autonomous_system_number': 'ASN IP',
'ip_isp_autonomous_system_organization': 'ASO IP',
'ip_isp_ip_address': 'IP Address',
'ip_isp_isp': 'ISP',
'ip_isp_organization': 'ISP Organization',
'ipv4': 'IPv4 Address',
'ipv6': 'IPv6 Address',
'last_seen': 'Last Seen'
}
self.passive_hash_data_items = [
'domain',
'md5_count'
]
self.passive_hash_data_items_friendly_names = {
'domain': 'Domain',
'md5_count': 'Passive DNS Count'
}
self.malware_records_data_items = [
'datetime',
'domain',
'ipv4',
'ipv6',
'md5',
'sha1',
'sha256'
]
self.malware_records_data_items_friendly_names = {
'datetime': 'DateTime',
'domain': 'Domain',
'ipv4': 'IPv4 Address',
'ipv6': 'IPv6 Address',
'md5': 'MD5',
'sha1': 'SHA1',
'sha256': 'SHA256'
}
self.malware_information_data_items = [
'avscan_score',
'md5',
'av_name',
'def_time',
'threat_found',
'scan_time',
'sha1',
'sha256',
'sha512'
]
self.malware_information_data_items_friendly_names = {
'avscan_score': 'AV Scan Score',
'md5': 'MD5',
'av_name': 'AV Name',
'def_time': 'AV DateTime',
'threat_found': 'Source',
'scan_time': 'Scan DateTime',
'sha1': 'SHA1',
'sha256': 'SHA256',
'sha512': 'SHA512'
}
self.sinkhole_data_items = [
'count',
'country_name',
'country_code',
'data_port',
'datetime',
'ipv4',
'last_seen',
'organization_name',
'sink_source'
]
self.sinkhole_data_items_friendly_names = {
'count': 'Sinkhole Count',
'country_name': 'IP Address Country',
'country_code': 'IP Address Country Code',
'data_port': 'Data Port',
'datetime': 'First Seen',
'ipv4': 'IP Address',
'last_seen': 'Last Seen',
'organization_name': 'ISP Organization',
'sink_source': 'Sink Source IP'
}
self.ssl_certificate_data_items = [
'ip',
'ssl_cert_cert_key',
'ssl_cert_expire_date',
'ssl_cert_issue_date',
'ssl_cert_issuer_commonName',
'ssl_cert_issuer_countryName',
'ssl_cert_issuer_localityName',
'ssl_cert_issuer_organizationName',
'ssl_cert_issuer_organizationalUnitName',
'ssl_cert_issuer_stateOrProvinceName',
'ssl_cert_md5',
'ssl_cert_serial_number',
'ssl_cert_sha1',
'ssl_cert_sha_256',
'ssl_cert_sig_algo',
'ssl_cert_ssl_version',
'ssl_cert_subject_commonName',
'ssl_cert_subject_countryName',
'ssl_cert_subject_localityName',
'ssl_cert_subject_organizationName',
'ssl_cert_subject_organizationalUnitName',
'ssl_cert_timestamp'
]
self.ssl_certificate_data_items_friendly_names = {
'ip': 'IP Address',
'ssl_cert_cert_key': 'Certificate Key',
'ssl_cert_expire_date': 'Certificate Expiration Date',
'ssl_cert_issue_date': 'Certificate Issue Date',
'ssl_cert_issuer_commonName': 'Issuer Common Name',
'ssl_cert_issuer_countryName': 'Issuer Country Name',
'ssl_cert_issuer_localityName': 'Issuer City Name',
'ssl_cert_issuer_organizationName': 'Issuer Organization Name',
'ssl_cert_issuer_organizationalUnitName': 'Issuer Organization Unit Name',
'ssl_cert_issuer_stateOrProvinceName': 'Issuer State or Province Name',
'ssl_cert_md5': 'Certificate MD5',
'ssl_cert_serial_number': 'Certificate Serial Number',
'ssl_cert_sha1': 'Certificate SHA1',
'ssl_cert_sha_256': 'Certificate SHA256',
'ssl_cert_sig_algo': 'Certificate Signature Algorithm',
'ssl_cert_ssl_version': 'SSL Version',
'ssl_cert_subject_commonName': 'Receiver Subject Name',
'ssl_cert_subject_countryName': 'Receiver Country Name',
'ssl_cert_subject_localityName': 'Receiver City Name',
'ssl_cert_subject_organizationName': 'Receiver Organization Name',
'ssl_cert_subject_organizationalUnitName': 'Receiver Organization Unit Name',
'ssl_cert_timestamp': 'Certificate DateTime'
}
self.whois_historic_data_items = [
'abuse_emails',
'address',
'city',
'country',
'datetime',
'domain',
'domain_2tld',
'domain_created_datetime',
'domain_expires_datetime',
'domain_updated_datetime',
'email',
'idn_name',
'name',
'nameserver',
'organization',
'phone',
'privacy_punch',
'registrar'
]
self.whois_historic_data_items_friendly_names = {
'abuse_emails': 'Abuse Emails',
'address': 'Address',
'city': 'City',
'country': 'Country',
'datetime': 'Datetime',
'domain': 'Domain',
'domain_2tld': 'Domain 2tld',
'domain_created_datetime': 'Domain Created Time',
'domain_expires_datetime': 'Domain Expires Time',
'domain_updated_datetime': 'Domain Updated Time',
'email': 'Email Address',
'idn_name': 'IDN Name',
'name': 'Name',
'nameserver': 'Nameserver',
'organization': 'Organization',
'phone': 'Phone Info',
'privacy_punch': 'Privacy Punch',
'registrar': 'Registrar'
}
self.whois_current_data_items = [
'abuse_emails',
'address',
'city',
'country',
'datetime',
'domain',
'domain_2tld',
'domain_created_datetime',
'domain_expires_datetime',
'domain_updated_datetime',
'email',
'idn_name',
'name',
'nameserver',
'organization',
'phone',
'privacy_punch',
'registrar',
'state'
]
self.whois_current_data_items_friendly_names = {
'abuse_emails': 'Abuse Emails',
'address': 'Address',
'city': 'City',
'country': 'Country',
'datetime': 'Datetime',
'domain': 'Domain',
'domain_2tld': 'Domain 2tld',
'domain_created_datetime': 'Domain Created Time',
'domain_expires_datetime': 'Domain Expires Time',
'domain_updated_datetime': 'Domain Updated Time',
'email': 'Email Address',
'idn_name': 'IDN Name',
'name': 'Name',
'nameserver': 'Nameserver',
'organization': 'Organization',
'phone': 'Phone',
'privacy_punch': 'Privacy Punch',
'registrar': 'Registrar',
'state': 'State'
}
def create_misp_attributes_and_objects(self, response, endpoint, attribute_value):
flatten_json_response = get_flatten_json_response(response)
data_items: List[str] = []
data_items_friendly_names: Dict[str, str] = {}
if endpoint == DEVICE_GEO_ENDPOINT:
data_items: List[str] = self.device_geo_data_items
data_items_friendly_names: Dict[str, str] = self.device_geo_data_items_friendly_names
elif endpoint == DYNAMIC_DNS_ENDPOINT:
data_items: List[str] = self.dynamic_dns_data_items
data_items_friendly_names: Dict[str, str] = self.dynamic_dns_data_items_friendly_names
elif endpoint == PASSIVE_DNS_ENDPOINT:
data_items: List[str] = self.passive_dns_data_items
data_items_friendly_names: Dict[str, str] = self.passive_dns_data_items_friendly_names
elif endpoint == PASSIVE_HASH_ENDPOINT:
data_items: List[str] = self.passive_hash_data_items
data_items_friendly_names: Dict[str, str] = self.passive_hash_data_items_friendly_names
elif endpoint == SINKHOLE_ENDPOINT:
data_items: List[str] = self.sinkhole_data_items
data_items_friendly_names: Dict[str, str] = self.sinkhole_data_items_friendly_names
elif endpoint == WHOIS_HISTORIC_ENDPOINT:
data_items = self.whois_historic_data_items
data_items_friendly_names = self.whois_historic_data_items_friendly_names
elif endpoint == WHOIS_CURRENT_ENDPOINT:
data_items: List[str] = self.whois_current_data_items
data_items_friendly_names: Dict[str, str] = self.whois_current_data_items_friendly_names
elif endpoint == SSL_CERTIFICATE_ENDPOINT:
data_items: List[str] = self.ssl_certificate_data_items
data_items_friendly_names: Dict[str, str] = self.ssl_certificate_data_items_friendly_names
elif endpoint == MALWARE_INFORMATION_ENDPOINT:
data_items: List[str] = self.malware_information_data_items
data_items_friendly_names = self.malware_information_data_items_friendly_names
elif endpoint == MALWARE_RECORDS_ENDPOINT:
data_items: List[str] = self.malware_records_data_items
data_items_friendly_names = self.malware_records_data_items_friendly_names
elif endpoint == OPEN_SOURCE_INDICATORS_ENDPOINT:
data_items: List[str] = self.os_indicators_data_items
data_items_friendly_names = self.os_indicators_data_items_friendly_names
elif endpoint == C2ATTRIBUTION_ENDPOINT:
data_items: List[str] = self.c2_attribution_data_items
data_items_friendly_names = self.c2_attribution_data_items_friendly_names
for result in flatten_json_response:
hyas_object = misp_object(endpoint, attribute_value)
for data_item in result.keys():
if data_item in data_items:
data_item_text = data_items_friendly_names[data_item]
data_item_value = str(result[data_item])
hyas_object.add_attribute(
**parse_attribute(hyas_object.comment, data_item_text, data_item_value))
hyas_object.add_reference(self.attribute['uuid'], 'related-to')
self.misp_event.add_object(hyas_object)
def get_results(self):
"""returns the dictionary object to MISP Instance"""
event = json.loads(self.misp_event.to_json())
results = {key: event[key] for key in ('Attribute', 'Object')}
return {'results': results}
def handler(q=False):
"""The function which accepts a JSON document to expand the values and return a dictionary of the expanded
values. """
if q is False:
return False
request = json.loads(q)
# check if the apikey is provided
if not request.get('config') or not request['config'].get('apikey'):
misperrors['error'] = 'HYAS Insight apikey is missing'
return misperrors
apikey = request['config'].get('apikey')
# check attribute is added to the event
if not request.get('attribute') or not check_input_attribute(request['attribute']):
return {'error': f'{standard_error_message}, which should contain at least a type, a value and an uuid.'}
attribute = request['attribute']
attribute_type = attribute['type']
attribute_value = attribute['value']
# check if the attribute type is supported by HYAS Insight
if attribute_type not in mispattributes['input']:
return {'error': 'Unsupported attribute type for HYAS Insight enrichment'}
request_handler = RequestHandler(apikey)
parser = HyasInsightParser(attribute)
has_results = False
if attribute_type in ip_query_input_type:
ip_param = ''
for endpoint in HYAS_IP_ENRICHMENT_ENDPOINTS_LIST:
if endpoint == DEVICE_GEO_ENDPOINT:
if re.match(IPV4_REGEX, attribute_value):
ip_param = IPV4_PARAM
elif re.match(IPV6_REGEX, attribute_value):
ip_param = IPV6_PARAM
elif endpoint == PASSIVE_HASH_ENDPOINT:
ip_param = IPV4_PARAM
elif endpoint == SINKHOLE_ENDPOINT:
ip_param = IPV4_PARAM
elif endpoint == MALWARE_RECORDS_ENDPOINT:
ip_param = IPV4_PARAM
else:
ip_param = IP_PARAM
enrich_response = request_handler.hyas_lookup(endpoint, ip_param, attribute_value)
if endpoint == SSL_CERTIFICATE_ENDPOINT:
enrich_response = enrich_response.get('ssl_certs')
if enrich_response:
has_results = True
parser.create_misp_attributes_and_objects(enrich_response, endpoint, attribute_value)
elif attribute_type in domain_query_input_type:
for endpoint in HYAS_DOMAIN_ENRICHMENT_ENDPOINTS_LIST:
if not endpoint == WHOIS_CURRENT_ENDPOINT:
enrich_response = request_handler.hyas_lookup(endpoint, DOMAIN_PARAM, attribute_value)
else:
enrich_response = request_handler.hyas_lookup(endpoint, DOMAIN_PARAM, attribute_value,
endpoint == WHOIS_CURRENT_ENDPOINT)
enrich_response = enrich_response.get('items')
if enrich_response:
has_results = True
parser.create_misp_attributes_and_objects(enrich_response, endpoint, attribute_value)
elif attribute_type in email_query_input_type:
for endpoint in HYAS_EMAIL_ENRICHMENT_ENDPOINTS_LIST:
enrich_response = request_handler.hyas_lookup(endpoint, EMAIL_PARAM, attribute_value)
if enrich_response:
has_results = True
parser.create_misp_attributes_and_objects(enrich_response, endpoint, attribute_value)
elif attribute_type in phone_query_input_type:
for endpoint in HYAS_PHONE_ENRICHMENT_ENDPOINTS_LIST:
enrich_response = request_handler.hyas_lookup(endpoint, PHONE_PARAM, attribute_value)
if enrich_response:
has_results = True
parser.create_misp_attributes_and_objects(enrich_response, endpoint, attribute_value)
elif attribute_type in md5_query_input_type:
md5_param = MD5_PARAM
for endpoint in HYAS_MD5_ENRICHMENT_ENDPOINTS_LIST:
if endpoint == MALWARE_INFORMATION_ENDPOINT:
md5_param = HASH_PARAM
enrich_response = request_handler.hyas_lookup(endpoint, md5_param, attribute_value)
if enrich_response:
has_results = True
if endpoint == MALWARE_INFORMATION_ENDPOINT:
enrich_response = malware_info_lookup_to_markdown(enrich_response)
parser.create_misp_attributes_and_objects(enrich_response, endpoint, attribute_value)
elif attribute_type in sha1_query_input_type:
sha1_param = SHA1_PARAM
for endpoint in HYAS_SHA1_ENRICHMENT_ENDPOINTS_LIST:
if endpoint == MALWARE_INFORMATION_ENDPOINT:
sha1_param = HASH_PARAM
elif endpoint == SSL_CERTIFICATE_ENDPOINT:
sha1_param = HASH_PARAM
enrich_response = request_handler.hyas_lookup(endpoint, sha1_param, attribute_value)
if enrich_response:
has_results = True
if endpoint == MALWARE_INFORMATION_ENDPOINT:
enrich_response = malware_info_lookup_to_markdown(enrich_response)
parser.create_misp_attributes_and_objects(enrich_response, endpoint, attribute_value)
elif attribute_type in sha256_query_input_type:
sha256_param = SHA256_PARAM
for endpoint in HYAS_SHA256_ENRICHMENT_ENDPOINTS_LIST:
if endpoint == MALWARE_INFORMATION_ENDPOINT:
sha256_param = HASH_PARAM
enrich_response = request_handler.hyas_lookup(endpoint, sha256_param, attribute_value)
if enrich_response:
has_results = True
if endpoint == MALWARE_INFORMATION_ENDPOINT:
enrich_response = malware_info_lookup_to_markdown(enrich_response)
parser.create_misp_attributes_and_objects(enrich_response, endpoint, attribute_value)
elif attribute_type in sha512_query_input_type:
sha512_param = ''
for endpoint in HYAS_SHA512_ENRICHMENT_ENDPOINTS_LIST:
if endpoint == MALWARE_INFORMATION_ENDPOINT:
sha512_param = HASH_PARAM
enrich_response = request_handler.hyas_lookup(endpoint, sha512_param, attribute_value)
if enrich_response:
has_results = True
if endpoint == MALWARE_INFORMATION_ENDPOINT:
enrich_response = malware_info_lookup_to_markdown(enrich_response)
parser.create_misp_attributes_and_objects(enrich_response, endpoint, attribute_value)
if has_results:
return parser.get_results()
else:
return {'error': 'No records found in HYAS Insight for the provided attribute.'}
def introspection():
"""The function that returns a dict of the supported attributes (input and output) by your expansion module."""
return mispattributes
def version():
"""The function that returns a dict with the version and the associated meta-data including potential
configurations required of the module. """
moduleinfo['config'] = moduleconfig
return moduleinfo

View File

@ -0,0 +1,627 @@
import json
import logging
import requests
from requests.exceptions import (
HTTPError,
ProxyError,
InvalidURL,
ConnectTimeout
)
from . import check_input_attribute, standard_error_message
from pymisp import MISPEvent, MISPAttribute, MISPObject, MISPTag, Distribution
ip_query_input_type = [
'ip-src',
'ip-dst'
]
url_query_input_type = [
'hostname',
'domain',
'url',
'uri'
]
email_query_input_type = [
'email',
'email-src',
'email-dst',
'target-email',
'whois-registrant-email'
]
phone_query_input_type = [
'phone-number',
'whois-registrant-phone'
]
misperrors = {
'error': 'Error'
}
mispattributes = {
'input': ip_query_input_type + url_query_input_type + email_query_input_type + phone_query_input_type,
'format': 'misp_standard'
}
moduleinfo = {
'version': '0.1',
'author': 'David Mackler',
'description': 'IPQualityScore MISP Expansion Module for IP reputation, Email Validation, Phone Number Validation, '
'Malicious Domain and Malicious URL Scanner.',
'module-type': ['expansion', 'hover']
}
moduleconfig = ['apikey']
logger = logging.getLogger('ipqualityscore')
logger.setLevel(logging.DEBUG)
BASE_URL = 'https://ipqualityscore.com/api/json'
DEFAULT_DISTRIBUTION_SETTING = Distribution.your_organisation_only.value
IP_ENRICH = 'ip'
URL_ENRICH = 'url'
EMAIL_ENRICH = 'email'
PHONE_ENRICH = 'phone'
class RequestHandler:
"""A class for handling any outbound requests from this module."""
def __init__(self, apikey):
self.session = requests.Session()
self.api_key = apikey
def get(self, url: str, headers: dict = None, params: dict = None) -> requests.Response:
"""General get method to fetch the response from IPQualityScore."""
try:
response = self.session.get(
url, headers=headers, params=params
).json()
if str(response["success"]) != "True":
msg = response["message"]
logger.error(f"Error: {msg}")
misperrors["error"] = msg
else:
return response
except (ConnectTimeout, ProxyError, InvalidURL) as error:
msg = "Error connecting with the IPQualityScore."
logger.error(f"{msg} Error: {error}")
misperrors["error"] = msg
def ipqs_lookup(self, reputation_type: str, ioc: str) -> requests.Response:
"""Do a lookup call."""
url = f"{BASE_URL}/{reputation_type}"
payload = {reputation_type: ioc}
headers = {"IPQS-KEY": self.api_key}
try:
response = self.get(url, headers, payload)
except HTTPError as error:
msg = f"Error when requesting data from IPQualityScore. {error.response}: {error.response.reason}"
logger.error(msg)
misperrors["error"] = msg
raise
return response
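# Illustrative call: RequestHandler(apikey).ipqs_lookup('ip', '8.8.8.8') sends the IPQS-KEY header
# and queries https://ipqualityscore.com/api/json/ip with params {'ip': '8.8.8.8'}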
def parse_attribute(comment, feature, value):
"""Generic Method for parsing the attributes in the object"""
attribute = {
'type': 'text',
'value': value,
'comment': comment,
'distribution': DEFAULT_DISTRIBUTION_SETTING,
'object_relation': feature
}
return attribute
class IPQualityScoreParser:
"""A class for handling the enrichment objects"""
def __init__(self, attribute):
self.rf_white = "#CCCCCC"
self.rf_grey = "#CDCDCD"
self.rf_yellow = "#FFCF00"
self.rf_red = "#D10028"
self.clean = "CLEAN"
self.low = "LOW RISK"
self.medium = "MODERATE RISK"
self.high = "HIGH RISK"
self.critical = "CRITICAL"
self.invalid = "INVALID"
self.suspicious = "SUSPICIOUS"
self.malware = "CRITICAL"
self.phishing = "CRITICAL"
self.disposable = "CRITICAL"
self.attribute = attribute
self.misp_event = MISPEvent()
self.misp_event.add_attribute(**attribute)
self.ipqs_object = MISPObject('IPQS Fraud and Risk Scoring Object')
self.ipqs_object.template_uuid = "57d066e6-6d66-42a7-a1ad-e075e39b2b5e"
self.ipqs_object.template_id = "1"
self.ipqs_object.description = "IPQS Fraud and Risk Scoring Data"
setattr(self.ipqs_object, 'meta-category', 'network')
description = (
"An object containing the enriched attribute and "
"related entities from IPQualityScore."
)
self.ipqs_object.from_dict(
**{"meta-category": "misc", "description": description, "distribution": DEFAULT_DISTRIBUTION_SETTING}
)
temp_attr = MISPAttribute()
temp_attr.from_dict(**attribute)
self.enriched_attribute = MISPAttribute()
self.enriched_attribute.from_dict(
**{"value": temp_attr.value, "type": temp_attr.type, "distribution": DEFAULT_DISTRIBUTION_SETTING}
)
self.ipqs_object.distribution = DEFAULT_DISTRIBUTION_SETTING
self.ip_data_items = [
'fraud_score',
'country_code',
'region',
'city',
'zip_code',
'ISP',
'ASN',
'organization',
'is_crawler',
'timezone',
'mobile',
'host',
'proxy',
'vpn',
'tor',
'active_vpn',
'active_tor',
'recent_abuse',
'bot_status',
'connection_type',
'abuse_velocity',
'latitude',
'longitude'
]
self.ip_data_items_friendly_names = {
'fraud_score': 'IPQS: Fraud Score',
'country_code': 'IPQS: Country Code',
'region': 'IPQS: Region',
'city': 'IPQS: City',
'zip_code': 'IPQS: Zip Code',
'ISP': 'IPQS: ISP',
'ASN': 'IPQS: ASN',
'organization': 'IPQS: Organization',
'is_crawler': 'IPQS: Is Crawler',
'timezone': 'IPQS: Timezone',
'mobile': 'IPQS: Mobile',
'host': 'IPQS: Host',
'proxy': 'IPQS: Proxy',
'vpn': 'IPQS: VPN',
'tor': 'IPQS: TOR',
'active_vpn': 'IPQS: Active VPN',
'active_tor': 'IPQS: Active TOR',
'recent_abuse': 'IPQS: Recent Abuse',
'bot_status': 'IPQS: Bot Status',
'connection_type': 'IPQS: Connection Type',
'abuse_velocity': 'IPQS: Abuse Velocity',
'latitude': 'IPQS: Latitude',
'longitude': 'IPQS: Longitude'
}
self.url_data_items = [
'unsafe',
'domain',
'ip_address',
'server',
'domain_rank',
'dns_valid',
'parking',
'spamming',
'malware',
'phishing',
'suspicious',
'adult',
'risk_score',
'category',
'domain_age'
]
self.url_data_items_friendly_names = {
'unsafe': 'IPQS: Unsafe',
'domain': 'IPQS: Domain',
'ip_address': 'IPQS: IP Address',
'server': 'IPQS: Server',
'domain_rank': 'IPQS: Domain Rank',
'dns_valid': 'IPQS: DNS Valid',
'parking': 'IPQS: Parking',
'spamming': 'IPQS: Spamming',
'malware': 'IPQS: Malware',
'phishing': 'IPQS: Phishing',
'suspicious': 'IPQS: Suspicious',
'adult': 'IPQS: Adult',
'risk_score': 'IPQS: Risk Score',
'category': 'IPQS: Category',
'domain_age': 'IPQS: Domain Age'
}
self.email_data_items = [
'valid',
'disposable',
'smtp_score',
'overall_score',
'first_name',
'generic',
'common',
'dns_valid',
'honeypot',
'deliverability',
'frequent_complainer',
'spam_trap_score',
'catch_all',
'timed_out',
'suspect',
'recent_abuse',
'fraud_score',
'suggested_domain',
'leaked',
'sanitized_email',
'domain_age',
'first_seen'
]
self.email_data_items_friendly_names = {
'valid': 'IPQS: Valid',
'disposable': 'IPQS: Disposable',
'smtp_score': 'IPQS: SMTP Score',
'overall_score': 'IPQS: Overall Score',
'first_name': 'IPQS: First Name',
'generic': 'IPQS: Generic',
'common': 'IPQS: Common',
'dns_valid': 'IPQS: DNS Valid',
'honeypot': 'IPQS: Honeypot',
'deliverability': 'IPQS: Deliverability',
'frequent_complainer': 'IPQS: Frequent Complainer',
'spam_trap_score': 'IPQS: Spam Trap Score',
'catch_all': 'IPQS: Catch All',
'timed_out': 'IPQS: Timed Out',
'suspect': 'IPQS: Suspect',
'recent_abuse': 'IPQS: Recent Abuse',
'fraud_score': 'IPQS: Fraud Score',
'suggested_domain': 'IPQS: Suggested Domain',
'leaked': 'IPQS: Leaked',
'sanitized_email': 'IPQS: Sanitized Email',
'domain_age': 'IPQS: Domain Age',
'first_seen': 'IPQS: First Seen'
}
self.phone_data_items = [
'formatted',
'local_format',
'valid',
'fraud_score',
'recent_abuse',
'VOIP',
'prepaid',
'risky',
'active',
'carrier',
'line_type',
'country',
'city',
'zip_code',
'region',
'dialing_code',
'active_status',
'leaked',
'name',
'timezone',
'do_not_call',
]
self.phone_data_items_friendly_names = {
'formatted': 'IPQS: Formatted',
'local_format': 'IPQS: Local Format',
'valid': 'IPQS: Valid',
'fraud_score': 'IPQS: Fraud Score',
'recent_abuse': 'IPQS: Recent Abuse',
'VOIP': 'IPQS: VOIP',
'prepaid': 'IPQS: Prepaid',
'risky': 'IPQS: Risky',
'active': 'IPQS: Active',
'carrier': 'IPQS: Carrier',
'line_type': 'IPQS: Line Type',
'country': 'IPQS: Country',
'city': 'IPQS: City',
'zip_code': 'IPQS: Zip Code',
'region': 'IPQS: Region',
'dialing_code': 'IPQS: Dialing Code',
'active_status': 'IPQS: Active Status',
'leaked': 'IPQS: Leaked',
'name': 'IPQS: Name',
'timezone': 'IPQS: Timezone',
'do_not_call': 'IPQS: Do Not Call',
}
self.timestamp_items_friendly_name = {
'human': ' Human',
'timestamp': ' Timestamp',
'iso': ' ISO'
}
self.timestamp_items = [
'human',
'timestamp',
'iso'
]
def criticality_color(self, criticality) -> str:
"""method which maps the color to the criticality level"""
mapper = {
self.clean: self.rf_grey,
self.low: self.rf_grey,
self.medium: self.rf_yellow,
self.suspicious: self.rf_yellow,
self.high: self.rf_red,
self.critical: self.rf_red,
self.invalid: self.rf_red,
self.disposable: self.rf_red,
self.malware: self.rf_red,
self.phishing: self.rf_red
}
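# Any level not listed above (e.g. an empty string) falls back to rf_white.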
return mapper.get(criticality, self.rf_white)
def add_tag(self, tag_name: str, hex_color: str = None) -> None:
"""Helper method for adding a tag to the enriched attribute."""
tag = MISPTag()
tag_properties = {"name": tag_name}
if hex_color:
tag_properties["colour"] = hex_color
tag.from_dict(**tag_properties)
self.enriched_attribute.add_tag(tag)
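# e.g. self.add_tag('IPQS:VERDICT="CLEAN"', self.rf_grey) attaches a grey verdict tag to the enriched attribute.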
def ipqs_parser(self, query_response, enrich_type):
""" helper method to call the enrichment function according to the type"""
if enrich_type == IP_ENRICH:
self.ip_reputation_data(query_response)
elif enrich_type == URL_ENRICH:
self.url_reputation_data(query_response)
elif enrich_type == EMAIL_ENRICH:
self.email_reputation_data(query_response)
elif enrich_type == PHONE_ENRICH:
self.phone_reputation_data(query_response)
def ip_reputation_data(self, query_response):
"""method to create object for IP address"""
comment = "Results from IPQualityScore IP Reputation API"
for ip_data_item in self.ip_data_items:
if ip_data_item in query_response:
data_item = self.ip_data_items_friendly_names[ip_data_item]
data_item_value = str(query_response[ip_data_item])
self.ipqs_object.add_attribute(**parse_attribute(comment, data_item, data_item_value))
if ip_data_item == "fraud_score":
fraud_score = int(data_item_value)
self.ip_address_risk_scoring(fraud_score)
self.ipqs_object.add_attribute(
"Enriched attribute", **self.enriched_attribute
)
self.ipqs_object.add_reference(self.attribute['uuid'], 'related-to')
self.misp_event.add_object(self.ipqs_object)
def ip_address_risk_scoring(self, score):
"""method to create calculate verdict for IP Address"""
risk_criticality = ""
if score == 100:
risk_criticality = self.critical
elif 85 <= score <= 99:
risk_criticality = self.high
elif 75 <= score <= 84:
risk_criticality = self.medium
elif 60 <= score <= 74:
risk_criticality = self.suspicious
elif score <= 59:
risk_criticality = self.clean
hex_color = self.criticality_color(risk_criticality)
tag_name = f'IPQS:VERDICT="{risk_criticality}"'
self.add_tag(tag_name, hex_color)
def url_reputation_data(self, query_response):
"""method to create object for URL/Domain"""
malware = False
phishing = False
risk_score = 0
comment = "Results from IPQualityScore Malicious URL Scanner API"
for url_data_item in self.url_data_items:
if url_data_item in query_response:
data_item_value = ""
if url_data_item == "domain_age":
for timestamp_item in self.timestamp_items:
data_item = self.url_data_items_friendly_names[url_data_item] + \
self.timestamp_items_friendly_name[timestamp_item]
data_item_value = str(query_response[url_data_item][timestamp_item])
self.ipqs_object.add_attribute(**parse_attribute(comment, data_item, data_item_value))
else:
data_item = self.url_data_items_friendly_names[url_data_item]
data_item_value = str(query_response[url_data_item])
self.ipqs_object.add_attribute(**parse_attribute(comment, data_item, data_item_value))
if url_data_item == "malware":
malware = data_item_value
if url_data_item == "phishing":
phishing = data_item_value
if url_data_item == "risk_score":
risk_score = int(data_item_value)
self.url_risk_scoring(risk_score, malware, phishing)
self.ipqs_object.add_attribute(
"Enriched attribute", **self.enriched_attribute
)
self.ipqs_object.add_reference(self.attribute['uuid'], 'related-to')
self.misp_event.add_object(self.ipqs_object)
def url_risk_scoring(self, score, malware, phishing):
"""method to create calculate verdict for URL/Domain"""
risk_criticality = ""
if malware == 'True':
risk_criticality = self.malware
elif phishing == 'True':
risk_criticality = self.phishing
elif score >= 90:
risk_criticality = self.high
elif 80 <= score <= 89:
risk_criticality = self.medium
elif 70 <= score <= 79:
risk_criticality = self.low
elif 55 <= score <= 69:
risk_criticality = self.suspicious
elif score <= 54:
risk_criticality = self.clean
hex_color = self.criticality_color(risk_criticality)
tag_name = f'IPQS:VERDICT="{risk_criticality}"'
self.add_tag(tag_name, hex_color)
def email_reputation_data(self, query_response):
"""method to create object for Email Address"""
comment = "Results from IPQualityScore Email Verification API"
disposable = False
valid = False
fraud_score = 0
for email_data_item in self.email_data_items:
if email_data_item in query_response:
data_item_value = ""
if email_data_item not in ("domain_age", "first_seen"):
data_item = self.email_data_items_friendly_names[email_data_item]
data_item_value = str(query_response[email_data_item])
self.ipqs_object.add_attribute(**parse_attribute(comment, data_item, data_item_value))
else:
for timestamp_item in self.timestamp_items:
data_item = self.email_data_items_friendly_names[email_data_item] + \
self.timestamp_items_friendly_name[timestamp_item]
data_item_value = str(query_response[email_data_item][timestamp_item])
self.ipqs_object.add_attribute(**parse_attribute(comment, data_item, data_item_value))
if email_data_item == "disposable":
disposable = data_item_value
if email_data_item == "valid":
valid = data_item_value
if email_data_item == "fraud_score":
fraud_score = int(data_item_value)
self.email_address_risk_scoring(fraud_score, disposable, valid)
self.ipqs_object.add_attribute(
"Enriched attribute", **self.enriched_attribute
)
self.ipqs_object.add_reference(self.attribute['uuid'], 'related-to')
self.misp_event.add_object(self.ipqs_object)
def email_address_risk_scoring(self, score, disposable, valid):
"""method to create calculate verdict for Email Address"""
risk_criticality = ""
if disposable == "True":
risk_criticality = self.disposable
elif valid == "False":
risk_criticality = self.invalid
elif score == 100:
risk_criticality = self.high
elif 88 <= score <= 99:
risk_criticality = self.medium
elif 80 <= score <= 87:
risk_criticality = self.low
elif score <= 79:
risk_criticality = self.clean
hex_color = self.criticality_color(risk_criticality)
tag_name = f'IPQS:VERDICT="{risk_criticality}"'
self.add_tag(tag_name, hex_color)
def phone_reputation_data(self, query_response):
"""method to create object for Phone Number"""
fraud_score = 0
valid = False
active = False
comment = "Results from IPQualityScore Phone Number Validation API"
for phone_data_item in self.phone_data_items:
if phone_data_item in query_response:
data_item = self.phone_data_items_friendly_names[phone_data_item]
data_item_value = str(query_response[phone_data_item])
self.ipqs_object.add_attribute(**parse_attribute(comment, data_item, data_item_value))
if phone_data_item == "active":
active = data_item_value
if phone_data_item == "valid":
valid = data_item_value
if phone_data_item == "fraud_score":
fraud_score = int(data_item_value)
self.phone_address_risk_scoring(fraud_score, valid, active)
self.ipqs_object.add_attribute(
"Enriched attribute", **self.enriched_attribute
)
self.ipqs_object.add_reference(self.attribute['uuid'], 'related-to')
self.misp_event.add_object(self.ipqs_object)
def phone_address_risk_scoring(self, score, valid, active):
"""method to create calculate verdict for Phone Number"""
risk_criticality = ""
if valid == "False":
risk_criticality = self.medium
elif active == "False":
risk_criticality = self.medium
elif 90 <= score <= 100:
risk_criticality = self.high
elif 80 <= score <= 89:
risk_criticality = self.low
elif 50 <= score <= 79:
risk_criticality = self.suspicious
elif score <= 49:
risk_criticality = self.clean
hex_color = self.criticality_color(risk_criticality)
tag_name = f'IPQS:VERDICT="{risk_criticality}"'
self.add_tag(tag_name, hex_color)
def get_results(self):
"""returns the dictionary object to MISP Instance"""
event = json.loads(self.misp_event.to_json())
results = {key: event[key] for key in ('Attribute', 'Object')}
return {'results': results}
def handler(q=False):
"""The function which accepts a JSON document to expand the values and return a dictionary of the expanded
values. """
if q is False:
return False
request = json.loads(q)
# check if the apikey is provided
if not request.get('config') or not request['config'].get('apikey'):
misperrors['error'] = 'IPQualityScore apikey is missing'
return misperrors
apikey = request['config'].get('apikey')
# check attribute is added to the event
if not request.get('attribute') or not check_input_attribute(request['attribute']):
return {'error': f'{standard_error_message}, which should contain at least a type, a value and an uuid.'}
attribute = request['attribute']
attribute_type = attribute['type']
attribute_value = attribute['value']
# check if the attribute type is supported by IPQualityScore
if attribute_type not in mispattributes['input']:
return {'error': 'Unsupported attribute type for IPQualityScore enrichment'}
request_handler = RequestHandler(apikey)
enrich_type = ""
if attribute_type in ip_query_input_type:
enrich_type = IP_ENRICH
json_response = request_handler.ipqs_lookup(IP_ENRICH, attribute_value)
elif attribute_type in url_query_input_type:
enrich_type = URL_ENRICH
json_response = request_handler.ipqs_lookup(URL_ENRICH, attribute_value)
elif attribute_type in email_query_input_type:
enrich_type = EMAIL_ENRICH
json_response = request_handler.ipqs_lookup(EMAIL_ENRICH, attribute_value)
elif attribute_type in phone_query_input_type:
enrich_type = PHONE_ENRICH
json_response = request_handler.ipqs_lookup(PHONE_ENRICH, attribute_value)
parser = IPQualityScoreParser(attribute)
parser.ipqs_parser(json_response, enrich_type)
return parser.get_results()
def introspection():
"""The function that returns a dict of the supported attributes (input and output) by your expansion module."""
return mispattributes
def version():
"""The function that returns a dict with the version and the associated meta-data including potential
configurations required of the module. """
moduleinfo['config'] = moduleconfig
return moduleinfo

View File

@ -0,0 +1,44 @@
#!/usr/bin/env python
import json
from jinja2.sandbox import SandboxedEnvironment
misperrors = {'error': 'Error'}
mispattributes = {'input': ['text'], 'output': ['text']}
moduleinfo = {'version': '0.1', 'author': 'Sami Mokaddem',
'description': 'Render the template with the data passed',
'module-type': ['expansion']}
default_template = '- Default template -'
def renderTemplate(data, template=default_template):
env = SandboxedEnvironment()
return env.from_string(template).render(data)
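# Example: renderTemplate({'name': 'MISP'}, 'Hello {{ name }}') -> 'Hello MISP'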
def handler(q=False):
if q is False:
return False
request = json.loads(q)
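# The 'text' attribute is expected to hold a JSON string (illustrative):
# {"template": "Hello {{ name }}", "data": {"name": "MISP"}}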
if request.get('text'):
data = request['text']
else:
return False
data = json.loads(data)
template = data.get('template', default_template)
templateData = data.get('data', {})
try:
rendered = renderTemplate(templateData, template)
except TypeError:
rendered = ''
r = {'results': [{'types': mispattributes['output'],
'values':[rendered]}]}
return r
def introspection():
return mispattributes
def version():
return moduleinfo

View File

@ -11,7 +11,7 @@ inputSource = ['link']
moduleinfo = {'version': '0.2', 'author': 'Christian Studer',
'description': 'Query Joe Sandbox API with a report URL to get the parsed data.',
'module-type': ['expansion']}
moduleconfig = ['apiurl', 'apikey', 'import_pe', 'import_mitre_attack']
moduleconfig = ['apiurl', 'apikey', 'import_executable', 'import_mitre_attack']
def handler(q=False):
@ -21,7 +21,7 @@ def handler(q=False):
apiurl = request['config'].get('apiurl') or 'https://jbxcloud.joesecurity.org/api'
apikey = request['config'].get('apikey')
parser_config = {
"import_pe": request["config"].get('import_pe', "false") == "true",
"import_executable": request["config"].get('import_executable', "false") == "true",
"mitre_attack": request["config"].get('import_mitre_attack', "false") == "true",
}

View File

@ -1,5 +1,7 @@
#!/usr/bin/env python3
"""
Deprecation notice: this module will be deprecated by December 2021, please use vmware_nsx module.
Module (type "expansion") to query a Lastline report from an analysis link.
"""
import json

View File

@ -1,5 +1,7 @@
#!/usr/bin/env python3
"""
Deprecation notice: this module will be deprecated by December 2021, please use vmware_nsx module.
Module (type "expansion") to submit files and URLs to Lastline for analysis.
"""
import base64

View File

@ -0,0 +1,239 @@
# Written by mohlcyber 13.08.2021
# MISP Module for McAfee MVISION Insights to query campaign details
import json
import logging
import requests
import sys
from . import check_input_attribute, standard_error_message
from pymisp import MISPAttribute, MISPEvent, MISPObject
misperrors = {'error': 'Error'}
mispattributes = {'input': ["md5", "sha1", "sha256"],
'format': 'misp_standard'}
# possible module-types: 'expansion', 'hover' or both
moduleinfo = {'version': '1', 'author': 'Martin Ohl',
'description': 'Lookup McAfee MVISION Insights Details',
'module-type': ['hover']}
# config fields that your code expects from the site admin
moduleconfig = ['api_key', 'client_id', 'client_secret']
class MVAPI():
def __init__(self, attribute, api_key, client_id, client_secret):
self.misp_event = MISPEvent()
self.attribute = MISPAttribute()
self.attribute.from_dict(**attribute)
self.misp_event.add_attribute(**self.attribute)
self.base_url = 'https://api.mvision.mcafee.com'
self.session = requests.Session()
self.api_key = api_key
auth = (client_id, client_secret)
self.logging()
self.auth(auth)
def logging(self):
self.logger = logging.getLogger('logs')
self.logger.setLevel('INFO')
handler = logging.StreamHandler()
formatter = logging.Formatter("%(asctime)s;%(levelname)s;%(message)s")
handler.setFormatter(formatter)
self.logger.addHandler(handler)
def auth(self, auth):
iam_url = "https://iam.mcafee-cloud.com/iam/v1.1/token"
headers = {
'x-api-key': self.api_key,
'Content-Type': 'application/vnd.api+json'
}
payload = {
"grant_type": "client_credentials",
"scope": "ins.user ins.suser ins.ms.r"
}
res = self.session.post(iam_url, headers=headers, auth=auth, data=payload)
if res.status_code != 200:
self.logger.error('Could not authenticate to get the IAM token: {0} - {1}'.format(res.status_code, res.text))
sys.exit()
else:
self.logger.info('Successfully authenticated.')
access_token = res.json()['access_token']
headers['Authorization'] = 'Bearer ' + access_token
self.session.headers = headers
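# Subsequent requests on this session reuse the x-api-key header and the Bearer token obtained above.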
def search_ioc(self):
filters = {
'filter[type][eq]': self.attribute.type,
'filter[value]': self.attribute.value,
'fields': 'id, type, value, coverage, uid, is_coat, is_sdb_dirty, category, comment, campaigns, threat, prevalence'
}
res = self.session.get(self.base_url + '/insights/v2/iocs', params=filters)
if res.ok:
if len(res.json()['data']) == 0:
self.logger.info('No hash details found in MVISION Insights.')
else:
self.logger.info('Successfully retrieved MVISION Insights details.')
self.logger.debug(res.text)
return res.json()
else:
self.logger.error('Error in search_ioc. HTTP {0} - {1}'.format(str(res.status_code), res.text))
sys.exit()
def prep_result(self, ioc):
res = ioc['data'][0]
results = []
# Parse out Attribute Category
category_attr = {
'type': 'text',
'object_relation': 'text',
'value': 'Attribute Category: {0}'.format(res['attributes']['category'])
}
results.append(category_attr)
# Parse out Attribute Comment
comment_attr = {
'type': 'text',
'object_relation': 'text',
'value': 'Attribute Comment: {0}'.format(res['attributes']['comment'])
}
results.append(comment_attr)
# Parse out Attribute Dat Coverage
cover_attr = {
'type': 'text',
'object_relation': 'text',
'value': 'Dat Version Coverage: {0}'.format(res['attributes']['coverage']['dat_version']['min'])
}
results.append(cover_attr)
# Parse out if Dirty
cover_attr = {
'type': 'text',
'object_relation': 'text',
'value': 'Is Dirty: {0}'.format(res['attributes']['is-sdb-dirty'])
}
results.append(cover_attr)
# Parse out targeted countries
countries_dict = []
countries = res['attributes']['prevalence']['countries']
for country in countries:
countries_dict.append(country['iso_code'])
country_attr = {
'type': 'text',
'object_relation': 'text',
'value': 'Targeted Countries: {0}'.format(countries_dict)
}
results.append(country_attr)
# Parse out targeted sectors
sectors_dict = []
sectors = res['attributes']['prevalence']['sectors']
for sector in sectors:
sectors_dict.append(sector['sector'])
sector_attr = {
'type': 'text',
'object_relation': 'text',
'value': 'Targeted Sectors: {0}'.format(sectors_dict)
}
results.append(sector_attr)
# Parse out Threat Classification
threat_class_attr = {
'type': 'text',
'object_relation': 'text',
'value': 'Threat Classification: {0}'.format(res['attributes']['threat']['classification'])
}
results.append(threat_class_attr)
# Parse out Threat Name
threat_name_attr = {
'type': 'text',
'object_relation': 'text',
'value': 'Threat Name: {0}'.format(res['attributes']['threat']['name'])
}
results.append(threat_name_attr)
# Parse out Threat Severity
threat_sev_attr = {
'type': 'text',
'object_relation': 'text',
'value': 'Threat Severity: {0}'.format(res['attributes']['threat']['severity'])
}
results.append(threat_sev_attr)
# Parse out Attribute ID
attr_id = {
'type': 'text',
'object_relation': 'text',
'value': 'Attribute ID: {0}'.format(res['id'])
}
results.append(attr_id)
# Parse out Campaign Relationships
campaigns = ioc['included']
for campaign in campaigns:
campaign_attr = {
'type': 'campaign-name',
'object_relation': 'campaign-name',
'value': campaign['attributes']['name']
}
results.append(campaign_attr)
mv_insights_obj = MISPObject(name='MVISION Insights Details')
for mvi_res in results:
mv_insights_obj.add_attribute(**mvi_res)
mv_insights_obj.add_reference(self.attribute.uuid, 'mvision-insights-details')
self.misp_event.add_object(mv_insights_obj)
event = json.loads(self.misp_event.to_json())
results_mvi = {key: event[key] for key in ('Attribute', 'Object') if (key in event and event[key])}
return {'results': results_mvi}
def handler(q=False):
if q is False:
return False
request = json.loads(q)
if not request.get('config') or not request['config'].get('api_key') or not request['config'].get('client_id') or not request['config'].get('client_secret'):
misperrors['error'] = "Please provide MVISION API Key, Client ID and Client Secret."
return misperrors
if request['attribute']['type'] not in mispattributes['input']:
return {'error': 'Unsupported attribute type. Please use {0}'.format(mispattributes['input'])}
api_key = request['config']['api_key']
client_id = request['config']['client_id']
client_secret = request['config']['client_secret']
attribute = request['attribute']
mvi = MVAPI(attribute, api_key, client_id, client_secret)
res = mvi.search_ioc()
return mvi.prep_result(res)
def introspection():
return mispattributes
def version():
moduleinfo['config'] = moduleconfig
return moduleinfo

View File

@ -0,0 +1,129 @@
import json
import requests
from . import check_input_attribute, standard_error_message
from pymisp import MISPEvent, MISPObject
misperrors = {'error': 'Error'}
mispattributes = {'input': ['ip-src', 'ip-src|port', 'ip-dst', 'ip-dst|port'], 'format': 'misp_standard'}
moduleinfo = {'version': '1', 'author': 'Jeroen Pinoy',
'description': "An expansion module to enrich an ip with geolocation and asn information from an mmdb server "
"such as ip.circl.lu.",
'module-type': ['expansion', 'hover']}
moduleconfig = ["custom_API", "db_source_filter"]
mmdblookup_url = 'https://ip.circl.lu/'
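# Illustrative lookup: a query for 8.8.8.8 hits the '/geolookup/8.8.8.8' endpoint on ip.circl.lu and
# expects a list of per-database entries with 'country', 'country_info' and 'meta' keys, parsed below.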
class MmdbLookupParser():
def __init__(self, attribute, mmdblookupresult, api_url):
self.attribute = attribute
self.mmdblookupresult = mmdblookupresult
self.api_url = api_url
self.misp_event = MISPEvent()
self.misp_event.add_attribute(**attribute)
def get_result(self):
event = json.loads(self.misp_event.to_json())
results = {key: event[key] for key in ('Attribute', 'Object') if (key in event and event[key])}
return {'results': results}
def parse_mmdblookup_information(self):
# Some databases may have a hit while others don't, so check whether each entry is empty
for result_entry in self.mmdblookupresult:
if result_entry['country_info']:
mmdblookup_object = MISPObject('geolocation')
mmdblookup_object.add_attribute('country',
**{'type': 'text', 'value': result_entry['country_info']['Country']})
mmdblookup_object.add_attribute('countrycode',
**{'type': 'text', 'value': result_entry['country']['iso_code']})
mmdblookup_object.add_attribute('latitude',
**{'type': 'float',
'value': result_entry['country_info']['Latitude (average)']})
mmdblookup_object.add_attribute('longitude',
**{'type': 'float',
'value': result_entry['country_info']['Longitude (average)']})
mmdblookup_object.add_attribute('text',
**{'type': 'text',
'value': 'db_source: {}. build_db: {}. Latitude and longitude are country average.'.format(
result_entry['meta']['db_source'],
result_entry['meta']['build_db'])})
mmdblookup_object.add_reference(self.attribute['uuid'], 'related-to')
self.misp_event.add_object(mmdblookup_object)
if 'AutonomousSystemNumber' in result_entry['country']:
mmdblookup_object_asn = MISPObject('asn')
mmdblookup_object_asn.add_attribute('asn',
**{'type': 'text',
'value': result_entry['country'][
'AutonomousSystemNumber']})
mmdblookup_object_asn.add_attribute('description',
**{'type': 'text',
'value': 'ASNOrganization: {}. db_source: {}. build_db: {}.'.format(
result_entry['country'][
'AutonomousSystemOrganization'],
result_entry['meta']['db_source'],
result_entry['meta']['build_db'])})
mmdblookup_object_asn.add_reference(self.attribute['uuid'], 'related-to')
self.misp_event.add_object(mmdblookup_object_asn)
def check_url(url):
return "{}/".format(url) if not url.endswith('/') else url
def handler(q=False):
if q is False:
return False
request = json.loads(q)
if not request.get('attribute') or not check_input_attribute(request['attribute']):
return {'error': f'{standard_error_message}, which should contain at least a type, a value and an uuid.'}
attribute = request['attribute']
if attribute.get('type') == 'ip-src':
toquery = attribute['value']
elif attribute.get('type') == 'ip-src|port':
toquery = attribute['value'].split('|')[0]
elif attribute.get('type') == 'ip-dst':
toquery = attribute['value']
elif attribute.get('type') == 'ip-dst|port':
toquery = attribute['value'].split('|')[0]
else:
misperrors['error'] = 'There is no attribute of type ip-src or ip-dst provided as input'
return misperrors
api_url = check_url(request['config']['custom_API']) if 'config' in request and request['config'].get(
'custom_API') else mmdblookup_url
r = requests.get("{}/geolookup/{}".format(api_url, toquery))
if r.status_code == 200:
mmdblookupresult = r.json()
if not mmdblookupresult or len(mmdblookupresult) == 0:
misperrors['error'] = 'Empty result returned by server'
return misperrors
if 'config' in request and request['config'].get('db_source_filter'):
db_source_filter = request['config'].get('db_source_filter')
mmdblookupresult = [entry for entry in mmdblookupresult if entry['meta']['db_source'] == db_source_filter]
if not mmdblookupresult or len(mmdblookupresult) == 0:
misperrors['error'] = 'There was no result with the selected db_source'
return misperrors
# The server might return one or several entries which could all be empty, so we check below
# that there is at least one non-empty result.
empty_result = True
for lookup_result_entry in mmdblookupresult:
if lookup_result_entry['country_info']:
empty_result = False
break
if empty_result:
misperrors['error'] = 'Empty result returned by server'
return misperrors
else:
misperrors['error'] = 'API not accessible - http status code {} was returned'.format(r.status_code)
return misperrors
parser = MmdbLookupParser(attribute, mmdblookupresult, api_url)
parser.parse_mmdblookup_information()
result = parser.get_result()
return result
def introspection():
return mispattributes
def version():
moduleinfo['config'] = moduleconfig
return moduleinfo
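As a quick sanity check outside MISP, the geolookup endpoint used by this module can be queried directly. A minimal sketch, assuming the public ip.circl.lu instance is reachable and returns the list-of-entries structure that MmdbLookupParser expects:

import requests

ip = '8.8.8.8'  # placeholder value
r = requests.get('https://ip.circl.lu/geolookup/{}'.format(ip), timeout=10)
r.raise_for_status()
for entry in r.json():
    # Entries without a hit come back empty, so only print the populated ones.
    if entry.get('country_info'):
        print(entry['meta']['db_source'],
              entry['country_info'].get('Country'),
              entry.get('country', {}).get('iso_code'))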

View File

@ -0,0 +1,142 @@
import json
import sys
import base64
#from distutils.util import strtobool
import io
import zipfile
from pymisp import PyMISP
from mwdblib import MWDB
misperrors = {'error': 'Error'}
mispattributes = {'input': ['attachment', 'malware-sample'], 'output': ['link']}
moduleinfo = {'version': '1', 'author': 'Koen Van Impe',
'description': 'Module to push malware samples to a MWDB instance',
'module-type': ['expansion']}
moduleconfig = ['mwdb_apikey', 'mwdb_url', 'mwdb_misp_attribute', 'mwdb_public', 'include_tags_event', 'include_tags_attribute']
pymisp_keys_file = "/var/www/MISP/PyMISP/"
mwdb_public_default = True
"""
An expansion module to push malware samples to a MWDB (https://github.com/CERT-Polska/mwdb-core) instance.
This module does not push samples to a sandbox. This can be achieved via Karton (connected to the MWDB).
Does:
- Upload of attachment or malware sample to MWDB
- Tags of events and/or attributes are added to MWDB.
- Comment of the MISP attribute is added to MWDB.
- A link back to the MISP event is added to MWDB via the MWDB attribute.
- A link to the MWDB attribute is added as an enriched attribute to the MISP event.
Requires:
- mwdblib installed (pip install mwdblib)
- (optional) keys.py file to add tags of events/attributes to MWDB
- (optional) MWDB "attribute" created for the link back to MISP (defined in mwdb_misp_attribute)
"""
def handler(q=False):
if q is False:
return False
request = json.loads(q)
try:
data = request.get("data")
if 'malware-sample' in request:
# malicious samples are encrypted with zip (password infected) and then base64 encoded
sample_filename = request.get("malware-sample").split("|", 1)[0]
data = base64.b64decode(data)
fl = io.BytesIO(data)
zf = zipfile.ZipFile(fl)
sample_hashname = zf.namelist()[0]
data = zf.read(sample_hashname, b"infected")
zf.close()
elif 'attachment' in request:
# All attachments get base64 encoded
sample_filename = request.get("attachment")
data = base64.b64decode(data)
else:
misperrors['error'] = "No malware sample or attachment supplied"
return misperrors
except Exception:
misperrors['error'] = "Unable to process submited sample data"
return misperrors
if (request["config"].get("mwdb_apikey") is None) or (request["config"].get("mwdb_url") is None):
misperrors["error"] = "Missing MWDB API key or server URL"
return misperrors
mwdb_misp_attribute = request["config"].get("mwdb_misp_attribute")
mwdb_public = request["config"].get("mwdb_public", mwdb_public_default)
include_tags_event = request["config"].get("include_tags_event")
include_tags_attribute = request["config"].get("include_tags_attribute")
misp_event_id = request.get("event_id")
misp_attribute_uuid = request.get("attribute_uuid")
misp_attribute_comment = ""
mwdb_tags = []
misp_info = ""
try:
if include_tags_event:
sys.path.append(pymisp_keys_file)
from keys import misp_url, misp_key, misp_verifycert
misp = PyMISP(misp_url, misp_key, misp_verifycert, False)
misp_event = misp.get_event(misp_event_id)
if "Event" in misp_event:
misp_info = misp_event["Event"]["info"]
if "Tag" in misp_event["Event"]:
tags = misp_event["Event"]["Tag"]
for tag in tags:
if "misp-galaxy" not in tag["name"]:
mwdb_tags.append(tag["name"])
if include_tags_attribute:
sys.path.append(pymisp_keys_file)
from keys import misp_url, misp_key, misp_verifycert
misp = PyMISP(misp_url, misp_key, misp_verifycert, False)
misp_attribute = misp.get_attribute(misp_attribute_uuid)
if "Attribute" in misp_attribute:
if "Tag" in misp_attribute["Attribute"]:
tags = misp_attribute["Attribute"]["Tag"]
for tag in tags:
if "misp-galaxy" not in tag["name"]:
mwdb_tags.append(tag["name"])
misp_attribute_comment = misp_attribute["Attribute"]["comment"]
except Exception:
misperrors['error'] = "Unable to read PyMISP (keys.py) configuration file"
return misperrors
try:
mwdb = MWDB(api_key=request["config"].get("mwdb_apikey"), api_url=request["config"].get("mwdb_url"))
if mwdb_misp_attribute and len(mwdb_misp_attribute) > 0:
metakeys = {mwdb_misp_attribute: misp_event_id}
else:
metakeys = False
file_object = mwdb.upload_file(sample_filename, data, metakeys=metakeys, public=mwdb_public)
for tag in mwdb_tags:
file_object.add_tag(tag)
if len(misp_attribute_comment) < 1:
misp_attribute_comment = "MISP attribute {}".format(misp_attribute_uuid)
file_object.add_comment(misp_attribute_comment)
if misp_info:
file_object.add_comment("Fetched from event {} - {}".format(misp_event_id, misp_info))
mwdb_link = request["config"].get("mwdb_url").replace("/api", "/file/") + "{}".format(file_object.md5)
except Exception:
misperrors['error'] = "Unable to send sample to MWDB instance"
return misperrors
r = {'results': [{'types': 'link', 'values': mwdb_link, 'comment': 'Link to MWDB sample'}]}
return r
def introspection():
return mispattributes
def version():
moduleinfo['config'] = moduleconfig
return moduleinfo
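A minimal sketch of a request for the attachment branch of the handler above; the base64 step mirrors the comment that all attachments arrive base64 encoded, and the API key, URL and identifiers are placeholders:

import base64
import json

payload = {
    'config': {
        'mwdb_apikey': 'MWDB-API-KEY',           # placeholder
        'mwdb_url': 'https://mwdb.example/api'   # placeholder
    },
    'attachment': 'suspicious.txt',
    'data': base64.b64encode(b'demo content').decode(),
    'event_id': '1234',
    'attribute_uuid': '00000000-0000-0000-0000-000000000000'
}
q = json.dumps(payload)  # what MISP would hand to handler(q)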

View File

@ -6,14 +6,21 @@ import pytesseract
misperrors = {'error': 'Error'}
mispattributes = {'input': ['attachment'],
'output': ['freetext', 'text']}
moduleinfo = {'version': '0.1', 'author': 'Sascha Rommelfangen',
'output': ['freetext']}
moduleinfo = {'version': '0.2', 'author': 'Sascha Rommelfangen',
'description': 'OCR decoder',
'module-type': ['expansion']}
moduleconfig = []
def filter_decoded(decoded):
for line in decoded.split('\n'):
decoded_line = line.strip('\t\x0b\x0c\r ')
if decoded_line:
yield decoded_line
def handler(q=False):
if q is False:
return False
@ -31,9 +38,16 @@ def handler(q=False):
image = img_array
image = cv2.imdecode(img_array, cv2.IMREAD_COLOR)
try:
decoded = pytesseract.image_to_string(image)
return {'results': [{'types': ['freetext'], 'values': decoded, 'comment': "OCR from file " + filename},
{'types': ['text'], 'values': decoded, 'comment': "ORC from file " + filename}]}
decoded = pytesseract.image_to_string(cv2.cvtColor(image, cv2.COLOR_BGR2RGB))
return {
'results': [
{
'types': ['freetext'],
'values': list(filter_decoded(decoded)),
'comment': f"OCR from file {filename}"
}
]
}
except Exception as e:
print(e)
err = "Couldn't analyze file type. Only images are supported right now."

View File

@ -4,6 +4,7 @@ import np
import ezodf
import pandas_ods_reader
import io
import logging
misperrors = {'error': 'Error'}
mispattributes = {'input': ['attachment'],
@ -35,13 +36,12 @@ def handler(q=False):
num_sheets = len(doc.sheets)
try:
for i in range(0, num_sheets):
ods = pandas_ods_reader.read_ods(ods_file, i, headers=False)
ods = pandas_ods_reader.algo.read_data(pandas_ods_reader.parsers.ods, ods_file, i, headers=False)
ods_content = ods_content + "\n" + ods.to_string(max_rows=None)
print(ods_content)
return {'results': [{'types': ['freetext'], 'values': ods_content, 'comment': ".ods-to-text from file " + filename},
{'types': ['text'], 'values': ods_content, 'comment': ".ods-to-text from file " + filename}]}
except Exception as e:
print(e)
logging.exception(e)
err = "Couldn't analyze file as .ods. Error was: " + str(e)
misperrors['error'] = err
return misperrors

View File

@ -1,6 +1,9 @@
# -*- coding: utf-8 -*-
import json
from pymisp import MISPEvent, MISPObject
try:
from onyphe import Onyphe
except ImportError:
@ -9,9 +12,10 @@ except ImportError:
misperrors = {'error': 'Error'}
mispattributes = {'input': ['ip-src', 'ip-dst', 'hostname', 'domain'],
'output': ['hostname', 'domain', 'ip-src', 'ip-dst', 'url']}
'output': ['hostname', 'domain', 'ip-src', 'ip-dst', 'url'],
'format': 'misp_standard'}
# possible module-types: 'expansion', 'hover' or both
moduleinfo = {'version': '1', 'author': 'Sebastien Larinier @sebdraven',
moduleinfo = {'version': '2', 'author': 'Sebastien Larinier @sebdraven',
'description': 'Query on Onyphe',
'module-type': ['expansion', 'hover']}
@ -19,84 +23,205 @@ moduleinfo = {'version': '1', 'author': 'Sebastien Larinier @sebdraven',
moduleconfig = ['apikey']
class OnypheClient:
def __init__(self, api_key, attribute):
self.onyphe_client = Onyphe(api_key=api_key)
self.attribute = attribute
self.misp_event = MISPEvent()
self.misp_event.add_attribute(**attribute)
def get_results(self):
event = json.loads(self.misp_event.to_json())
results = {key: event[key]
for key in ('Attribute', 'Object') if key in event}
return results
def get_query_onyphe(self):
if self.attribute['type'] == 'ip-src' or self.attribute['type'] == 'ip-dst':
self.__summary_ip()
if self.attribute['type'] == 'domain':
self.__summary_domain()
if self.attribute['type'] == 'hostname':
self.__summary_hostname()
def __summary_ip(self):
results = self.onyphe_client.summary_ip(self.attribute['value'])
if 'results' in results:
for r in results['results']:
if 'domain' in r:
domain = r['domain']
if type(domain) == list:
for d in domain:
self.__get_object_domain_ip(d, 'domain')
elif type(domain) == str:
self.__get_object_domain_ip(domain, 'domain')
if 'hostname' in r:
hostname = r['hostname']
if type(hostname) == list:
for d in hostname:
self.__get_object_domain_ip(d, 'domain')
elif type(hostname) == str:
self.__get_object_domain_ip(hostname, 'domain')
if 'issuer' in r:
self.__get_object_certificate(r)
def __summary_domain(self):
results = self.onyphe_client.summary_domain(self.attribute['value'])
if 'results' in results:
for r in results['results']:
for domain in r.get('domain'):
self.misp_event.add_attribute('domain', domain)
for hostname in r.get('hostname'):
self.misp_event.add_attribute('hostname', hostname)
if 'ip' in r:
if type(r['ip']) is str:
self.__get_object_domain_ip(r['ip'], 'ip')
if type(r['ip']) is list:
for ip in r['ip']:
self.__get_object_domain_ip(ip, 'ip')
if 'issuer' in r:
self.__get_object_certificate(r)
def __summary_hostname(self):
results = self.onyphe_client.summary_hostname(self.attribute['value'])
if 'results' in results:
for r in results['results']:
if 'domain' in r:
if type(r['domain']) is str:
self.misp_event.add_attribute(
'domain', r['domain'])
if type(r['domain']) is list:
for domain in r['domain']:
self.misp_event.add_attribute('domain', domain)
if 'hostname' in r:
if type(r['hostname']) is str:
self.misp_event.add_attribute(
'hostname', r['hostname'])
if type(r['hostname']) is list:
for hostname in r['hostname']:
self.misp_event.add_attribute(
'hostname', hostname)
if 'ip' in r:
if type(r['ip']) is str:
self.__get_object_domain_ip(r['ip'], 'ip')
if type(r['ip']) is list:
for ip in r['ip']:
self.__get_object_domain_ip(ip, 'ip')
if 'issuer' in r:
self.__get_object_certificate(r)
if 'cve' in r:
if type(r['cve']) is list:
for cve in r['cve']:
self.__get_object_cve(r, cve)
def __get_object_certificate(self, r):
object_certificate = MISPObject('x509')
object_certificate.add_attribute('ip', self.attribute['value'])
object_certificate.add_attribute('serial-number', r['serial'])
object_certificate.add_attribute(
'x509-fingerprint-sha256', r['fingerprint']['sha256'])
object_certificate.add_attribute(
'x509-fingerprint-sha1', r['fingerprint']['sha1'])
object_certificate.add_attribute(
'x509-fingerprint-md5', r['fingerprint']['md5'])
signature = r['signature']['algorithm']
value = ''
if 'sha256' in signature and 'RSA' in signature:
value = 'SHA256_WITH_RSA_ENCRYPTION'
elif 'sha1' in signature and 'RSA' in signature:
value = 'SHA1_WITH_RSA_ENCRYPTION'
if value:
object_certificate.add_attribute('signature_algorithm', value)
object_certificate.add_attribute(
'pubkey-info-algorithm', r['publickey']['algorithm'])
if 'exponent' in r['publickey']:
object_certificate.add_attribute(
'pubkey-info-exponent', r['publickey']['exponent'])
if 'length' in r['publickey']:
object_certificate.add_attribute(
'pubkey-info-size', r['publickey']['length'])
object_certificate.add_attribute('issuer', r['issuer']['commonname'])
object_certificate.add_attribute(
'validity-not-before', r['validity']['notbefore'])
object_certificate.add_attribute(
'validity-not-after', r['validity']['notafter'])
object_certificate.add_reference(self.attribute['uuid'], 'related-to')
self.misp_event.add_object(object_certificate)
def __get_object_domain_ip(self, obs, relation):
objet_domain_ip = MISPObject('domain-ip')
objet_domain_ip.add_attribute(relation, obs)
relation_attr = self.__get_relation_attribute()
if relation_attr:
objet_domain_ip.add_attribute(
relation_attr, self.attribute['value'])
objet_domain_ip.add_reference(self.attribute['uuid'], 'related-to')
self.misp_event.add_object(objet_domain_ip)
def __get_relation_attribute(self):
if self.attribute['type'] == 'ip-src':
return 'ip'
elif self.attribute['type'] == 'ip-dst':
return 'ip'
elif self.attribute['type'] == 'domain':
return 'domain'
elif self.attribute['type'] == 'hostname':
return 'domain'
def __get_object_cve(self, item, cve):
attributes = []
object_cve = MISPObject('vulnerability')
object_cve.add_attribute('id', cve)
object_cve.add_attribute('state', 'Published')
if type(item['ip']) is list:
for ip in item['ip']:
attributes.extend(
list(filter(lambda x: x['value'] == ip, self.misp_event['Attribute'])))
for obj in self.misp_event['Object']:
attributes.extend(
list(filter(lambda x: x['value'] == ip, obj['Attribute'])))
if type(item['ip']) is str:
for obj in self.misp_event['Object']:
for att in obj['Attribute']:
if att['value'] == item['ip']:
object_cve.add_reference(obj['uuid'], 'cve')
self.misp_event.add_object(object_cve)
def handler(q=False):
if q:
request = json.loads(q)
attribute = request['attribute']
if not request.get('config') or not request['config'].get('apikey'):
misperrors['error'] = 'Onyphe authentication is missing'
return misperrors
api = Onyphe(request['config'].get('apikey'))
api_key = request['config'].get('apikey')
if not api:
misperrors['error'] = 'Onyphe Error instance api'
onyphe_client = OnypheClient(api_key, attribute)
onyphe_client.get_query_onyphe()
results = onyphe_client.get_results()
ip = ''
if request.get('ip-src'):
ip = request['ip-src']
elif request.get('ip-dst'):
ip = request['ip-dst']
else:
misperrors['error'] = "Unsupported attributes type"
return misperrors
return handle_expansion(api, ip, misperrors)
else:
return False
def handle_expansion(api, ip, misperrors):
result = api.ip(ip)
if result['status'] == 'nok':
misperrors['error'] = result['message']
return misperrors
# categories = list(set([item['@category'] for item in result['results']]))
result_filtered = {"results": []}
urls_pasties = []
asn_list = []
os_list = []
domains_resolver = []
domains_forward = []
for r in result['results']:
if r['@category'] == 'pastries':
if r['source'] == 'pastebin':
urls_pasties.append('https://pastebin.com/raw/%s' % r['key'])
elif r['@category'] == 'synscan':
asn_list.append(r['asn'])
os_target = r['os']
if os_target != 'Unknown':
os_list.append(r['os'])
elif r['@category'] == 'resolver' and r['type'] == 'reverse':
domains_resolver.append(r['reverse'])
elif r['@category'] == 'resolver' and r['type'] == 'forward':
domains_forward.append(r['forward'])
result_filtered['results'].append({'types': ['url'], 'values': urls_pasties,
'categories': ['External analysis']})
result_filtered['results'].append({'types': ['AS'], 'values': list(set(asn_list)),
'categories': ['Network activity']})
result_filtered['results'].append({'types': ['target-machine'],
'values': list(set(os_list)),
'categories': ['Targeting data']})
result_filtered['results'].append({'types': ['domain'],
'values': list(set(domains_resolver)),
'categories': ['Network activity'],
'comment': 'resolver to %s' % ip})
result_filtered['results'].append({'types': ['domain'],
'values': list(set(domains_forward)),
'categories': ['Network activity'],
'comment': 'forward to %s' % ip})
return result_filtered
return {'results': results}
def introspection():

View File

@ -0,0 +1,140 @@
import json
import requests
from . import check_input_attribute, standard_error_message
from collections import defaultdict
from pymisp import MISPEvent, MISPObject
misperrors = {'error': 'Error'}
mispattributes = {'input': ['ip-src', 'ip-dst', 'ssh-fingerprint'],
'format': 'misp_standard'}
moduleinfo = {'version': '1', 'author': 'Jean-Louis Huynen',
'description': 'An expansion module to enrich SSH key fingerprints and IP addresses with information collected by passive-ssh',
'module-type': ['expansion', 'hover']}
moduleconfig = ["custom_api_url", "api_user", "api_key"]
passivessh_url = 'https://passivessh.circl.lu/'
host_query = '/host/ssh'
fingerprint_query = '/fingerprint/all'
class PassivesshParser():
def __init__(self, attribute, passivesshresult):
self.attribute = attribute
self.passivesshresult = passivesshresult
self.misp_event = MISPEvent()
self.misp_event.add_attribute(**attribute)
self.references = defaultdict(list)
def get_result(self):
if self.references:
self.__build_references()
event = json.loads(self.misp_event.to_json())
results = {key: event[key] for key in (
'Attribute', 'Object') if (key in event and event[key])}
return {'results': results}
def parse_passivessh_information(self):
passivessh_object = MISPObject('passive-ssh')
if 'first_seen' in self.passivesshresult:
passivessh_object.add_attribute(
'first_seen', **{'type': 'datetime', 'value': self.passivesshresult['first_seen']})
if 'last_seen' in self.passivesshresult:
passivessh_object.add_attribute(
'last_seen', **{'type': 'datetime', 'value': self.passivesshresult['last_seen']})
if 'base64' in self.passivesshresult:
passivessh_object.add_attribute(
'base64', **{'type': 'text', 'value': self.passivesshresult['base64']})
if 'keys' in self.passivesshresult:
for key in self.passivesshresult['keys']:
passivessh_object.add_attribute(
'fingerprint', **{'type': 'ssh-fingerprint', 'value': key['fingerprint']})
if 'hosts' in self.passivesshresult:
for host in self.passivesshresult['hosts']:
passivessh_object.add_attribute(
'host', **{'type': 'ip-dst', 'value': host})
passivessh_object.add_reference(self.attribute['uuid'], 'related-to')
self.misp_event.add_object(passivessh_object)
def __build_references(self):
for object_uuid, references in self.references.items():
for misp_object in self.misp_event.objects:
if misp_object.uuid == object_uuid:
for reference in references:
misp_object.add_reference(**reference)
break
def check_url(url):
return "{}/".format(url) if not url.endswith('/') else url
def handler(q=False):
if q is False:
return False
request = json.loads(q)
api_url = check_url(request['config']['custom_api_url']) if request['config'].get(
'custom_api_url') else passivessh_url
if request['config'].get('api_user'):
api_user = request['config'].get('api_user')
else:
misperrors['error'] = 'passive-ssh user required'
return misperrors
if request['config'].get('api_key'):
api_key = request['config'].get('api_key')
else:
misperrors['error'] = 'passive-ssh password required'
return misperrors
if not request.get('attribute') or not check_input_attribute(request['attribute']):
return {'error': f'{standard_error_message}, which should contain at least a type, a value and an uuid.'}
attribute = request['attribute']
if attribute.get('type') == 'ip-src':
type = host_query
elif attribute.get('type') == 'ip-dst':
type = host_query
elif attribute.get('type') == 'ssh-fingerprint':
type = fingerprint_query
else:
misperrors['error'] = 'Unsupported attribute type: an ip-src, ip-dst or ssh-fingerprint attribute is required.'
return misperrors
r = requests.get("{}{}/{}".format(api_url, type,
attribute['value']), auth=(api_user, api_key))
if r.status_code == 200:
passivesshresult = r.json()
if not passivesshresult:
misperrors['error'] = 'Empty result'
return misperrors
elif r.status_code == 404:
misperrors['error'] = 'Non-existing host or fingerprint'
return misperrors
else:
misperrors['error'] = 'API not accessible'
return misperrors
parser = PassivesshParser(attribute, passivesshresult)
parser.parse_passivessh_information()
result = parser.get_result()
return result
def introspection():
return mispattributes
def version():
moduleinfo['config'] = moduleconfig
return moduleinfo
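Outside MISP, the same lookup can be sketched directly against the passive-ssh API used above; api_user, api_key and the IP are placeholders, and the /host/ssh path comes from host_query:

import requests

api_user = 'user'        # placeholder credential
api_key = 'password'     # placeholder credential
ip = '203.0.113.1'       # placeholder host

r = requests.get('https://passivessh.circl.lu/host/ssh/{}'.format(ip),
                 auth=(api_user, api_key), timeout=10)
if r.status_code == 200:
    record = r.json()
    print(record.get('first_seen'), record.get('last_seen'))
    for key in record.get('keys', []):
        print(key['fingerprint'])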

View File

@ -0,0 +1,221 @@
import logging
import json
from pymisp import MISPAttribute, MISPEvent, MISPTag, MISPObject
from . import check_input_attribute, checking_error, standard_error_message
from qintel_helper import search_qsentry
logger = logging.getLogger('qintel_qsentry')
logger.setLevel(logging.DEBUG)
moduleinfo = {
'version': '1.0',
'author': 'Qintel, LLC',
'description': 'Query Qintel QSentry for ip intelligence',
'module-type': ['hover', 'expansion']
}
moduleconfig = ['token', 'remote']
misperrors = {'error': 'Error'}
mispattributes = {
'input': ['ip-src', 'ip-dst'],
'output': ['ip-src', 'ip-dst', 'AS', 'freetext'],
'format': 'misp_standard'
}
TAG_COLOR = {
'benign': '#27ae60',
'suspicious': '#e6a902',
'malicious': '#c0392b'
}
CLIENT_HEADERS = {
'User-Agent': f"MISP/{moduleinfo['version']}",
}
def _return_error(message):
misperrors['error'] = message
return misperrors
def _make_tags(enriched_attr, result):
for tag in result['tags']:
color = TAG_COLOR['suspicious']
if tag == 'criminal':
color = TAG_COLOR['malicious']
t = MISPTag()
t.from_dict(**{
'name': f'qintel:tag="{tag}"',
'colour': color
})
enriched_attr.add_tag(**t)
return enriched_attr
def _make_enriched_attr(event, result, orig_attr):
enriched_object = MISPObject('Qintel Threat Enrichment')
enriched_object.add_reference(orig_attr.uuid, 'related-to')
enriched_attr = MISPAttribute()
enriched_attr.from_dict(**{
'value': orig_attr.value,
'type': orig_attr.type,
'distribution': 0,
'object_relation': 'enriched-attr',
'to_ids': orig_attr.to_ids
})
enriched_attr = _make_tags(enriched_attr, result)
enriched_object.add_attribute(**enriched_attr)
comment_attr = MISPAttribute()
comment_attr.from_dict(**{
'value': '\n'.join(result.get('descriptions', [])),
'type': 'text',
'object_relation': 'descriptions',
'distribution': 0
})
enriched_object.add_attribute(**comment_attr)
last_seen = MISPAttribute()
last_seen.from_dict(**{
'value': result.get('last_seen'),
'type': 'datetime',
'object_relation': 'last-seen',
'distribution': 0
})
enriched_object.add_attribute(**last_seen)
event.add_attribute(**orig_attr)
event.add_object(**enriched_object)
return event
def _make_asn_attr(event, result, orig_attr):
asn_object = MISPObject('asn')
asn_object.add_reference(orig_attr.uuid, 'related-to')
asn_attr = MISPAttribute()
asn_attr.from_dict(**{
'type': 'AS',
'value': result.get('asn'),
'object_relation': 'asn',
'distribution': 0
})
asn_object.add_attribute(**asn_attr)
org_attr = MISPAttribute()
org_attr.from_dict(**{
'type': 'text',
'value': result.get('asn_name', 'unknown').title(),
'object_relation': 'description',
'distribution': 0
})
asn_object.add_attribute(**org_attr)
event.add_object(**asn_object)
return event
def _format_hover(event, result):
enriched_object = event.get_objects_by_name('Qintel Threat Enrichment')[0]
tags = ', '.join(result.get('tags'))
enriched_object.add_attribute('Tags', type='text', value=tags)
return event
def _format_result(attribute, result):
event = MISPEvent()
orig_attr = MISPAttribute()
orig_attr.from_dict(**attribute)
event = _make_enriched_attr(event, result, orig_attr)
event = _make_asn_attr(event, result, orig_attr)
return event
def _check_config(config):
if not config:
return False
if not isinstance(config, dict):
return False
if config.get('token', '') == '':
return False
return True
def _check_request(request):
if not request.get('attribute'):
return f'{standard_error_message}, {checking_error}'
check_reqs = ('type', 'value')
if not check_input_attribute(request['attribute'],
requirements=check_reqs):
return f'{standard_error_message}, {checking_error}'
if request['attribute']['type'] not in mispattributes['input']:
return 'Unsupported attribute type'
def handler(q=False):
if not q:
return False
request = json.loads(q)
config = request.get('config')
if not _check_config(config):
return _return_error('Missing Qintel token')
check_request_error = _check_request(request)
if check_request_error:
return _return_error(check_request_error)
search_args = {
'token': config['token'],
'remote': config.get('remote')
}
try:
result = search_qsentry(request['attribute']['value'], **search_args)
except Exception as e:
return _return_error(str(e))
event = _format_result(request['attribute'], result)
if not request.get('event_id'):
event = _format_hover(event, result)
event = json.loads(event.to_json())
ret_result = {key: event[key] for key in ('Attribute', 'Object') if key
in event}
return {'results': ret_result}
def introspection():
return mispattributes
def version():
moduleinfo['config'] = moduleconfig
return moduleinfo
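For completeness, a minimal sketch of the two request shapes this handler distinguishes: with an event_id it behaves as an expansion, without one _format_hover() adds the aggregated Tags attribute. The token and attribute are placeholders:

import json

base = {
    'config': {'token': 'QSENTRY-TOKEN'},   # placeholder token
    'attribute': {
        'type': 'ip-src',
        'uuid': '00000000-0000-0000-0000-000000000000',
        'value': '203.0.113.1'
    }
}

expansion_request = json.dumps({**base, 'event_id': '1234'})  # expansion path
hover_request = json.dumps(base)                              # hover path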

Some files were not shown because too many files have changed in this diff.