mirror of https://github.com/MISP/PyMISP
Merge remote-tracking branch 'CIRCL/master'
commit b0cf917627
.gitignore: @ -1,6 +1,9 @@
*.swp
*.pem
*.pyc
examples/keys.py
examples/cudeso.py
examples/feed-generator/output/*.json
build/*
dist/*
pymisp.egg-info/*
.travis.yml (new file): @ -0,0 +1,25 @@
language: python

cache: pip

python:
  - "2.7"
  - "3.3"
  - "3.4"
  - "3.5"
  - "3.5-dev"
  - "nightly"

install:
  - pip install -U nose
  - pip install coveralls
  - pip install codecov
  - pip install requests-mock
  - pip install .

script:
  - nosetests --with-coverage --cover-package=pymisp tests/test_offline.py

after_success:
  - codecov
  - coveralls
MANIFEST.in (new file): @ -0,0 +1 @@
include pymisp/data/*
README.md (38 changed lines):
@ -1,3 +1,10 @@
README
======

[![Documentation Status](https://readthedocs.org/projects/pymisp/badge/?version=master)](http://pymisp.readthedocs.io/en/master/?badge=master)
[![Build Status](https://travis-ci.org/MISP/PyMISP.svg?branch=master)](https://travis-ci.org/MISP/PyMISP)
[![Coverage Status](https://coveralls.io/repos/github/MISP/PyMISP/badge.svg?branch=master)](https://coveralls.io/github/MISP/PyMISP?branch=master)

# PyMISP - Python Library to access MISP

PyMISP is a Python library to access [MISP](https://github.com/MISP/MISP) platforms via their REST API.
@ -8,40 +15,47 @@ PyMISP allows you to fetch events, add or update events/attributes, add or updat

* [requests](http://docs.python-requests.org)

-## Install
+## Install from pip

-~~~~
+```
pip install pymisp
+```
+
+## Install the latest version from repo
+
+```
git clone https://github.com/CIRCL/PyMISP.git && cd PyMISP
python setup.py install
-~~~~
+```

## Samples and how to use PyMISP

Various examples and sample scripts are in the [examples/](examples/) directory.

In the examples directory, you will need to change keys.py.sample to enter your MISP URL and API key.

-~~~~
+```
cd examples
cp keys.py.sample keys.py
vim keys.py
-~~~~
+```

The API key of MISP is available in the Automation section of the MISP web interface.

To test if your URL and API keys are correct, you can run examples/last.py to
fetch the last 10 events published.

-~~~~
+```
cd examples
python last.py -l 10
-~~~~
+```

## Documentation

-[PyMISP API documentation is available](http://www.circl.lu/assets/files/PyMISP.pdf).
+[PyMISP API documentation is available](https://media.readthedocs.org/pdf/pymisp/master/pymisp.pdf).

Documentation can be generated with epydoc:

-~~~~
+```
epydoc --url https://github.com/CIRCL/PyMISP --graph all --name PyMISP --pdf pymisp -o doc
-~~~~
+```
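The example scripts referenced above all follow the same pattern. As a rough sketch (not part of this commit), connecting and fetching a single event looks like this, using only calls that appear elsewhere in this diff:

```
from pymisp import PyMISP
from keys import misp_url, misp_key  # created from keys.py.sample as described above

# Arguments: URL, API key, SSL verification, output format.
misp = PyMISP(misp_url, misp_key, True, 'json')

event = misp.get_event(1)  # fetch the event with ID 1 as a dict
print(event)
```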
docs/Makefile (new file): @ -0,0 +1,225 @@
# Makefile for Sphinx documentation
#

# You can set these variables from the command line.
SPHINXOPTS    =
SPHINXBUILD   = sphinx-build
PAPER         =
BUILDDIR      = build

# Internal variables.
PAPEROPT_a4     = -D latex_paper_size=a4
PAPEROPT_letter = -D latex_paper_size=letter
ALLSPHINXOPTS   = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source
# the i18n builder cannot share the environment and doctrees with the others
I18NSPHINXOPTS  = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source

.PHONY: help
help:
	@echo "Please use \`make <target>' where <target> is one of"
	@echo "  html       to make standalone HTML files"
	@echo "  dirhtml    to make HTML files named index.html in directories"
	@echo "  singlehtml to make a single large HTML file"
	@echo "  pickle     to make pickle files"
	@echo "  json       to make JSON files"
	@echo "  htmlhelp   to make HTML files and a HTML help project"
	@echo "  qthelp     to make HTML files and a qthelp project"
	@echo "  applehelp  to make an Apple Help Book"
	@echo "  devhelp    to make HTML files and a Devhelp project"
	@echo "  epub       to make an epub"
	@echo "  epub3      to make an epub3"
	@echo "  latex      to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
	@echo "  latexpdf   to make LaTeX files and run them through pdflatex"
	@echo "  latexpdfja to make LaTeX files and run them through platex/dvipdfmx"
	@echo "  text       to make text files"
	@echo "  man        to make manual pages"
	@echo "  texinfo    to make Texinfo files"
	@echo "  info       to make Texinfo files and run them through makeinfo"
	@echo "  gettext    to make PO message catalogs"
	@echo "  changes    to make an overview of all changed/added/deprecated items"
	@echo "  xml        to make Docutils-native XML files"
	@echo "  pseudoxml  to make pseudoxml-XML files for display purposes"
	@echo "  linkcheck  to check all external links for integrity"
	@echo "  doctest    to run all doctests embedded in the documentation (if enabled)"
	@echo "  coverage   to run coverage check of the documentation (if enabled)"
	@echo "  dummy      to check syntax errors of document sources"

.PHONY: clean
clean:
	rm -rf $(BUILDDIR)/*

.PHONY: html
html:
	$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
	@echo
	@echo "Build finished. The HTML pages are in $(BUILDDIR)/html."

.PHONY: dirhtml
dirhtml:
	$(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
	@echo
	@echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."

.PHONY: singlehtml
singlehtml:
	$(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
	@echo
	@echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."

.PHONY: pickle
pickle:
	$(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
	@echo
	@echo "Build finished; now you can process the pickle files."

.PHONY: json
json:
	$(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
	@echo
	@echo "Build finished; now you can process the JSON files."

.PHONY: htmlhelp
htmlhelp:
	$(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
	@echo
	@echo "Build finished; now you can run HTML Help Workshop with the" \
	      ".hhp project file in $(BUILDDIR)/htmlhelp."

.PHONY: qthelp
qthelp:
	$(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
	@echo
	@echo "Build finished; now you can run "qcollectiongenerator" with the" \
	      ".qhcp project file in $(BUILDDIR)/qthelp, like this:"
	@echo "# qcollectiongenerator $(BUILDDIR)/qthelp/PyMISP.qhcp"
	@echo "To view the help file:"
	@echo "# assistant -collectionFile $(BUILDDIR)/qthelp/PyMISP.qhc"

.PHONY: applehelp
applehelp:
	$(SPHINXBUILD) -b applehelp $(ALLSPHINXOPTS) $(BUILDDIR)/applehelp
	@echo
	@echo "Build finished. The help book is in $(BUILDDIR)/applehelp."
	@echo "N.B. You won't be able to view it unless you put it in" \
	      "~/Library/Documentation/Help or install it in your application" \
	      "bundle."

.PHONY: devhelp
devhelp:
	$(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
	@echo
	@echo "Build finished."
	@echo "To view the help file:"
	@echo "# mkdir -p $$HOME/.local/share/devhelp/PyMISP"
	@echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/PyMISP"
	@echo "# devhelp"

.PHONY: epub
epub:
	$(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
	@echo
	@echo "Build finished. The epub file is in $(BUILDDIR)/epub."

.PHONY: epub3
epub3:
	$(SPHINXBUILD) -b epub3 $(ALLSPHINXOPTS) $(BUILDDIR)/epub3
	@echo
	@echo "Build finished. The epub3 file is in $(BUILDDIR)/epub3."

.PHONY: latex
latex:
	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
	@echo
	@echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
	@echo "Run \`make' in that directory to run these through (pdf)latex" \
	      "(use \`make latexpdf' here to do that automatically)."

.PHONY: latexpdf
latexpdf:
	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
	@echo "Running LaTeX files through pdflatex..."
	$(MAKE) -C $(BUILDDIR)/latex all-pdf
	@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."

.PHONY: latexpdfja
latexpdfja:
	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
	@echo "Running LaTeX files through platex and dvipdfmx..."
	$(MAKE) -C $(BUILDDIR)/latex all-pdf-ja
	@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."

.PHONY: text
text:
	$(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
	@echo
	@echo "Build finished. The text files are in $(BUILDDIR)/text."

.PHONY: man
man:
	$(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
	@echo
	@echo "Build finished. The manual pages are in $(BUILDDIR)/man."

.PHONY: texinfo
texinfo:
	$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
	@echo
	@echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo."
	@echo "Run \`make' in that directory to run these through makeinfo" \
	      "(use \`make info' here to do that automatically)."

.PHONY: info
info:
	$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
	@echo "Running Texinfo files through makeinfo..."
	make -C $(BUILDDIR)/texinfo info
	@echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo."

.PHONY: gettext
gettext:
	$(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale
	@echo
	@echo "Build finished. The message catalogs are in $(BUILDDIR)/locale."

.PHONY: changes
changes:
	$(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
	@echo
	@echo "The overview file is in $(BUILDDIR)/changes."

.PHONY: linkcheck
linkcheck:
	$(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
	@echo
	@echo "Link check complete; look for any errors in the above output " \
	      "or in $(BUILDDIR)/linkcheck/output.txt."

.PHONY: doctest
doctest:
	$(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
	@echo "Testing of doctests in the sources finished, look at the " \
	      "results in $(BUILDDIR)/doctest/output.txt."

.PHONY: coverage
coverage:
	$(SPHINXBUILD) -b coverage $(ALLSPHINXOPTS) $(BUILDDIR)/coverage
	@echo "Testing of coverage in the sources finished, look at the " \
	      "results in $(BUILDDIR)/coverage/python.txt."

.PHONY: xml
xml:
	$(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml
	@echo
	@echo "Build finished. The XML files are in $(BUILDDIR)/xml."

.PHONY: pseudoxml
pseudoxml:
	$(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml
	@echo
	@echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml."

.PHONY: dummy
dummy:
	$(SPHINXBUILD) -b dummy $(ALLSPHINXOPTS) $(BUILDDIR)/dummy
	@echo
	@echo "Build finished. Dummy builder generates no files."
docs/source/conf.py (new file): @ -0,0 +1,448 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# PyMISP documentation build configuration file, created by
# sphinx-quickstart on Fri Aug 26 11:39:17 2016.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.

# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
import os
import sys
sys.path.insert(0, os.path.abspath('.'))

from recommonmark.parser import CommonMarkParser

# -- General configuration ------------------------------------------------

# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'

# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
    'sphinx.ext.autodoc',
    'sphinx.ext.doctest',
    'sphinx.ext.intersphinx',
    'sphinx.ext.todo',
    'sphinx.ext.coverage',
    'sphinx.ext.mathjax',
    'sphinx.ext.ifconfig',
    'sphinx.ext.viewcode',
    'sphinx.ext.napoleon',
]

napoleon_google_docstring = False
napoleon_use_param = False
napoleon_use_ivar = True

# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']

source_parsers = {
    '.md': CommonMarkParser,
}

# The suffix(es) of source filenames.
# You can specify multiple suffixes as a list of strings:
#
# source_suffix = ['.rst', '.md']
source_suffix = ['.rst', '.md']

# The encoding of source files.
#
# source_encoding = 'utf-8-sig'

# The master toctree document.
master_doc = 'index'

# General information about the project.
project = 'PyMISP'
copyright = '2016, Raphaël Vinot'
author = 'Raphaël Vinot'

# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '2.4.50'
# The full version, including alpha/beta/rc tags.
release = '2.4.50'

# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None

# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#
# today = ''
#
# Else, today_fmt is used as the format for a strftime call.
#
# today_fmt = '%B %d, %Y'

# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# These patterns also affect html_static_path and html_extra_path
exclude_patterns = []

# The reST default role (used for this markup: `text`) to use for all
# documents.
#
# default_role = None

# If true, '()' will be appended to :func: etc. cross-reference text.
#
# add_function_parentheses = True

# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#
# add_module_names = True

# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#
# show_authors = False

# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'

# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []

# If true, keep warnings as "system message" paragraphs in the built documents.
# keep_warnings = False

# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = True


# -- Options for HTML output ----------------------------------------------

# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'alabaster'

# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}

# Add any paths that contain custom themes here, relative to this directory.
# html_theme_path = []

# The name for this set of Sphinx documents.
# "<project> v<release> documentation" by default.
#
# html_title = 'PyMISP v2.4.50'

# A shorter title for the navigation bar. Default is the same as html_title.
#
# html_short_title = None

# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#
# html_logo = None

# The name of an image file (relative to this directory) to use as a favicon of
# the docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#
# html_favicon = None

# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']

# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#
# html_extra_path = []

# If not None, a 'Last updated on:' timestamp is inserted at every page
# bottom, using the given strftime format.
# The empty string is equivalent to '%b %d, %Y'.
#
# html_last_updated_fmt = None

# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#
# html_use_smartypants = True

# Custom sidebar templates, maps document names to template names.
#
# html_sidebars = {}

# Additional templates that should be rendered to pages, maps page names to
# template names.
#
# html_additional_pages = {}

# If false, no module index is generated.
#
# html_domain_indices = True

# If false, no index is generated.
#
# html_use_index = True

# If true, the index is split into individual pages for each letter.
#
# html_split_index = False

# If true, links to the reST sources are added to the pages.
#
# html_show_sourcelink = True

# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#
# html_show_sphinx = True

# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#
# html_show_copyright = True

# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#
# html_use_opensearch = ''

# This is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = None

# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
#   'da', 'de', 'en', 'es', 'fi', 'fr', 'h', 'it', 'ja'
#   'nl', 'no', 'pt', 'ro', 'r', 'sv', 'tr', 'zh'
#
# html_search_language = 'en'

# A dictionary with options for the search language support, empty by default.
# 'ja' uses this config value.
# 'zh' user can custom change `jieba` dictionary path.
#
# html_search_options = {'type': 'default'}

# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#
# html_search_scorer = 'scorer.js'

# Output file base name for HTML help builder.
htmlhelp_basename = 'PyMISPdoc'

# -- Options for LaTeX output ---------------------------------------------

latex_elements = {
    # The paper size ('letterpaper' or 'a4paper').
    #
    # 'papersize': 'letterpaper',

    # The font size ('10pt', '11pt' or '12pt').
    #
    # 'pointsize': '10pt',

    # Additional stuff for the LaTeX preamble.
    #
    # 'preamble': '',

    # Latex figure (float) alignment
    #
    # 'figure_align': 'htbp',
}

# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
#  author, documentclass [howto, manual, or own class]).
latex_documents = [
    (master_doc, 'PyMISP.tex', 'PyMISP Documentation',
     'Raphaël Vinot', 'manual'),
]

# The name of an image file (relative to this directory) to place at the top of
# the title page.
#
# latex_logo = None

# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#
# latex_use_parts = False

# If true, show page references after internal links.
#
# latex_show_pagerefs = False

# If true, show URL addresses after external links.
#
# latex_show_urls = False

# Documents to append as an appendix to all manuals.
#
# latex_appendices = []

# If false, will not define \strong, \code, \titleref, \crossref ... but only
# \sphinxstrong, ..., \sphinxtitleref, ... To help avoid clash with user added
# packages.
#
# latex_keep_old_macro_names = True

# If false, no module index is generated.
#
# latex_domain_indices = True


# -- Options for manual page output ---------------------------------------

# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
    (master_doc, 'pymisp', 'PyMISP Documentation',
     [author], 1)
]

# If true, show URL addresses after external links.
#
# man_show_urls = False


# -- Options for Texinfo output -------------------------------------------

# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
#  dir menu entry, description, category)
texinfo_documents = [
    (master_doc, 'PyMISP', 'PyMISP Documentation',
     author, 'PyMISP', 'One line description of project.',
     'Miscellaneous'),
]

# Documents to append as an appendix to all manuals.
#
# texinfo_appendices = []

# If false, no module index is generated.
#
# texinfo_domain_indices = True

# How to display URL addresses: 'footnote', 'no', or 'inline'.
#
# texinfo_show_urls = 'footnote'

# If true, do not generate a @detailmenu in the "Top" node's menu.
#
# texinfo_no_detailmenu = False


# -- Options for Epub output ----------------------------------------------

# Bibliographic Dublin Core info.
epub_title = project
epub_author = author
epub_publisher = author
epub_copyright = copyright

# The basename for the epub file. It defaults to the project name.
# epub_basename = project

# The HTML theme for the epub output. Since the default themes are not
# optimized for small screen space, using the same theme for HTML and epub
# output is usually not wise. This defaults to 'epub', a theme designed to save
# visual space.
#
# epub_theme = 'epub'

# The language of the text. It defaults to the language option
# or 'en' if the language is not set.
#
# epub_language = ''

# The scheme of the identifier. Typical schemes are ISBN or URL.
# epub_scheme = ''

# The unique identifier of the text. This can be a ISBN number
# or the project homepage.
#
# epub_identifier = ''

# A unique identification for the text.
#
# epub_uid = ''

# A tuple containing the cover image and cover page html template filenames.
#
# epub_cover = ()

# A sequence of (type, uri, title) tuples for the guide element of content.opf.
#
# epub_guide = ()

# HTML files that should be inserted before the pages created by sphinx.
# The format is a list of tuples containing the path and title.
#
# epub_pre_files = []

# HTML files that should be inserted after the pages created by sphinx.
# The format is a list of tuples containing the path and title.
#
# epub_post_files = []

# A list of files that should not be packed into the epub file.
epub_exclude_files = ['search.html']

# The depth of the table of contents in toc.ncx.
#
# epub_tocdepth = 3

# Allow duplicate toc entries.
#
# epub_tocdup = True

# Choose between 'default' and 'includehidden'.
#
# epub_tocscope = 'default'

# Fix unsupported image types using the Pillow.
#
# epub_fix_images = False

# Scale large images.
#
# epub_max_image_width = 0

# How to display URL addresses: 'footnote', 'no', or 'inline'.
#
# epub_show_urls = 'inline'

# If false, no index is generated.
#
# epub_use_index = True


# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {'https://docs.python.org/': None}
docs/source/index.rst (new file): @ -0,0 +1,25 @@
.. PyMISP documentation master file, created by
   sphinx-quickstart on Fri Aug 26 11:39:17 2016.
   You can adapt this file completely to your liking, but it should at least
   contain the root `toctree` directive.

Welcome to PyMISP's documentation!
==================================

Contents:

.. toctree::
   :maxdepth: 2

   readme
   modules


Indices and tables
==================

* :ref:`genindex`
* :ref:`modindex`
* :ref:`search`
docs/source/modules.rst (new file): @ -0,0 +1,7 @@
pymisp
======

.. toctree::
   :maxdepth: 4

   pymisp
docs/source/pymisp.rst (new file): @ -0,0 +1,22 @@
pymisp package
==============

Submodules
----------

pymisp.api module
-----------------

.. automodule:: pymisp.api
   :members:
   :undoc-members:
   :show-inheritance:


Module contents
---------------

.. automodule:: pymisp
   :members:
   :undoc-members:
   :show-inheritance:
docs/source/readme.rst (new file): @ -0,0 +1 @@
.. include:: ../../README.md
@ -0,0 +1,29 @@ (new example script)
#!/usr/bin/env python
# -*- coding: utf-8 -*-

from pymisp import PyMISP
from keys import misp_url, misp_key
import argparse

# For python2 & 3 compat, a bit dirty, but it seems to be the least bad one
try:
    input = raw_input
except NameError:
    pass


def init(url, key):
    return PyMISP(url, key, True, 'json', debug=True)


if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='Add an attribute to an existing event on MISP.')
    parser.add_argument("-e", "--event", type=int, help="The id of the event to update.")
    parser.add_argument("-t", "--type", help="The type of the added attribute")
    parser.add_argument("-v", "--value", help="The value of the attribute")
    args = parser.parse_args()

    misp = init(misp_url, misp_key)

    event = misp.get_event(args.event)
    event = misp.add_named_attribute(event, args.type, args.value)
    print(event)
@ -0,0 +1,27 @@ (new example script)
#!/usr/bin/env python
# -*- coding: utf-8 -*-

from pymisp import PyMISP
from keys import misp_url, misp_key
import argparse

# For python2 & 3 compat, a bit dirty, but it seems to be the least bad one
try:
    input = raw_input
except NameError:
    pass


def init(url, key):
    return PyMISP(url, key, True, 'json')


if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='Add a new user by setting the mandatory fields.')
    parser.add_argument("-e", "--email", required=True, help="Email linked to the account.")
    parser.add_argument("-o", "--org_id", required=True, help="Organisation linked to the user.")
    parser.add_argument("-r", "--role_id", required=True, help="Role linked to the user.")
    args = parser.parse_args()

    misp = init(misp_url, misp_key)

    print(misp.add_user(args.email, args.org_id, args.role_id))
@ -0,0 +1,28 @@ (new example script)
#!/usr/bin/env python
# -*- coding: utf-8 -*-

from pymisp import PyMISP
from keys import misp_url, misp_key
import argparse

# For python2 & 3 compat, a bit dirty, but it seems to be the least bad one
try:
    input = raw_input
except NameError:
    pass


def init(url, key):
    return PyMISP(url, key, True, 'json')


if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='Add the user described in the given json. If no file is provided, returns a json listing all the fields used to describe a user.')
    parser.add_argument("-f", "--json_file", help="The name of the json file describing the user you want to create.")
    args = parser.parse_args()

    misp = init(misp_url, misp_key)

    if args.json_file is None:
        print(misp.get_add_user_fields_list())
    else:
        print(misp.add_user_json(args.json_file))
@ -1,4 +1,4 @@
-#!/usr/bin/python
+#!/usr/bin/env python
 # -*- coding: utf-8 -*-

 import sys
@ -27,25 +27,14 @@ def init(cert_to_priv=True):
     destination = PyMISP(url_cert, cert, cert_cert, 'xml')


-def _to_utf8(request):
-    to_return = None
-    if 'json' in request.headers['content-type']:
-        to_return = request.json()
-    else:
-        to_return = request.text.encode('utf-8')
-    return to_return
-
-
 def copy_event(event_id):
-    r_src = source.get_event(event_id)
-    to_send = _to_utf8(r_src)
-    return destination.add_event(to_send)
+    e = source.get_event(event_id)
+    return destination.add_event(e)


 def update_event(event_id, event_to_update):
-    r_src = source.get_event(event_id)
-    to_send = _to_utf8(r_src)
-    return destination.update_event(event_to_update, to_send)
+    e = source.get_event(event_id)
+    return destination.update_event(event_to_update, e)


 def list_copy(filename):
@ -83,7 +72,7 @@ def copy(eventid):

 def export_our_org():
     circl = source.search(org='CIRCL')
-    return _to_utf8(circl)
+    return circl

 if __name__ == '__main__':
     import argparse
@ -13,20 +13,17 @@ except NameError:

 def init(url, key):
-    return PyMISP(url, key, True, 'json')
+    return PyMISP(url, key, True, 'json', debug=True)

 if __name__ == '__main__':
-    parser = argparse.ArgumentParser(description='Send malware sample to MISP.')
+    parser = argparse.ArgumentParser(description='Create an event on MISP.')
     parser.add_argument("-d", "--distrib", type=int, help="The distribution setting used for the attributes and for the newly created event, if relevant. [0-3].")
     parser.add_argument("-i", "--info", help="Used to populate the event info field if no event ID supplied.")
     parser.add_argument("-a", "--analysis", type=int, help="The analysis level of the newly created event, if applicable. [0-2]")
-    parser.add_argument("-t", "--threat", type=int, help="The threat level ID of the newly created event, if applicable. [0-3]")
+    parser.add_argument("-t", "--threat", type=int, help="The threat level ID of the newly created event, if applicable. [1-4]")
     args = parser.parse_args()

     misp = init(misp_url, misp_key)

     event = misp.new_event(args.distrib, args.threat, args.analysis, args.info)
-    print event
-
-    response = misp.add_mutex(event, 'booh')
-    print response
+    print(event)
@ -0,0 +1,36 @@ (new example script)
#!/usr/bin/env python
# -*- coding: utf-8 -*-

from pymisp import PyMISP
from keys import misp_url, misp_key
import argparse


# Usage for pipe masters: ./last.py -l 5h | jq .


def init(url, key):
    return PyMISP(url, key, True, 'json', debug=True)


def del_event(m, eventid):
    result = m.delete_event(eventid)
    print(result)


def del_attr(m, attrid):
    result = m.delete_attribute(attrid)
    print(result)


if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='Delete an event from a MISP instance.')
    parser.add_argument("-e", "--event", help="Event ID to delete.")
    parser.add_argument("-a", "--attribute", help="Attribute ID to delete.")

    args = parser.parse_args()

    misp = init(misp_url, misp_key)

    if args.event:
        del_event(misp, args.event)
    else:
        del_attr(misp, args.attribute)
@ -0,0 +1,25 @@ (new example script)
#!/usr/bin/env python
# -*- coding: utf-8 -*-

from pymisp import PyMISP
from keys import misp_url, misp_key
import argparse

# For python2 & 3 compat, a bit dirty, but it seems to be the least bad one
try:
    input = raw_input
except NameError:
    pass


def init(url, key):
    return PyMISP(url, key, True, 'json')


if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='Delete the user with the given id. Keep in mind that disabling users (by setting the disabled flag via an edit) is always preferred, to keep user associations to events intact.')
    parser.add_argument("-i", "--user_id", help="The id of the user you want to delete.")
    args = parser.parse_args()

    misp = init(misp_url, misp_key)

    print(misp.delete_user(args.user_id))
@ -0,0 +1,26 @@ (new example script)
#!/usr/bin/env python
# -*- coding: utf-8 -*-

from pymisp import PyMISP
from keys import misp_url, misp_key
import argparse

# For python2 & 3 compat, a bit dirty, but it seems to be the least bad one
try:
    input = raw_input
except NameError:
    pass


def init(url, key):
    return PyMISP(url, key, True, 'json')


if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='Edit the email of the user designated by the user_id.')
    parser.add_argument("-i", "--user_id", required=True, help="The id of the user you want to modify.")
    parser.add_argument("-e", "--email", help="Email linked to the account.")
    args = parser.parse_args()

    misp = init(misp_url, misp_key)

    print(misp.edit_user(args.user_id, email=args.email))
@ -0,0 +1,29 @@ (new example script)
#!/usr/bin/env python
# -*- coding: utf-8 -*-

from pymisp import PyMISP
from keys import misp_url, misp_key
import argparse

# For python2 & 3 compat, a bit dirty, but it seems to be the least bad one
try:
    input = raw_input
except NameError:
    pass


def init(url, key):
    return PyMISP(url, key, True, 'json')


if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='Edit the user designated by the user_id. If no file is provided, returns a json listing all the fields used to describe a user.')
    parser.add_argument("-i", "--user_id", required=True, help="The id of the user you want to modify.")
    parser.add_argument("-f", "--json_file", help="The name of the json file describing your modifications.")
    args = parser.parse_args()

    misp = init(misp_url, misp_key)

    if args.json_file is None:
        print(misp.get_edit_user_fields_list(args.user_id))
    else:
        print(misp.edit_user_json(args.json_file, args.user_id))
@ -0,0 +1,126 @@ (new example script)
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copy Emerging Threats Block IPs list to several MISP events
# Because of the large size of the list the first run will take a minute
# Running it again will update the MISP events if changes are detected
#
# This script requires PyMISP 2.4.50 or later

import sys, json, time, requests
from pymisp import PyMISP
from keys import misp_url, misp_key

et_url = 'https://rules.emergingthreats.net/fwrules/emerging-Block-IPs.txt'
et_str = 'Emerging Threats '


def init_misp():
    global mymisp
    mymisp = PyMISP(misp_url, misp_key)


def load_misp_event(eid):
    global et_attr
    global et_drev
    global et_event
    et_attr = {}
    et_drev = {}

    et_event = mymisp.get(eid)
    echeck(et_event)
    for a in et_event['Event']['Attribute']:
        if a['category'] == 'Network activity':
            et_attr[a['value']] = a['id']
            continue
        if a['category'] == 'Internal reference':
            et_drev = a


def init_et():
    global et_data
    global et_rev
    requests.packages.urllib3.disable_warnings()
    s = requests.Session()
    r = s.get(et_url)
    if r.status_code != 200:
        raise Exception('Error getting ET data: {}'.format(r.text))
    name = ''
    et_data = {}
    et_rev = 0
    for line in r.text.splitlines():
        if line.startswith('# Rev '):
            et_rev = int(line[6:])
            continue
        if line.startswith('#'):
            name = line[1:].strip()
            if et_rev and not et_data.get(name):
                et_data[name] = {}
            continue
        l = line.rstrip()
        if l:
            et_data[name][l] = name


def update_et_event(name):
    if et_drev and et_rev and int(et_drev['value']) < et_rev:
        # Copy MISP attributes to new dict
        et_ips = dict.fromkeys(et_attr.keys())

        # Weed out attributes still in ET data
        for k, v in et_data[name].items():
            et_attr.pop(k, None)

        # Delete the leftover attributes from MISP
        for k, v in et_attr.items():
            r = mymisp.delete_attribute(v)
            if r.get('errors'):
                print("Error deleting attribute {} ({}): {}\n".format(v, k, r['errors']))

        # Weed out ips already in the MISP event
        for k, v in et_ips.items():
            et_data[name].pop(k, None)

        # Add new attributes to MISP event
        ipdst = []
        for i, k in enumerate(et_data[name].items(), 1 - len(et_data[name])):
            ipdst.append(k[0])
            if i % 100 == 0:
                r = mymisp.add_ipdst(et_event, ipdst)
                echeck(r, et_event['Event']['id'])
                ipdst = []

        # Update revision number
        et_drev['value'] = et_rev
        et_drev.pop('timestamp', None)
        attr = []
        attr.append(et_drev)

        # Publish updated MISP event
        et_event['Event']['Attribute'] = attr
        et_event['Event']['published'] = False
        et_event['Event']['date'] = time.strftime('%Y-%m-%d')
        r = mymisp.publish(et_event)
        echeck(r, et_event['Event']['id'])


def echeck(r, eid=None):
    if r.get('errors'):
        if eid:
            print("Processing event {} failed: {}".format(eid, r['errors']))
        else:
            print(r['errors'])
        sys.exit(1)


if __name__ == '__main__':
    init_misp()
    init_et()

    for et_type in set(et_data.keys()):
        info = et_str + et_type
        r = mymisp.search_index(eventinfo=info)
        if r['response']:
            eid = r['response'][0]['id']
        else:  # event not found, create it
            new_event = mymisp.new_event(info=info, distribution=3, threat_level_id=4, analysis=1)
            echeck(new_event)
            eid = new_event['Event']['id']
            r = mymisp.add_internal_text(new_event, 1, comment='Emerging Threats revision number')
            echeck(r, eid)
        load_misp_event(eid)
        update_et_event(et_type)
@ -0,0 +1,53 @@ (new README for the dummy-event scripts)
## Explanation

This folder contains scripts made to create dummy events in order to test MISP instances.

* dummy is a text-only file used as an uploaded attachment.
* create\_dummy\_event.py will create a given number of events (default: 1) with a randomly generated domain|ip attribute as well as a copy of the dummy file.
* create\_massive\_dummy\_events.py will create a given number of events (default: 1) with a given number of randomly generated attributes (default: 3000).

### Tools description

* randomStringGenerator: generate a random string of a given size; the characters used to build the string can be chosen, defaulting to string.ascii\_lowercase and string.digits
* randomIpGenerator: generate a random IP address

* floodtxt: add a generated string as an attribute of the given event. The added attribute can be of the following category/type:
    - Internal reference/comment
    - Internal reference/text
    - Internal reference/other
    - Payload delivery/email-subject
    - Artifact dropped/mutex
    - Artifact dropped/filename
* floodip: add a generated IP as an attribute of the given event. The added attribute can be of the following category/type:
    - Network activity/ip-src
    - Network activity/ip-dst
* flooddomain: add a generated domain-like string as an attribute of the given event. The added attribute can be of the following category/type:
    - Network activity/hostname
    - Network activity/domain
* flooddomainip: add a generated domain|ip-like string as an attribute of the given event. The added attribute is of the following category/type:
    - Network activity/domain|ip
* floodemail: add a generated email-like string as an attribute of the given event. The added attribute can be of the following category/type:
    - Payload delivery/email-src
    - Payload delivery/email-dst
* floodattachment: add a dummy file as an attribute of the given event. The added attribute is of the following category/type:
    - Payload delivery/attachment

* create\_dummy\_event: create a dummy event named "dummy event" with these characteristics:
    - Distribution: Your organisation only
    - Analysis: Initial
    - Threat Level: Undefined
    - Number of Attributes: 2
    - Attribute:
        - category/type: Network activity/domain|ip
        - value: Randomly generated
    - Attribute:
        - category/type: Payload delivery/attachment
        - value: 'dummy' file
* create\_massive\_dummy\_events: create a dummy event named "massive dummy event" with these characteristics:
    - Distribution: Your organisation only
    - Analysis: Initial
    - Threat Level: Undefined
    - Number of Attributes: Given as argument
    - Attribute:
        - category/type: Randomly chosen
        - value: Randomly generated or dummy file
create_dummy_event.py (new file): @ -0,0 +1,23 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-

from pymisp import PyMISP
from keys import misp_url, misp_key, misp_verifycert
import argparse
import tools


def init(url, key):
    return PyMISP(url, key, misp_verifycert, 'json')


if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='Create a given number of events, each containing a domain|ip attribute and an attachment.')
    parser.add_argument("-l", "--limit", type=int, help="Number of events to create (default 1)")
    args = parser.parse_args()

    misp = init(misp_url, misp_key)

    if args.limit is None:
        args.limit = 1

    for i in range(args.limit):
        tools.create_dummy_event(misp)
create_massive_dummy_events.py (new file): @ -0,0 +1,24 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-

from pymisp import PyMISP
from keys import url, key
import argparse
import tools


if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='Create a given number of events, each containing a given number of attributes.')
    parser.add_argument("-l", "--limit", type=int, help="Number of events to create (default 1)")
    parser.add_argument("-a", "--attribute", type=int, help="Number of attributes per event (default 3000)")
    args = parser.parse_args()

    misp = PyMISP(url, key, True, 'json')

    if args.limit is None:
        args.limit = 1
    if args.attribute is None:
        args.attribute = 3000

    for i in range(args.limit):
        tools.create_massive_dummy_events(misp, args.attribute)
dummy (new file): @ -0,0 +1,21 @@
DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY
DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY
DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY
DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY
DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY
DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY
DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY
DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY
DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY
DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY
DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY
DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY
DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY
DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY
DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY
DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY
DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY
DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY
DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY
DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY
DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY
tools.py (new file): @ -0,0 +1,74 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-

import random
from random import randint
import string


def randomStringGenerator(size, chars=string.ascii_lowercase + string.digits):
    return ''.join(random.choice(chars) for _ in range(size))


def randomIpGenerator():
    return str(randint(0, 255)) + '.' + str(randint(0, 255)) + '.' + str(randint(0, 255)) + '.' + str(randint(0, 255))


def floodtxt(misp, event, maxlength=255):
    text = randomStringGenerator(randint(1, maxlength))
    textfunctions = [misp.add_internal_comment, misp.add_internal_text, misp.add_internal_other, misp.add_email_subject, misp.add_mutex, misp.add_filename]
    textfunctions[randint(0, 5)](event, text)


def floodip(misp, event):
    ip = randomIpGenerator()
    ipfunctions = [misp.add_ipsrc, misp.add_ipdst]
    ipfunctions[randint(0, 1)](event, ip)


def flooddomain(misp, event, maxlength=25):
    a = randomStringGenerator(randint(1, maxlength))
    b = randomStringGenerator(randint(2, 3), chars=string.ascii_lowercase)
    domain = a + '.' + b
    domainfunctions = [misp.add_hostname, misp.add_domain]
    domainfunctions[randint(0, 1)](event, domain)


def flooddomainip(misp, event, maxlength=25):
    a = randomStringGenerator(randint(1, maxlength))
    b = randomStringGenerator(randint(2, 3), chars=string.ascii_lowercase)
    domain = a + '.' + b
    ip = randomIpGenerator()
    misp.add_domain_ip(event, domain, ip)


def floodemail(misp, event, maxlength=25):
    a = randomStringGenerator(randint(1, maxlength))
    b = randomStringGenerator(randint(1, maxlength))
    c = randomStringGenerator(randint(2, 3), chars=string.ascii_lowercase)
    email = a + '@' + b + '.' + c
    emailfunctions = [misp.add_email_src, misp.add_email_dst]
    emailfunctions[randint(0, 1)](event, email)


def floodattachment(misp, eventid, distribution, to_ids, category, comment, info, analysis, threat_level_id):
    filename = randomStringGenerator(randint(1, 128))
    misp.upload_sample(filename, 'dummy', eventid, distribution, to_ids, category, comment, info, analysis, threat_level_id)


def create_dummy_event(misp):
    event = misp.new_event(0, 4, 0, 'dummy event')
    flooddomainip(misp, event)
    floodattachment(misp, event['Event']['id'], event['Event']['distribution'], False, 'Payload delivery', '', event['Event']['info'], event['Event']['analysis'], event['Event']['threat_level_id'])


def create_massive_dummy_events(misp, nbattribute):
    event = misp.new_event(0, 4, 0, 'massive dummy event')
    eventid = event['Event']['id']
    functions = [floodtxt, floodip, flooddomain, flooddomainip, floodemail, floodattachment]
    for i in range(nbattribute):
        choice = randint(0, 5)
        if choice == 5:
            floodattachment(misp, eventid, event['Event']['distribution'], False, 'Payload delivery', '', event['Event']['info'], event['Event']['analysis'], event['Event']['threat_level_id'])
        else:
            functions[choice](misp, event)
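To show how the helpers above fit together, here is a hypothetical snippet (not part of the commit) that floods a fresh event with a few random attributes; the `new_event(0, 4, 0, ...)` arguments mirror `create_dummy_event` above:

```
from pymisp import PyMISP
from keys import misp_url, misp_key
import tools

misp = PyMISP(misp_url, misp_key, True, 'json')
# distribution=0 (your organisation only), threat level=4 (undefined), analysis=0 (initial)
event = misp.new_event(0, 4, 0, 'dummy event')
for _ in range(5):
    tools.floodip(misp, event)      # adds a random ip-src or ip-dst attribute
    tools.flooddomain(misp, event)  # adds a random hostname or domain attribute
```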
examples/feed-generator/generate.py: @ -1,53 +1,56 @@
-#!/usr/bin/python
+#!/usr/bin/env python
 # -*- coding: utf-8 -*-

 import sys
 import json
 import os
 from pymisp import PyMISP
-from settings import url, key, ssl, outputdir, filters
+from settings import url, key, ssl, outputdir, filters, valid_attribute_distribution_levels


-objectsToSave = {
-    'Orgc': {
-        'fields': ['name', 'uuid'],
-        'multiple': False,
-    },
-    'Tag': {
-        'fields': ['name', 'colour', 'exportable'],
-        'multiple': True,
-    },
-    'Attribute': {
-        'fields': ['uuid', 'value', 'category', 'type',
-                   'comment', 'data', 'timestamp',
-                   'to_ids'],
-        'multiple': True,
-    },
-}
+objectsToSave = {'Orgc': {'fields': ['name', 'uuid'],
+                          'multiple': False,
+                          },
+                 'Tag': {'fields': ['name', 'colour', 'exportable'],
+                         'multiple': True,
+                         },
+                 'Attribute': {'fields': ['uuid', 'value', 'category', 'type',
+                                          'comment', 'data', 'timestamp', 'to_ids'],
+                               'multiple': True,
+                               },
+                 }

 fieldsToSave = ['uuid', 'info', 'threat_level_id', 'analysis',
                 'timestamp', 'publish_timestamp', 'published',
                 'date']

+valid_attribute_distributions = []
+

 def init():
-    return PyMISP(url, key, ssl, 'json')
+    # If we have an old settings.py file then this variable won't exist
+    global valid_attribute_distributions
+    try:
+        valid_attribute_distributions = valid_attribute_distribution_levels
+    except:
+        valid_attribute_distributions = ['0', '1', '2', '3', '4', '5']
+    return PyMISP(url, key, ssl)


 def saveEvent(misp, uuid):
-    try:
-        event = misp.get_event(uuid)
-        event = __cleanUpEvent(event)
-        event = json.dumps(event)
-        eventFile = open(os.path.join(outputdir, uuid + '.json'), 'w')
-        eventFile.write(event)
-        eventFile.close()
-    except:
-        sys.exit('Could not create file for event ' + uuid + '.')
+    event = misp.get_event(uuid)
+    if not event.get('Event'):
+        print('Error while fetching event: {}'.format(event['message']))
+        sys.exit('Could not create file for event ' + uuid + '.')
+    event = __cleanUpEvent(event)
+    event = json.dumps(event)
+    eventFile = open(os.path.join(outputdir, uuid + '.json'), 'w')
+    eventFile.write(event)
+    eventFile.close()


 def __cleanUpEvent(event):
-    temp = event.json()
+    temp = event
     event = {'Event': {}}
     __cleanupEventFields(event, temp)
     __cleanupEventObjects(event, temp)
@ -61,11 +64,20 @@ def __cleanupEventFields(event, temp):
     return event


+def __blockAttributeByDistribution(attribute):
+    if attribute['distribution'] not in valid_attribute_distributions:
+        return True
+    return False
+
+
 def __cleanupEventObjects(event, temp):
     for objectType in objectsToSave.keys():
         if objectsToSave[objectType]['multiple'] is True:
             if objectType in temp['Event']:
                 for objectInstance in temp['Event'][objectType]:
+                    if objectType is 'Attribute':
+                        if __blockAttributeByDistribution(objectInstance):
+                            continue
                     tempObject = {}
                     for field in objectsToSave[objectType]['fields']:
                         if field in objectInstance.keys():
@ -86,7 +98,8 @@ def saveManifest(manifest):
         manifestFile = open(os.path.join(outputdir, 'manifest.json'), 'w')
         manifestFile.write(json.dumps(manifest))
         manifestFile.close()
-    except:
+    except Exception as e:
+        print(e)
         sys.exit('Could not create the manifest file.')
@ -95,8 +108,7 @@ def __addEventToManifest(event):
     for eventTag in event['EventTag']:
         tags.append({'name': eventTag['Tag']['name'],
                      'colour': eventTag['Tag']['colour']})
-    return {
-        'Orgc': event['Orgc'],
+    return {'Orgc': event['Orgc'],
            'Tag': tags,
            'info': event['info'],
            'date': event['date'],
@ -108,10 +120,12 @@ def __addEventToManifest(event):

 if __name__ == '__main__':
     misp = init()
-    result = misp.get_index(None, filters)
     try:
-        events = result.json()
-    except:
+        r = misp.get_index(filters)
+        events = r['response']
+        print(events[0])
+    except Exception as e:
+        print(e)
         sys.exit("Invalid response received from MISP.")
     if len(events) == 0:
         sys.exit("No events returned.")
@ -121,8 +135,7 @@ if __name__ == '__main__':
     for event in events:
         saveEvent(misp, event['uuid'])
         manifest[event['uuid']] = __addEventToManifest(event)
-        print "Event " + str(counter) + "/" + str(total) + " exported."
+        print("Event " + str(counter) + "/" + str(total) + " exported.")
         counter += 1
     saveManifest(manifest)
-    print 'Manifest saved. Feed creation completed.'
-
+    print('Manifest saved. Feed creation completed.')
@@ -21,3 +21,19 @@ outputdir = 'output'
# tlp:white and/or feed-export but exclude anything tagged privint
filters = {}


# By default all attributes will be included in the feed generation
# Remove the levels that you do not wish to include in the feed
# Use this to further narrow down what gets exported, for example:
# Setting this to ['3', '5'] will exclude any attributes from the feed that
# are not exportable to all or inherit the event
#
# The levels are as follows:
# 0: Your Organisation Only
# 1: This Community Only
# 2: Connected Communities
# 3: All
# 4: Sharing Group
# 5: Inherit Event
valid_attribute_distribution_levels = ['0', '1', '2', '3', '4', '5']
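To make the knobs above concrete, here is a minimal sketch of a narrowed-down settings.py; the tag filter value and the reduced level list are illustrative assumptions, not values shipped with the sample, and the exact filter syntax depends on your MISP version:

```python
# Hypothetical settings.py overrides (illustrative values only).
# Only pick up events carrying the tags hinted at in the comment above.
filters = {'tags': 'tlp:white|feed-export|!privint'}

# Only export attributes distributed to everyone ('3': All) or that
# follow the event's own distribution ('5': Inherit Event).
valid_attribute_distribution_levels = ['3', '5']
```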
@@ -2,7 +2,7 @@
# -*- coding: utf-8 -*-

from pymisp import PyMISP
from keys import misp_url, misp_key,misp_verifycert
from keys import misp_url, misp_key, misp_verifycert
import argparse
import os
import json
@@ -10,22 +10,28 @@ import json

# Usage for pipe masters: ./last.py -l 5h | jq .

proxies = {
    'http': 'http://127.0.0.1:8123',
    'https': 'http://127.0.0.1:8123',
}

proxies = None


def init(url, key):
    return PyMISP(url, key, misp_verifycert, 'json')
    return PyMISP(url, key, misp_verifycert, 'json', proxies=proxies)


def get_event(m, event, out=None):
    result = m.get_event(event)
    r = result.json()
    if out is None:
        print(json.dumps(r) + '\n')
        print(json.dumps(result) + '\n')
    else:
        with open(out, 'w') as f:
            f.write(json.dumps(r) + '\n')

            f.write(json.dumps(result) + '\n')

if __name__ == '__main__':

    parser = argparse.ArgumentParser(description='Get an event from a MISP instance.')
    parser.add_argument("-e", "--event", required=True, help="Event ID to get.")
    parser.add_argument("-o", "--output", help="Output file")
@@ -1,4 +1,4 @@
#!/usr/bin/python
#!/usr/bin/env python
# -*- coding: utf-8 -*-

"""
@@ -48,41 +48,34 @@ def get_event(event_id):

    event_id = int(event_id)
    if event_id > 0:
        event = source.get_event(event_id)
        if event.status_code == 200:
        event_json = source.get_event(event_id)
        event_core = event_json["Event"]
        # event_threatlevel_id = event_core["threat_level_id"]

            try:
                event_json = event.json()
            except:
                return False
        # attribute_count = event_core["attribute_count"]
        attribute = event_core["Attribute"]

            event_core = event_json["Event"]
            # event_threatlevel_id = event_core["threat_level_id"]
        for attribute in event_core["Attribute"]:
            if app_ids_only and not attribute["to_ids"]:
                continue

            # attribute_count = event_core["attribute_count"]
            attribute = event_core["Attribute"]

            for attribute in event_core["Attribute"]:
                if app_ids_only and not attribute["to_ids"]:
                    continue

                value = attribute["value"]
                title = event_core["info"]
                if app_netflow:
                    app_printcomment = False
                    if attribute["type"] == "ip-dst" and app_ip_dst:
                        network_ip_dst.append([build_entry(value, event_id, title, "ip-dst")])
            value = attribute["value"]
            title = event_core["info"]
            if app_netflow:
                app_printcomment = False
                if attribute["type"] == "ip-dst" and app_ip_dst:
                    network_ip_dst.append([build_entry(value, event_id, title, "ip-dst")])
                else:
                    if attribute["type"] == "ip-src" and app_ip_src:
                        network_ip_src.append([build_entry(value, event_id, title, "ip-src")])
                    elif attribute["type"] == "ip-dst" and app_ip_dst:
                        network_ip_dst.append([build_entry(value, event_id, title, "ip-dst")])
                    elif attribute["type"] == "domain" and app_domain:
                        network_domain.append([build_entry(value, event_id, title, "domain")])
                    elif attribute["type"] == "hostname" and app_hostname:
                        network_hostname.append([build_entry(value, event_id, title, "hostname")])
            else:
                if attribute["type"] == "ip-src" and app_ip_src:
                    network_ip_src.append([build_entry(value, event_id, title, "ip-src")])
                elif attribute["type"] == "ip-dst" and app_ip_dst:
                    network_ip_dst.append([build_entry(value, event_id, title, "ip-dst")])
                elif attribute["type"] == "domain" and app_domain:
                    network_domain.append([build_entry(value, event_id, title, "domain")])
                elif attribute["type"] == "hostname" and app_hostname:
                    network_hostname.append([build_entry(value, event_id, title, "hostname")])
                else:
                    continue
                    continue
    else:
        print("Not a valid ID")
        return
@@ -121,8 +114,8 @@ def print_events():
            if firsthost:
                firsthost = False
            else:
                print " or "
            print "host %s" % ip[0]
                print(" or ")
            print("host %s" % ip[0])
    else:
        if app_ip_src:
            for ip in network_ip_src:
@@ -0,0 +1,45 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-

from pymisp import PyMISP
from pymisp import Neo4j
from pymisp import MISPEvent
from keys import misp_url, misp_key
import argparse

"""
Sample Neo4J query:


MATCH ()-[r:has]->(n)
WITH n, count(r) as rel_cnt
WHERE rel_cnt > 5
MATCH (m)-[r:has]->(n)
RETURN m, n LIMIT 200;
"""

if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='Get all the events matching a value.')
    parser.add_argument("-s", "--search", required=True, help="String to search.")
    parser.add_argument("--host", default='localhost:7474', help="Host where neo4j is running.")
    parser.add_argument("-u", "--user", default='neo4j', help="User on neo4j.")
    parser.add_argument("-p", "--password", default='neo4j', help="Password on neo4j.")
    parser.add_argument("-d", "--deleteall", action="store_true", default=False, help="Delete all nodes from the database")
    args = parser.parse_args()

    neo4j = Neo4j(args.host, args.user, args.password)
    if args.deleteall:
        neo4j.del_all()
    misp = PyMISP(misp_url, misp_key)
    result = misp.search_all(args.search)
    for json_event in result['response']:
        if not json_event['Event']:
            print(json_event)
            continue
        print('Importing', json_event['Event']['info'], json_event['Event']['id'])
        try:
            misp_event = MISPEvent()
            misp_event.load(json_event)
            neo4j.import_event(misp_event)
        except:
            print('broken')
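Assuming the script above is saved as mispevent2neo4j.py (the filename, search term and Neo4j credentials below are placeholders), a typical run built from its own argparse options could look like:

```bash
# Illustrative invocation: wipe the graph, then import every event matching "wannacry"
python mispevent2neo4j.py -s wannacry --host localhost:7474 -u neo4j -p neo4j -d
```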
@@ -0,0 +1,25 @@
### Description

Python script to import IOC files into MISP.

### Requires

> python 2.7
> PyMISP
> BeautifulSoup (apt-get install python-bs4 python-lxml)

### Usage

```bash
python ioc2misp.py -i myioc -t "tag:mytag='sample','tag:other='foo'"
```

```bash
time find /iocsample -type f|while read line ;do python ioc2misp.py -i ${line};done
```

### Conf

* rename keys.py.sample as keys.py
* add your MISP url and API key in keys.py
* run the command in a terminal
@@ -0,0 +1,337 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-

# Format description
# @variables : camelCase
# @functions : snake_case

from keys import mispUrl, mispKey, csvTaxonomyFile, iocMispMapping

try:
    from pymisp import PyMISP
except:
    print("you need pymisp from github")
    import sys
    sys.exit(1)

import os
import argparse

try:
    from bs4 import BeautifulSoup
except:
    print("install BeautifulSoup : sudo apt-get install python-bs4 python-lxml")
    import sys
    sys.exit(1)


def misp_init(url, key):
    return PyMISP(url, key, False, 'json')


def check_valid_ioc():

    (filepath, filename) = os.path.split(iocDescriptions["iocfile"])
    (shortname, extension) = os.path.splitext(filename)

    if (("ioc" in extension)) and (sum(1 for _ in open(iocDescriptions["iocfile"])) > 1):
        iocDescriptions['filename'] = filename
        return True
    return False


def get_parse_ioc_file():
    return BeautifulSoup(open(iocDescriptions["iocfile"]), "lxml")


def parse_ioc_search_content(iocContextSearch):
    for k, v in iocMispMapping.items():
        if str(k).lower() == str(iocContextSearch).lower():
            return v
    return False


def create_attribute_json(iocContextSearch, attributeValue, attributeComment, force=False):
    #####################################
    # force is used for the descriptions to upload
    if force:
        parseResult = ("Other", "comment")
    else:
        parseResult = parse_ioc_search_content(iocContextSearch)

    if parseResult is False:

        print("/!\ Not implemented :: {0} :: {1} :: Item added as 'Other','Comment'. Add it in your keys.py".format(iocContextSearch, attributeValue))
        ########################################
        # force import to misp
        parseResult = ("Other", "comment")

    comment = ""
    try:
        comment = parseResult[2] + attributeComment
    except:
        comment = attributeComment

    attribute = {"category": parseResult[0],
                 "type": parseResult[1],
                 "value": attributeValue,
                 "timestamp": "0",
                 "to_ids": "0",
                 "distribution": "0",
                 "comment": comment
                 }
    return attribute


def create_attributes_from_ioc_json(soup):
    attributes = []

    IndicatorItemValues = {}
    for item in soup.find_all("indicatoritem"):

        if item.find('context'):
            IndicatorItemValues["context"] = str(item.find('context')['search'])
        else:
            IndicatorItemValues["context"] = ""
        if item.find('content'):
            IndicatorItemValues["content"] = str(item.find('content').text)
        else:
            IndicatorItemValues["content"] = ""
        if item.find('comment'):
            IndicatorItemValues["comment"] = str(item.find('comment').text)
        else:
            IndicatorItemValues["comment"] = ""

        jsonAttribute = create_attribute_json(IndicatorItemValues["context"], IndicatorItemValues["content"], IndicatorItemValues["comment"])
        attributes.append(jsonAttribute)

    return attributes


def create_misp_event_json(attributes):
    import time
    if iocDescriptions["authored_by"]:
        attributes.append(create_attribute_json(None, "authored_by", iocDescriptions["authored_by"], True))
    if iocDescriptions["authored_date"]:
        attributes.append(create_attribute_json(None, "authored_date", iocDescriptions["authored_date"], True))

    ##################################################
    # put the short-description in the "info" field
    # if it does not exist, use the description instead
    # if "info" = "short-description", add the description as a comment
    mispInfoFild = ""
    if iocDescriptions["short_description"]:
        mispInfoFild = iocDescriptions["short_description"]
        if iocDescriptions["description"]:
            attributes.append(create_attribute_json(None, "description", iocDescriptions["description"], True))
    else:
        if iocDescriptions["description"]:
            mispInfoFild = iocDescriptions["description"]
        else:
            mispInfoFild = "No description or short_description found in the IOC."

    eventJson = {"Event": {"info": mispInfoFild,
                           "timestamp": "1",
                           "attribute_count": 0,
                           "analysis": "0",
                           "date": time.strftime("%Y-%m-%d"),
                           "org": "",
                           "distribution": "0",
                           "Attribute": [],
                           "proposal_email_lock": False,
                           "threat_level_id": "4",
                           }}

    eventJson["Event"]["Attribute"] = attributes

    return eventJson


def get_descriptions(soup, description):
    if soup.find(description.lower()):
        return soup.find(description.lower()).text
    return ""


def save_ioc_description(soup):
    list_description = ["short_description", "authored_by", "authored_date", "description"]

    for description in list_description:
        iocDescriptions[description] = get_descriptions(soup, description)

    return


def get_taxonomy(soup):
    import csv
    taxonomy = []
    reader = csv.reader(open(csvTaxonomyFile, 'rb'), delimiter=';')
    #####################################
    # save file in a dict
    # r[0] = @link from csv
    # r[1] = @value from csv
    #      = value
    # r[2] = @keep
    #        0 : don't create tag
    #        1 : tag created
    # r[3] = @taxonomy

    csvdic = {i: r for i, r in enumerate(reader)}

    #########################################
    # find all links with soup
    for n in soup.find_all('link', rel=True):
        rel = str(n.attrs['rel'][0]).lower()

        ##########################
        # build special taxonomies
        # special handling because the link is an html value
        relValue = str(n.next_sibling).strip()
        if rel == 'family':
            if len(relValue) > 0:
                taxonomy.append("malware_classification:malware-family='" + relValue + "'")
        elif rel == 'threatgroup':
            if len(relValue) > 0:
                taxonomy.append("malware_classification:malware-threatgroup='" + relValue + "'")

        #########################
        # build taxonomy from csv match
        else:
            taxo = [r[3] for r in {i: r for i, r in csvdic.items() if r[0].lower() == rel and str(r[2]) == "1"}.values() if r[1].lower() == relValue.lower() and str(r[2]) == "1"]

            # taxonomy found in the correspondence file
            if (len(taxo) > 0 and taxo[0] != ''):
                taxonomy.append(taxo[0])
            # not found
    return taxonomy


def custum_color_tag(tagg):
    color = "#00ace6"
    if ":amber" in tagg:
        color = "#ffc200"
    if ":green:" in tagg:
        color = "#009933"
    if "tlp:green" in tagg:
        color = "#009933"
    if ":red:" in tagg:
        color = "#ff0000"
    if "tlp:red" in tagg:
        color = "#ff0000"
    if "tlp:white" in tagg:
        color = "#fafafa"
    return color


def push_event_to_misp(jsonEvent):
    global misp

    ####################
    # upload json event
    event = misp.add_event(jsonEvent)

    # save event id for file upload and tagging
    iocDescriptions["misp_event_id"] = event["Event"]["id"]

    return


def upload_file():

    # filename, path, eid, distrib, ids, categ, info, ids, analysis, threat
    misp.upload_sample(iocDescriptions['filename'],
                       iocDescriptions["iocfile"],
                       iocDescriptions["misp_event_id"],
                       "0",
                       False,
                       "External analysis",
                       iocDescriptions["short_description"],
                       None,
                       "1",
                       "4",
                       )
    return


def update_tag(listOfTagg):
    for tagg in listOfTagg:
        color = custum_color_tag(tagg)

        #############################
        # create tag in MISP

        misp.new_tag(str(tagg), str(color))
        #############################
        # link tag to MISP event
        toPost = {}
        toPost['Event'] = {'id': iocDescriptions["misp_event_id"]}
        misp.add_tag(toPost, str(tagg))
    return


def main():
    global misp
    global iocDescriptions
    iocDescriptions = {}

    ################################
    # parse for valid arguments
    parser = argparse.ArgumentParser(description='Get an event from a MISP instance.')
    parser.add_argument("-i", "--input", required=True, help="Input file")
    parser.add_argument("-t", "--tag", help="Add custom tags 'tlp:red,cossi:tmp=test'")
    args = parser.parse_args()

    iocDescriptions["iocfile"] = os.path.abspath(args.input)

    ################################
    # check that the file has an ioc extension and is not empty
    if check_valid_ioc():

        ################################
        # try to parse the file
        iocfileparse = get_parse_ioc_file()
    else:
        print("/!\ Bad format {0}".format(iocDescriptions["iocfile"]))
        return

    ################################
    # save descriptions used to create the event
    save_ioc_description(iocfileparse)

    ################################
    # parse the ioc and build json attributes
    jsonAttributes = create_attributes_from_ioc_json(iocfileparse)

    ################################
    # create a json misp event and append attributes
    jsonEvent = create_misp_event_json(jsonAttributes)

    ################################
    # try connection
    try:
        misp = misp_init(mispUrl, mispKey)
    except:
        print("/!\ Connection failed, bad url ({0}) or API key : {1}".format(mispUrl, mispKey))
        return

    ################################
    # add event to MISP
    push_event_to_misp(jsonEvent)

    ################################
    # upload the IOC file and close tmpfile
    upload_file()

    ################################
    # update the MISP event with tags from the IOC
    update_tag(get_taxonomy(iocfileparse))

    ################################
    # add custom tags (-t)
    if args.tag:
        customTag = args.tag
        update_tag(customTag.split(","))


if __name__ == '__main__':
    main()
@@ -0,0 +1,94 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-

mispUrl = ''
mispKey = ''

###############################
# file used for internal tags
# some samples can be found here :
# https://github.com/eset/malware-ioc
# https://github.com/fireeye/iocs
csvTaxonomyFile = "taxonomy.csv"

# csv delimiter : ";" with quotechar : "

###############################
# link sample
#~ <links>
#~   <link rel="threatcategory">APT</link>
#~   <link rel="threatgroup">APT12</link>
#~   <link rel="category">Backdoor</link>
#~   <link rel="license">Apache 2.0</link>
#~ </links>

# @link from csv
#   = rel attribute from <link>
# @value from csv
#   = value
# @keep
#   0 : don't create tag
#   1 : tag created
# @taxonomy
#   define tag for misp
# @comment
#   little description, not used


#########################################
# https://www.circl.lu/doc/misp/categories-and-types/index.html
# /\
# ||
# ||
# \/
# http://schemas.mandiant.com/

# @index = Context/search from the ioc
# @(1, 2, 3)
#   1. category mapping
#   2. type mapping
#   3. optional comment


iocMispMapping = {

    ('DriverItem/DriverName'): (u'Artifacts dropped', u'other', u'DriverName. '),

    ('DnsEntryItem/Host'): (u'Network activity', u'domain'),

    ('Email/To'): (u'Targeting data', u'target-email'),
    ('Email/Date'): (u'Other', u'comment', u'EmailDate. '),
    ('Email/Body'): (u'Payload delivery', u'email-subject'),
    ('Email/From'): (u'Payload delivery', u'email-dst'),
    ('Email/Subject'): (u'Payload delivery', u'email-subject'),
    ('Email/Attachment/Name'): (u'Payload delivery', u'email-attachment'),

    ('FileItem/Md5sum'): (u'External analysis', u'md5'),
    ('FileItem/Sha1sum'): (u'External analysis', u'sha1'),
    ('FileItem/FileName'): (u'External analysis', u'filename'),
    ('FileItem/FullPath'): (u'External analysis', u'filename'),
    ('FileItem/FilePath'): (u'External analysis', u'filename'),
    ('FileItem/Sha256sum'): (u'External analysis', u'sha256'),

    ('Network/URI'): (u'Network activity', u'uri'),
    ('Network/DNS'): (u'Network activity', u'domain'),
    ('Network/String'): (u'Network activity', u'ip-dst'),
    ('Network/UserAgent'): (u'Network activity', u'user-agent'),

    ('PortItem/localIP'): (u'Network activity', u'ip-dst'),

    ('ProcessItem/name'): (u'External analysis', u'pattern-in-memory', u'ProcessName. '),
    ('ProcessItem/path'): (u'External analysis', u'pattern-in-memory', u'ProcessPath. '),
    ('ProcessItem/Mutex'): (u'Artifacts dropped', u'mutex', u'mutex'),
    ('ProcessItem/Pipe/Name'): (u'Artifacts dropped', u'named pipe'),
    ('ProcessItem/Mutex/Name'): (u'Artifacts dropped', u'mutex', u'MutexName. '),

    ('RegistryItem/Text'): (u'Artifacts dropped', u'regkey', u'RegistryText. '),
    ('RegistryItem/Path'): (u'Artifacts dropped', u'regkey', u'RegistryPath. '),

    ('ServiceItem/name'): (u'Artifacts dropped', u'windows-service-name'),
    ('ServiceItem/type'): (u'Artifacts dropped', u'pattern-in-memory', u'ServiceType. '),

    ('Snort/Snort'): (u'Network activity', u'snort'),

}
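Extending the mapping only takes one more dictionary entry. The line below is a hypothetical illustration (the OpenIOC context and the chosen MISP category/type are assumptions for the example, not part of the shipped sample):

```python
# Hypothetical extra mapping: OpenIOC UserItem/UserName -> MISP 'Attribution'/'text'.
iocMispMapping[('UserItem/UserName')] = (u'Attribution', u'text', u'UserName. ')
```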
@@ -0,0 +1,12 @@
link,value,keep,taxonomy,comment
classification,TLP AMBER,1,tlp:amber,
classification,TLP GREEN,1,tlp:green,
confidential,TLP-AMBER,1,tlp:amber,
confidential,TLP GREEN,1,tlp:green,
confidential,TLP-GREEN,1,tlp:green,
confidential,TLP RED,1,tlp:red,
exportable,Yes,0,,
family,APT,1,malware_classification:malware-category='APT',
family,APT3,1,malware_classification:malware-category='APT3',https://github.com/fireeye/iocs/tree/master/APT3
license,Apache 2.0,0,,
threatcategory,APT3,1,malware_classification:malware-category='APT3',https://github.com/fireeye/iocs/tree/master/APT3
@@ -2,7 +2,7 @@
# -*- coding: utf-8 -*-

from pymisp import PyMISP
from keys import misp_url, misp_key,misp_verifycert
from keys import misp_url, misp_key, misp_verifycert
import argparse
import os
import json
@@ -0,0 +1,25 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-

from pymisp import PyMISP
from keys import misp_url, misp_key
import argparse

# For python2 & 3 compat, a bit dirty, but it seems to be the least bad one
try:
    input = raw_input
except NameError:
    pass


def init(url, key):
    return PyMISP(url, key, True, 'json')

if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='Get a list of the sharing groups from the MISP instance.')

    misp = init(misp_url, misp_key)

    sharing_groups = misp.get_sharing_groups()
    print sharing_groups
@@ -0,0 +1,2 @@
{"values":["www.google.com", "8.8.8.8"], "timestamp":1460558710}
@@ -0,0 +1,25 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-

from pymisp import PyMISP
from keys import misp_url, misp_key
import argparse

# For python2 & 3 compat, a bit dirty, but it seems to be the least bad one
try:
    input = raw_input
except NameError:
    pass


def init(url, key):
    return PyMISP(url, key, True, 'json')

if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='Add sighting.')
    parser.add_argument("-f", "--json_file", required=True, help="The name of the json file describing the attribute you want to add sighting to.")
    args = parser.parse_args()

    misp = init(misp_url, misp_key)

    misp.sighting_per_json(args.json_file)
@@ -0,0 +1,33 @@
## Explanation

* treemap.py is a script that will generate an interactive svg (attribute\_treemap.svg) containing a treemap representing the distribution of attributes in a sample (data) fetched from the instance using the "last" or "searchall" examples.
* It will also generate an html document with a table (attribute\_table.html) containing counts for each type of attribute.
* test\_attribute\_treemap.html is a quick page made to visualize both the treemap and the table at the same time.

* tags\_count.py is a script that counts the number of occurrences of every tag in a fetched sample of events in a given period of time.
* tag\_search.py is a script that counts the number of occurrences of a given tag in a fetched sample of events in a given period of time.
    * Events will be fetched from _days_ days ago to today.
    * _begindate_ is the beginning of the studied period. If it is later than today, an error will be raised.
    * _enddate_ is the end of the studied period. If it is earlier than _begindate_, an error will be raised.
    * tag\_search.py allows searching for multiple tags by separating each tag with the | symbol (see the usage sketch after the Requirements section).
    * Partial search is also possible with tag\_search.py. For instance, searching for "ransom" will also return tags containing "ransomware".

* tags\_to\_graphs.py is a script that will generate several plots to visualise tags distribution.
    * The studied _period_ can be either the last 7, 28 or 360 days.
    * _accuracy_ allows to get smaller splits of data instead of the default values.
    * _order_ defines the accuracy of the curve fitting. Default value is 3.
    * It will generate two plots comparing all the tags:
        * tags_repartition_plot, which presents the raw data.
        * tags_repartition_trend_plot, which presents the general evolution for each tag.
    * Then each taxonomy will be represented in three plots:
        * Raw data: in the "plot" folder, named with the name of the corresponding taxonomy.
        * Trend: in the "plot" folder, named _taxonomy_\_trend; the general evolution of the data (linear fitting, curve fitting at order 1).
        * Curve fitting: in the "plotlib" folder, named as the taxonomy it presents.
    * In order to visualize the last plots, an html file is also generated automatically (might be improved in the future).

:warning: These scripts are not time optimised

## Requirements

* [Pygal](https://github.com/Kozea/pygal/)
* [Matplotlib](https://github.com/matplotlib/matplotlib)
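As a usage sketch for tag\_search.py, built from the options its own argparse declares (-t, -d, -b, -e); the tag values and date windows are placeholders:

```bash
# Count events tagged "osint" or "OSINT" over the last 30 days
python tag_search.py -t "osint|OSINT" -d 30
# Same search with an explicit study window (placeholder dates)
python tag_search.py -t "ransom" -d 90 -b 2016-01-01 -e 2016-03-31
```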
@@ -0,0 +1,31 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-

from pymisp import PyMISP
from keys import misp_url, misp_key, misp_verifycert
import argparse
import tools


if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='Take a sample of events (based on last.py or searchall.py) and create a treemap representing the distribution of attributes in this sample.')
    parser.add_argument("-f", "--function", required=True, help='The parameter can be either set to "last" or "searchall". If the parameter is not valid, "last" will be the default setting.')
    parser.add_argument("-a", "--argument", required=True, help='if function is "last", time can be defined in days, hours, minutes (for example 5d or 12h or 30m). Otherwise, this argument is the string to search')

    args = parser.parse_args()

    misp = PyMISP(misp_url, misp_key, misp_verifycert, 'json')

    if args.function == "searchall":
        result = misp.search_all(args.argument)
    else:
        result = misp.download_last(args.argument)

    if 'response' in result:
        events = tools.eventsListBuildFromArray(result)
        attributes = tools.attributesListBuild(events)
        temp = tools.getNbAttributePerEventCategoryType(attributes)
        temp = temp.groupby(level=['category', 'type']).sum()
        tools.createTreemap(temp, 'Attributes Distribution', 'attribute_treemap.svg', 'attribute_table.html')
    else:
        print('There is no event answering the research criteria')
@@ -0,0 +1,50 @@
body
{
    /*font-family: "Helvetica Neue", Helvetica, Arial, sans-serif;*/
    font-family: Consolas, "Liberation Mono", Menlo, Courier, monospace;
}

h1
{
    font-size: 16px;
    width: 290px;
    text-align: center;
}

/*** Stats Tables ***/

table
{
    border-collapse: collapse;
    border-spacing: 0;
    border: 1px solid #cbcbcb;
}

tbody
{
    font-size: 12px;
}

table td
{
    border-left: 1px solid #cbcbcb;
    border-width: 0 0 0 1px;
    width: 500px;
    margin: 0;
    padding: 0.5em 1em;
}

.test
{
    width: 500px;
}

table tr:nth-child(2n-1) td
{
    background-color: #f2f2f2;
}

table tr td:first-child
{
    font-weight: bold;
}
@@ -0,0 +1,41 @@
body
{
    /*font-family: "Helvetica Neue", Helvetica, Arial, sans-serif;*/
    font-family: Consolas, "Liberation Mono", Menlo, Courier, monospace;
}

h1
{
    font-size: 16px;
    width: 290px;
    text-align: center;
}

/*** Stats Tables ***/

table
{
    border-collapse: collapse;
    border-spacing: 0;
    table-layout: fixed;
    width: 6000px;
    border: 1px solid #cbcbcb;
}

tbody
{
    font-size: 12px;
}

td
{
    border-left: 1px solid #cbcbcb;
    border-width: 0 0 0 1px;
    margin: 0;
    padding: 0.5em 1em;
}

table tr td:first-child
{
    font-weight: bold;
}
@@ -0,0 +1,69 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-

from pymisp import PyMISP
from keys import misp_url, misp_key, misp_verifycert
from datetime import datetime
import argparse
import tools


def init(url, key):
    return PyMISP(url, key, misp_verifycert, 'json')

# ######### fetch data ##########


if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='Take a sample of events (based on last.py) and give the number of occurrences of the given tag in this sample.')
    parser.add_argument("-t", "--tag", required=True, help="tag to search (searching for multiple tags is possible by using |. example : \"osint|OSINT\")")
    parser.add_argument("-d", "--days", type=int, help="number of days before today to search. If not defined, default value is 7")
    parser.add_argument("-b", "--begindate", help="The search will look for tags attached to events posted at or after the given startdate (format: yyyy-mm-dd). If no date is given, default time is epoch time (1970-1-1)")
    parser.add_argument("-e", "--enddate", help="The search will look for tags attached to events posted at or before the given enddate (format: yyyy-mm-dd). If no date is given, default time is now()")

    args = parser.parse_args()

    misp = init(misp_url, misp_key)

    if args.days is None:
        args.days = 7
    result = misp.search(last='{}d'.format(args.days), metadata=True)

    tools.checkDateConsistancy(args.begindate, args.enddate, tools.getLastdate(args.days))

    if args.begindate is None:
        args.begindate = tools.getLastdate(args.days)
    else:
        args.begindate = tools.setBegindate(tools.toDatetime(args.begindate), tools.getLastdate(args.days))

    if args.enddate is None:
        args.enddate = datetime.now()
    else:
        args.enddate = tools.setEnddate(tools.toDatetime(args.enddate))

    if 'response' in result:
        events = tools.selectInRange(tools.eventsListBuildFromArray(result), begin=args.begindate, end=args.enddate)
        totalPeriodEvents = tools.getNbitems(events)
        tags = tools.tagsListBuild(events)
        result = tools.isTagIn(tags, args.tag)
        totalPeriodTags = len(result)

        text = 'Studied period: from '
        if args.begindate is None:
            text = text + '1970-01-01'
        else:
            text = text + str(args.begindate.date())
        text = text + ' to '
        if args.enddate is None:
            text = text + str(datetime.now().date())
        else:
            text = text + str(args.enddate.date())

        print('\n========================================================')
        print(text)
        print('During the studied period, ' + str(totalPeriodTags) + ' events out of ' + str(totalPeriodEvents) + ' contain at least one tag with ' + args.tag + '.')
        if totalPeriodEvents != 0:
            print('It represents {}% of the events in this period.'.format(round(100 * totalPeriodTags / totalPeriodEvents, 3)))
    else:
        print('There is no event answering the research criteria')
@@ -0,0 +1,63 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-

from pymisp import PyMISP
from keys import misp_url, misp_key, misp_verifycert
from datetime import datetime
import argparse
import tools


def init(url, key):
    return PyMISP(url, key, misp_verifycert, 'json')

# ######### fetch data ##########


if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='Take a sample of events (based on last.py) and give the repartition of tags in this sample.')
    parser.add_argument("-d", "--days", type=int, help="number of days before today to search. If not defined, default value is 7")
    parser.add_argument("-b", "--begindate", default='1970-01-01', help="The search will look for tags attached to events posted at or after the given startdate (format: yyyy-mm-dd). If no date is given, default time is epoch time (1970-1-1)")
    parser.add_argument("-e", "--enddate", help="The search will look for tags attached to events posted at or before the given enddate (format: yyyy-mm-dd). If no date is given, default time is now()")

    args = parser.parse_args()

    misp = init(misp_url, misp_key)

    if args.days is None:
        args.days = 7
    result = misp.search(last='{}d'.format(args.days), metadata=True)

    tools.checkDateConsistancy(args.begindate, args.enddate, tools.getLastdate(args.days))

    if args.begindate is None:
        args.begindate = tools.getLastdate(args.days)
    else:
        args.begindate = tools.setBegindate(tools.toDatetime(args.begindate), tools.getLastdate(args.days))

    if args.enddate is None:
        args.enddate = datetime.now()
    else:
        args.enddate = tools.setEnddate(tools.toDatetime(args.enddate))

    if 'response' in result:
        events = tools.selectInRange(tools.eventsListBuildFromArray(result), begin=args.begindate, end=args.enddate)
        tags = tools.tagsListBuild(events)
        result = tools.getNbOccurenceTags(tags)
    else:
        result = 'There is no event during the studied period'

    text = 'Studied period: from '
    if args.begindate is None:
        text = text + '1970-01-01'
    else:
        text = text + str(args.begindate.date())
    text = text + ' to '
    if args.enddate is None:
        text = text + str(datetime.now().date())
    else:
        text = text + str(args.enddate.date())

    print('\n========================================================')
    print(text)
    print(result)
@@ -0,0 +1,94 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-

from pymisp import PyMISP
from keys import misp_url, misp_key, misp_verifycert
import argparse
import tools


def formattingDataframe(dataframe, dates, NanValue):
    dataframe.reverse()
    dates.reverse()
    dataframe = tools.concat(dataframe)
    dataframe = tools.renameColumns(dataframe, dates)
    dataframe = tools.replaceNaN(dataframe, 0)
    return dataframe

if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='Show the evolution of trend of tags.')
    parser.add_argument("-p", "--period", help='Define the studied period. Can be the past year (y), month (m) or week (w). Week is the default value if no valid value is given.')
    parser.add_argument("-a", "--accuracy", help='Define the accuracy of the splits on the studied period. Can be per month (m) -for year only-, week (w) -month only- or day (d). The default value is always the biggest available.')
    parser.add_argument("-o", "--order", type=int, help='Define the accuracy of the curve fitting. Default value is 3')

    args = parser.parse_args()

    misp = PyMISP(misp_url, misp_key, misp_verifycert)

    if args.period == "y":
        if args.accuracy == "d":
            split = 360
            size = 1
        else:
            split = 12
            size = 30
        last = '360d'
        title = 'Tags repartition over the last 360 days'
    elif args.period == "m":
        if args.accuracy == "d":
            split = 28
            size = 1
        else:
            split = 4
            size = 7
        last = '28d'
        title = 'Tags repartition over the last 28 days'
    else:
        split = 7
        size = 1
        last = '7d'
        title = 'Tags repartition over the last 7 days'

    result = misp.search(last=last, metadata=True)
    if 'response' in result:
        events = tools.eventsListBuildFromArray(result)
        result = []
        dates = []
        enddate = tools.getToday()
        colourDict = {}
        faketag = False

        for i in range(split):
            begindate = tools.getNDaysBefore(enddate, size)
            dates.append(str(enddate.date()))
            eventstemp = tools.selectInRange(events, begin=begindate, end=enddate)
            if eventstemp is not None:
                tags = tools.tagsListBuild(eventstemp)
                if tags is not None:
                    tools.createDictTagsColour(colourDict, tags)
                    result.append(tools.getNbOccurenceTags(tags))
                else:
                    result.append(tools.createFakeEmptyTagsSeries())
                    faketag = True
            else:
                result.append(tools.createFakeEmptyTagsSeries())
                faketag = True
            enddate = begindate

        result = formattingDataframe(result, dates, 0)
        if faketag:
            result = tools.removeFaketagRow(result)

        taxonomies, emptyOther = tools.getTaxonomies(tools.getCopyDataframe(result))

        tools.tagsToLineChart(tools.getCopyDataframe(result), title, dates, colourDict)
        tools.tagstrendToLineChart(tools.getCopyDataframe(result), title, dates, split, colourDict)
        tools.tagsToTaxoLineChart(tools.getCopyDataframe(result), title, dates, colourDict, taxonomies, emptyOther)
        tools.tagstrendToTaxoLineChart(tools.getCopyDataframe(result), title, dates, split, colourDict, taxonomies, emptyOther)
        if args.order is None:
            args.order = 3
        tools.tagsToPolyChart(tools.getCopyDataframe(result), split, colourDict, taxonomies, emptyOther, args.order)
        tools.createVisualisation(taxonomies)

    else:
        print('There is no event during the studied period')
@@ -0,0 +1,26 @@
<html>
<head>
<style>
    #stats
    {
        height: 746px;
        margin-top: 100px;
    }

    #treemap
    {
        width: 1000px;
    }
</style>
</head>
<body>
    <table><tr>
        <td><iframe id="stats" src="attribute_table.html" frameBorder="0"></iframe></td>
        <td id="treemap"><object type="image/svg+xml" data="attribute_treemap.svg"></object></td>
    </tr></table>
    <!--
    <div id="stats"><iframe src="table.html"></iframe></div>
    <div id="treemap"><object type="image/svg+xml" data="test.svg"></object></div>
    -->
</body>
</html>
@ -0,0 +1,495 @@
|
|||
#!/usr/bin/env python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
from json import JSONDecoder
|
||||
import random
|
||||
import pygal
|
||||
from pygal.style import Style
|
||||
import pandas
|
||||
from datetime import datetime
|
||||
from datetime import timedelta
|
||||
from dateutil.parser import parse
|
||||
import numpy
|
||||
from scipy import stats
|
||||
from pytaxonomies import Taxonomies
|
||||
import re
|
||||
import matplotlib.pyplot as plt
|
||||
from matplotlib import pylab
|
||||
import os
|
||||
|
||||
|
||||
class DateError(Exception):
|
||||
def __init__(self, value):
|
||||
self.value = value
|
||||
|
||||
def __str__(self):
|
||||
return repr(self.value)
|
||||
|
||||
|
||||
# ############### Date Tools ################
|
||||
|
||||
def dateInRange(datetimeTested, begin=None, end=None):
|
||||
if begin is None:
|
||||
begin = datetime(1970, 1, 1)
|
||||
if end is None:
|
||||
end = datetime.now()
|
||||
return begin <= datetimeTested <= end
|
||||
|
||||
|
||||
def toDatetime(date):
|
||||
return parse(date)
|
||||
|
||||
|
||||
def checkDateConsistancy(begindate, enddate, lastdate):
|
||||
if begindate is not None and enddate is not None:
|
||||
if begindate > enddate:
|
||||
raise DateError('begindate ({}) cannot be after enddate ({})'.format(begindate, enddate))
|
||||
|
||||
if enddate is not None:
|
||||
if toDatetime(enddate) < lastdate:
|
||||
raise DateError('enddate ({}) cannot be before lastdate ({})'.format(enddate, lastdate))
|
||||
|
||||
if begindate is not None:
|
||||
if toDatetime(begindate) > datetime.now():
|
||||
raise DateError('begindate ({}) cannot be after today ({})'.format(begindate, datetime.now().date()))
|
||||
|
||||
|
||||
def setBegindate(begindate, lastdate):
|
||||
return max(begindate, lastdate)
|
||||
|
||||
|
||||
def setEnddate(enddate):
|
||||
return min(enddate, datetime.now())
|
||||
|
||||
|
||||
def getLastdate(last):
|
||||
return (datetime.now() - timedelta(days=int(last))).replace(hour=0, minute=0, second=0, microsecond=0)
|
||||
|
||||
|
||||
def getNDaysBefore(date, days):
|
||||
return (date - timedelta(days=days)).replace(hour=0, minute=0, second=0, microsecond=0)
|
||||
|
||||
|
||||
def getToday():
|
||||
return (datetime.now()).replace(hour=0, minute=0, second=0, microsecond=0)
|
||||
|
||||
|
||||
# ############### Tools ################
|
||||
|
||||
|
||||
def getTaxonomies(dataframe):
|
||||
taxonomies = Taxonomies()
|
||||
taxonomies = list(taxonomies.keys())
|
||||
notInTaxo = []
|
||||
count = 0
|
||||
for taxonomy in taxonomies:
|
||||
empty = True
|
||||
for it in dataframe.iterrows():
|
||||
if it[0].startswith(taxonomy):
|
||||
empty = False
|
||||
dataframe = dataframe.drop([it[0]])
|
||||
count = count + 1
|
||||
if empty is True:
|
||||
notInTaxo.append(taxonomy)
|
||||
if dataframe.empty:
|
||||
emptyOther = True
|
||||
else:
|
||||
emptyOther = False
|
||||
for taxonomy in notInTaxo:
|
||||
taxonomies.remove(taxonomy)
|
||||
return taxonomies, emptyOther
|
||||
|
||||
|
||||
def buildDoubleIndex(index1, index2, datatype):
|
||||
it = -1
|
||||
newindex1 = []
|
||||
for index in index2:
|
||||
if index == 0:
|
||||
it += 1
|
||||
newindex1.append(index1[it])
|
||||
arrays = [newindex1, index2]
|
||||
tuples = list(zip(*arrays))
|
||||
return pandas.MultiIndex.from_tuples(tuples, names=['event', datatype])
|
||||
|
||||
|
||||
def buildNewColumn(index2, column):
|
||||
it = -1
|
||||
newcolumn = []
|
||||
for index in index2:
|
||||
if index == 0:
|
||||
it += 1
|
||||
newcolumn.append(column[it])
|
||||
return newcolumn
|
||||
|
||||
|
||||
def addColumn(dataframe, columnList, columnName):
|
||||
dataframe.loc[:, columnName] = pandas.Series(columnList, index=dataframe.index)
|
||||
|
||||
|
||||
def concat(data):
|
||||
return pandas.concat(data, axis=1)
|
||||
|
||||
|
||||
def createFakeEmptyTagsSeries():
|
||||
return pandas.Series({'Faketag': 0})
|
||||
|
||||
|
||||
def removeFaketagRow(dataframe):
|
||||
return dataframe.drop(['Faketag'])
|
||||
|
||||
|
||||
def getCopyDataframe(dataframe):
|
||||
return dataframe.copy()
|
||||
|
||||
|
||||
def createDictTagsColour(colourDict, tags):
|
||||
temp = tags.groupby(['name', 'colour']).count()['id']
|
||||
levels_name = temp.index.levels[0]
|
||||
levels_colour = temp.index.levels[1]
|
||||
labels_name = temp.index.labels[0]
|
||||
labels_colour = temp.index.labels[1]
|
||||
|
||||
for i in range(len(labels_name)):
|
||||
colourDict[levels_name[labels_name[i]]] = levels_colour[labels_colour[i]]
|
||||
|
||||
|
||||
def createTagsPlotStyle(dataframe, colourDict, taxonomy=None):
|
||||
colours = []
|
||||
if taxonomy is not None:
|
||||
for it in dataframe.iterrows():
|
||||
if it[0].startswith(taxonomy):
|
||||
colours.append(colourDict[it[0]])
|
||||
else:
|
||||
for it in dataframe.iterrows():
|
||||
colours.append(colourDict[it[0]])
|
||||
|
||||
style = Style(background='transparent',
|
||||
plot_background='#eeeeee',
|
||||
foreground='#111111',
|
||||
foreground_strong='#111111',
|
||||
foreground_subtle='#111111',
|
||||
opacity='.6',
|
||||
opacity_hover='.9',
|
||||
transition='400ms ease-in',
|
||||
colors=tuple(colours))
|
||||
return style
|
||||
|
||||
# ############### Formatting ################
|
||||
|
||||
|
||||
def eventsListBuildFromList(filename):
|
||||
with open(filename, 'r') as myfile:
|
||||
s = myfile.read().replace('\n', '')
|
||||
decoder = JSONDecoder()
|
||||
s_len = len(s)
|
||||
Events = []
|
||||
end = 0
|
||||
while end != s_len:
|
||||
Event, end = decoder.raw_decode(s, idx=end)
|
||||
Events.append(Event)
|
||||
data = []
|
||||
for e in Events:
|
||||
data.append(pandas.DataFrame.from_dict(e, orient='index'))
|
||||
Events = pandas.concat(data)
|
||||
for it in range(Events['attribute_count'].size):
|
||||
if Events['attribute_count'][it] is None:
|
||||
Events['attribute_count'][it] = '0'
|
||||
else:
|
||||
Events['attribute_count'][it] = int(Events['attribute_count'][it])
|
||||
Events = Events.set_index('id')
|
||||
return Events
|
||||
|
||||
|
||||
def eventsListBuildFromArray(jdata):
|
||||
'''
|
||||
returns a structure listing all primary events in the sample
|
||||
'''
|
||||
data = [pandas.DataFrame.from_dict(e, orient='index') for e in jdata['response']]
|
||||
events = pandas.concat(data)
|
||||
events = events.set_index(['id'])
|
||||
return events
|
||||
|
||||
|
||||
def attributesListBuild(events):
|
||||
attributes = [pandas.DataFrame(attribute) for attribute in events['Attribute']]
|
||||
return pandas.concat(attributes)
|
||||
|
||||
|
||||
def tagsListBuild(Events):
|
||||
Tags = []
|
||||
if 'Tag' in Events.columns:
|
||||
for Tag in Events['Tag']:
|
||||
if type(Tag) is not list:
|
||||
continue
|
||||
Tags.append(pandas.DataFrame(Tag))
|
||||
if Tags:
|
||||
Tags = pandas.concat(Tags)
|
||||
columnDate = buildNewColumn(Tags.index, Events['date'])
|
||||
addColumn(Tags, columnDate, 'date')
|
||||
index = buildDoubleIndex(Events.index, Tags.index, 'tag')
|
||||
Tags = Tags.set_index(index)
|
||||
else:
|
||||
Tags = None
|
||||
return Tags
|
||||
|
||||
|
||||
def selectInRange(Events, begin=None, end=None):
|
||||
inRange = []
|
||||
for i, Event in Events.iterrows():
|
||||
if dateInRange(parse(Event['date']), begin, end):
|
||||
inRange.append(Event.tolist())
|
||||
inRange = pandas.DataFrame(inRange)
|
||||
temp = Events.columns.tolist()
|
||||
if inRange.empty:
|
||||
return None
|
||||
inRange.columns = temp
|
||||
return inRange
|
||||
|
||||
|
||||
def isTagIn(dataframe, tag):
|
||||
temp = dataframe[dataframe['name'].str.contains(tag)].index.tolist()
|
||||
index = []
|
||||
for i in range(len(temp)):
|
||||
if temp[i][0] not in index:
|
||||
index.append(temp[i][0])
|
||||
return index
|
||||
|
||||
|
||||
def renameColumns(dataframe, namelist):
|
||||
dataframe.columns = namelist
|
||||
return dataframe
|
||||
|
||||
|
||||
def replaceNaN(dataframe, value):
|
||||
return dataframe.fillna(value)
|
||||
|
||||
# ############### Basic Stats ################
|
||||
|
||||
|
||||
def getNbitems(dataframe):
|
||||
return len(dataframe.index)
|
||||
|
||||
|
||||
def getNbAttributePerEventCategoryType(attributes):
|
||||
return attributes.groupby(['event_id', 'category', 'type']).count()['id']
|
||||
|
||||
|
||||
def getNbOccurenceTags(Tags):
|
||||
return Tags.groupby('name').count()['id']
|
||||
|
||||
# ############### Charts ################
|
||||
|
||||
|
||||
def createTable(colors, categ_types_hash, tablename='attribute_table.html'):
|
||||
with open(tablename, 'w') as target:
|
||||
target.write('<!DOCTYPE html>\n<html>\n<head>\n<link rel="stylesheet" href="style.css">\n</head>\n<body>')
|
||||
for categ_name, types in categ_types_hash.items():
|
||||
table = pygal.Treemap(pretty_print=True)
|
||||
target.write('\n <h1 style="color:{};">{}</h1>\n'.format(colors[categ_name], categ_name))
|
||||
for d in types:
|
||||
table.add(d['label'], d['value'])
|
||||
target.write(table.render_table(transpose=True))
|
||||
target.write('\n</body>\n</html>')
|
||||
|
||||
|
||||
def createTreemap(data, title, treename='attribute_treemap.svg', tablename='attribute_table.html'):
|
||||
labels_categ = data.index.labels[0]
|
||||
labels_types = data.index.labels[1]
|
||||
names_categ = data.index.levels[0]
|
||||
names_types = data.index.levels[1]
|
||||
categ_types_hash = {}
|
||||
for categ_id, type_val, total in zip(labels_categ, labels_types, data):
|
||||
if not categ_types_hash.get(names_categ[categ_id]):
|
||||
categ_types_hash[names_categ[categ_id]] = []
|
||||
dict_to_print = {'label': names_types[type_val], 'value': total}
|
||||
categ_types_hash[names_categ[categ_id]].append(dict_to_print)
|
||||
|
||||
colors = {categ: "#%06X" % random.randint(0, 0xFFFFFF) for categ in categ_types_hash.keys()}
|
||||
style = Style(background='transparent',
|
||||
plot_background='#FFFFFF',
|
||||
foreground='#111111',
|
||||
foreground_strong='#111111',
|
||||
foreground_subtle='#111111',
|
||||
opacity='.6',
|
||||
opacity_hover='.9',
|
||||
transition='400ms ease-in',
|
||||
colors=tuple(colors.values()))
|
||||
|
||||
treemap = pygal.Treemap(pretty_print=True, legend_at_bottom=True, style=style)
|
||||
treemap.title = title
|
||||
treemap.print_values = True
|
||||
treemap.print_labels = True
|
||||
|
||||
for categ_name, types in categ_types_hash.items():
|
||||
treemap.add(categ_name, types)
|
||||
|
||||
createTable(colors, categ_types_hash)
|
||||
treemap.render_to_file(treename)
|
||||
|
||||
|
||||
def tagsToLineChart(dataframe, title, dates, colourDict):
|
||||
style = createTagsPlotStyle(dataframe, colourDict)
|
||||
line_chart = pygal.Line(x_label_rotation=20, style=style, show_legend=False)
|
||||
line_chart.title = title
|
||||
line_chart.x_labels = dates
|
||||
for it in dataframe.iterrows():
|
||||
line_chart.add(it[0], it[1].tolist())
|
||||
line_chart.render_to_file('tags_repartition_plot.svg')
|
||||
|
||||
|
||||
def tagstrendToLineChart(dataframe, title, dates, split, colourDict):
|
||||
style = createTagsPlotStyle(dataframe, colourDict)
|
||||
line_chart = pygal.Line(x_label_rotation=20, style=style, show_legend=False)
|
||||
line_chart.title = title
|
||||
line_chart.x_labels = dates
|
||||
xi = numpy.arange(split)
|
||||
for it in dataframe.iterrows():
|
||||
slope, intercept, r_value, p_value, std_err = stats.linregress(xi, it[1])
|
||||
line = slope * xi + intercept
|
||||
line_chart.add(it[0], line, show_dots=False)
|
||||
line_chart.render_to_file('tags_repartition_trend_plot.svg')
|
||||
|
||||
|
||||
def tagsToTaxoLineChart(dataframe, title, dates, colourDict, taxonomies, emptyOther):
|
||||
style = createTagsPlotStyle(dataframe, colourDict)
|
||||
line_chart = pygal.Line(x_label_rotation=20, style=style)
|
||||
line_chart.title = title
|
||||
line_chart.x_labels = dates
|
||||
for taxonomy in taxonomies:
|
||||
taxoStyle = createTagsPlotStyle(dataframe, colourDict, taxonomy)
|
||||
taxo_line_chart = pygal.Line(x_label_rotation=20, style=taxoStyle)
|
||||
taxo_line_chart.title = title + ': ' + taxonomy
|
||||
taxo_line_chart.x_labels = dates
|
||||
for it in dataframe.iterrows():
|
||||
if it[0].startswith(taxonomy):
|
||||
                taxo_line_chart.add(re.sub(taxonomy + ':', '', it[0]), it[1].tolist())
                dataframe = dataframe.drop([it[0]])
        taxo_line_chart.render_to_file('plot/' + taxonomy + '.svg')

    if not emptyOther:
        taxoStyle = createTagsPlotStyle(dataframe, colourDict)
        taxo_line_chart = pygal.Line(x_label_rotation=20, style=taxoStyle)
        taxo_line_chart.title = title + ': other'
        taxo_line_chart.x_labels = dates
        for it in dataframe.iterrows():
            taxo_line_chart.add(it[0], it[1].tolist())
        taxo_line_chart.render_to_file('plot/other.svg')


def tagstrendToTaxoLineChart(dataframe, title, dates, split, colourDict, taxonomies, emptyOther):
    style = createTagsPlotStyle(dataframe, colourDict)
    line_chart = pygal.Line(x_label_rotation=20, style=style)
    line_chart.title = title
    line_chart.x_labels = dates
    xi = numpy.arange(split)
    for taxonomy in taxonomies:
        taxoStyle = createTagsPlotStyle(dataframe, colourDict, taxonomy)
        taxo_line_chart = pygal.Line(x_label_rotation=20, style=taxoStyle)
        taxo_line_chart.title = title + ': ' + taxonomy
        taxo_line_chart.x_labels = dates
        for it in dataframe.iterrows():
            if it[0].startswith(taxonomy):
                slope, intercept, r_value, p_value, std_err = stats.linregress(xi, it[1])
                line = slope * xi + intercept
                taxo_line_chart.add(re.sub(taxonomy + ':', '', it[0]), line, show_dots=False)
                dataframe = dataframe.drop([it[0]])
        taxo_line_chart.render_to_file('plot/' + taxonomy + '_trend.svg')

    if not emptyOther:
        taxoStyle = createTagsPlotStyle(dataframe, colourDict)
        taxo_line_chart = pygal.Line(x_label_rotation=20, style=taxoStyle)
        taxo_line_chart.title = title + ': other'
        taxo_line_chart.x_labels = dates
        for it in dataframe.iterrows():
            slope, intercept, r_value, p_value, std_err = stats.linregress(xi, it[1])
            line = slope * xi + intercept
            taxo_line_chart.add(it[0], line, show_dots=False)
        taxo_line_chart.render_to_file('plot/other_trend.svg')


def tagsToPolyChart(dataframe, split, colourDict, taxonomies, emptyOther, order):
    for taxonomy in taxonomies:
        for it in dataframe.iterrows():
            if it[0].startswith(taxonomy):
                points = []
                for i in range(split):
                    points.append((i, it[1][i]))
                color = colourDict[it[0]]
                label = re.sub(taxonomy + ':', '', it[0])
                points = numpy.array(points)
                dataframe = dataframe.drop([it[0]])

                # get x and y vectors
                x = points[:, 0]
                y = points[:, 1]

                # calculate polynomial
                z = numpy.polyfit(x, y, order)
                f = numpy.poly1d(z)

                # calculate new x's and y's
                x_new = numpy.linspace(x[0], x[-1], 50)
                y_new = f(x_new)

                plt.plot(x, y, '.', color=color)
                plt.plot(x_new, y_new, color=color, label=label + ' trend')

        pylab.title('Polynomial Fit with Matplotlib: ' + taxonomy)
        pylab.legend(loc='center left', bbox_to_anchor=(1, 0.5))
        ax = plt.gca()
        # ax.set_facecolor((0.898, 0.898, 0.898))
        box = ax.get_position()
        ax.set_position([box.x0 - 0.01, box.y0, box.width * 0.78, box.height])
        fig = plt.gcf()
        fig.set_size_inches(20, 15)
        fig.savefig('plotlib/' + taxonomy + '.png')
        fig.clf()

    if not emptyOther:
        for it in dataframe.iterrows():
            points = []
            for i in range(split):
                points.append((i, it[1][i]))

            color = colourDict[it[0]]
            label = it[0]
            points = numpy.array(points)

            # get x and y vectors
            x = points[:, 0]
            y = points[:, 1]

            # calculate polynomial
            z = numpy.polyfit(x, y, order)
            f = numpy.poly1d(z)

            # calculate new x's and y's
            x_new = numpy.linspace(x[0], x[-1], 50)
            y_new = f(x_new)

            plt.plot(x, y, '.', color=color, label=label)
            plt.plot(x_new, y_new, color=color, label=label + ' trend')

        pylab.title('Polynomial Fit with Matplotlib: other')
        pylab.legend(loc='center left', bbox_to_anchor=(1, 0.5))
        ax = plt.gca()
        # ax.set_facecolor((0.898, 0.898, 0.898))
        box = ax.get_position()
        ax.set_position([box.x0 - 0.01, box.y0, box.width * 0.78, box.height])
        fig = plt.gcf()
        fig.set_size_inches(20, 15)
        fig.savefig('plotlib/other.png')


def createVisualisation(taxonomies):
    chain = '<!DOCTYPE html>\n<html>\n\t<head>\n\t\t<link rel="stylesheet" href="style2.css">\n\t</head>\n\t<body>'
    chain = chain + '<table>'
    for taxonomy in taxonomies:
        chain = chain + '<tr><td><object type="image/svg+xml" data="plot/' + taxonomy + '.svg"></object></td><td><img src="plotlib/' + taxonomy + '.png" alt="graph" /></td><td><object type="image/svg+xml" data="plot/' + taxonomy + '_trend.svg"></object></td></tr>\n'

    chain = chain + '<tr><td><object type="image/svg+xml" data="plot/other.svg"></object></td><td><img src="plotlib/other.png" alt="graph" /></td><td><object type="image/svg+xml" data="plot/other_trend.svg"></object></td></tr>\n'
    chain = chain + '</table>'
    chain = chain + '\n\t</body>\n</html>'

    with open('test_tags_trend.html', 'w') as target:
        target.write(chain)
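All the trend curves above follow the same numpy recipe: fit a degree-`order` polynomial to the per-tag counts, then resample the fitted polynomial on a denser grid so the plotted line looks smooth. A minimal, self-contained sketch of that pattern, using made-up counts (variable names are illustrative, not from the script):

```python
import numpy

counts = [3, 5, 4, 8, 9, 12, 15]          # hypothetical daily tag counts
x = numpy.arange(len(counts))
z = numpy.polyfit(x, counts, 2)            # coefficients of a degree-2 fit
f = numpy.poly1d(z)                        # callable polynomial built from them
x_new = numpy.linspace(x[0], x[-1], 50)    # denser grid for a smooth curve
y_new = f(x_new)                           # trend values on that grid
print(z)
print(y_new[:5])
```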
@@ -0,0 +1,19 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-

from pymisp import PyMISP
from keys import misp_url, misp_key, misp_verifycert
import argparse


def init(url, key):
    return PyMISP(url, key, misp_verifycert, 'json')

if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='Output attributes statistics from a MISP instance.')
    args = parser.parse_args()

    misp = init(misp_url, misp_key)

    print(misp.get_attributes_statistics(percentage=True))
    print(misp.get_attributes_statistics(context='category', percentage=True))
@@ -7,7 +7,7 @@ import argparse


 def init(url, key):
-    return PyMISP(url, key, True, 'json')
+    return PyMISP(url, key, True)


 def fetch(m, all_events, event):
@@ -0,0 +1,27 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-

from pymisp import PyMISP
from keys import misp_url, misp_key
import argparse
import json


def init(url, key):
    return PyMISP(url, key, True, 'json', True)


def get_tags(m):
    result = m.get_all_tags(True)
    print(json.dumps(result) + '\n')


if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='Get tags from a MISP instance.')

    args = parser.parse_args()

    misp = init(misp_url, misp_key)

    get_tags(misp)
@@ -0,0 +1,28 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-

from pymisp import PyMISP
from keys import misp_url, misp_key, misp_verifycert
import argparse
import json

# For python2 & 3 compat, a bit dirty, but it seems to be the least bad one
try:
    input = raw_input
except NameError:
    pass


def init(url, key):
    return PyMISP(url, key, misp_verifycert, 'json')

if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='Get statistics from tags.')
    parser.add_argument("-p", "--percentage", action='store_true', default=None, help="An optional flag: if set, the results are returned as percentages, otherwise as exact counts.")
    parser.add_argument("-n", "--namesort", action='store_true', default=None, help="An optional flag: if set, values are sorted by namespace, otherwise by value.")
    args = parser.parse_args()

    misp = init(misp_url, misp_key)

    stats = misp.get_tags_statistics(args.percentage, args.namesort)
    print(json.dumps(stats))
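Assuming the script above is saved as `tags_stats.py` (the filename is not visible in this extract), a typical invocation asking for percentage output sorted by namespace would be:

```
cd examples
python tags_stats.py -p -n
```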
@@ -0,0 +1,19 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-

import argparse

from pymisp import mispevent


if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='Sign & verify a MISP event.')
    parser.add_argument("-i", "--input", required=True, help="Json file")
    parser.add_argument("-u", "--uid", required=True, help="GPG UID")
    args = parser.parse_args()

    me = mispevent.MISPEvent()
    me.load(args.input)

    me.sign(args.uid)
    me.verify(args.uid)
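Assuming the script is saved as `sign.py` (the filename is not shown in this extract) and `event.json` is an exported event, it would be run like so:

```
python sign.py -i event.json -u "My MISP signing key"
```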
@@ -10,13 +10,13 @@ import argparse


 def init(url, key):
-    return PyMISP(url, key, True, 'json')
+    return PyMISP(url, key, True, 'json', debug=True)


 def up_event(m, event, content):
     with open(content, 'r') as f:
         result = m.update_event(event, f.read())
-    print result.text
+    print(result)

 if __name__ == '__main__':
     parser = argparse.ArgumentParser(description='Get an event from a MISP instance.')
@@ -2,7 +2,7 @@
 # -*- coding: utf-8 -*-

 from pymisp import PyMISP
-from keys import misp_url, misp_key,misp_verifycert
+from keys import misp_url, misp_key, misp_verifycert
 import argparse
 import os
 import glob
@@ -12,8 +12,8 @@ def init(url, key):
     return PyMISP(url, key, misp_verifycert, 'json')


-def upload_files(m, eid, paths, distrib, ids, categ, info, analysis, threat):
-    out = m.upload_samplelist(paths, eid, distrib, ids, categ, info, analysis, threat)
+def upload_files(m, eid, paths, distrib, ids, categ, comment, info, analysis, threat):
+    out = m.upload_samplelist(paths, eid, distrib, ids, categ, comment, info, analysis, threat)
     print(out)

 if __name__ == '__main__':
@@ -26,6 +26,7 @@ if __name__ == '__main__':
     parser.add_argument("-i", "--info", help="Used to populate the event info field if no event ID supplied.")
     parser.add_argument("-a", "--analysis", type=int, help="The analysis level of the newly created event, if applicable. [0-2]")
     parser.add_argument("-t", "--threat", type=int, help="The threat level ID of the newly created event, if applicable. [1-4]")
+    parser.add_argument("-co", "--comment", type=str, help="Comment for the uploaded file(s).")
     args = parser.parse_args()

     misp = init(misp_url, misp_key)
@@ -39,4 +40,4 @@ if __name__ == '__main__':
         print('invalid file')
         exit(0)

-    upload_files(misp, args.event, files, args.distrib, args.ids, args.categ, args.info, args.analysis, args.threat)
+    upload_files(misp, args.event, files, args.distrib, args.ids, args.categ, args.comment, args.info, args.analysis, args.threat)
@@ -0,0 +1,6 @@
{
    "email": "maaiil@domain.lu",
    "org_id": 1,
    "role_id": 1,
    "autoalert": 1
}
@@ -0,0 +1,24 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-

from pymisp import PyMISP
from keys import misp_url, misp_key
import argparse

# For python2 & 3 compat, a bit dirty, but it seems to be the least bad one
try:
    input = raw_input
except NameError:
    pass


def init(url, key):
    return PyMISP(url, key, True, 'json')

if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='Get a list of the users from the MISP instance.')

    misp = init(misp_url, misp_key)

    users_list = misp.get_users_list()
    print(users_list)
@@ -1,3 +1,7 @@
-__version__ = '2.2'
+__version__ = '2.4.56'

-from .api import PyMISP, PyMISPError, NewEventError, NewAttributeError, MissingDependency, NoURL, NoKey
+from .exceptions import PyMISPError, NewEventError, NewAttributeError, MissingDependency, NoURL, NoKey
+from .api import PyMISP
+from .mispevent import MISPEvent, MISPAttribute, EncodeUpdate, EncodeFull
+from .tools.neo4j import Neo4j
+from .tools import stix
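With the exceptions split out into their own module and the new classes re-exported here, downstream code can keep importing everything from the package root. A one-line sketch of the resulting import surface (nothing beyond what the hunk above re-exports):

```python
from pymisp import PyMISP, PyMISPError, MISPEvent, MISPAttribute, EncodeUpdate
```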
pymisp/api.py (1160 changes): file diff suppressed because it is too large.
@@ -0,0 +1,706 @@
{
    "result": {
        "sane_defaults": {
            "md5": {"default_category": "Payload delivery", "to_ids": 1},
            "sha1": {"default_category": "Payload delivery", "to_ids": 1},
            "sha256": {"default_category": "Payload delivery", "to_ids": 1},
            "filename": {"default_category": "Payload delivery", "to_ids": 1},
            "pdb": {"default_category": "Artifacts dropped", "to_ids": 0},
            "filename|md5": {"default_category": "Payload delivery", "to_ids": 1},
            "filename|sha1": {"default_category": "Payload delivery", "to_ids": 1},
            "filename|sha256": {"default_category": "Payload delivery", "to_ids": 1},
            "ip-src": {"default_category": "Network activity", "to_ids": 1},
            "ip-dst": {"default_category": "Network activity", "to_ids": 1},
            "hostname": {"default_category": "Network activity", "to_ids": 1},
            "domain": {"default_category": "Network activity", "to_ids": 1},
            "domain|ip": {"default_category": "Network activity", "to_ids": 1},
            "email-src": {"default_category": "Payload delivery", "to_ids": 1},
            "email-dst": {"default_category": "Network activity", "to_ids": 1},
            "email-subject": {"default_category": "Payload delivery", "to_ids": 0},
            "email-attachment": {"default_category": "Payload delivery", "to_ids": 1},
            "url": {"default_category": "External analysis", "to_ids": 1},
            "http-method": {"default_category": "Network activity", "to_ids": 0},
            "user-agent": {"default_category": "Network activity", "to_ids": 0},
            "regkey": {"default_category": "Persistence mechanism", "to_ids": 1},
            "regkey|value": {"default_category": "Persistence mechanism", "to_ids": 1},
            "AS": {"default_category": "Network activity", "to_ids": 0},
            "snort": {"default_category": "Network activity", "to_ids": 1},
            "pattern-in-file": {"default_category": "Payload installation", "to_ids": 1},
            "pattern-in-traffic": {"default_category": "Network activity", "to_ids": 1},
            "pattern-in-memory": {"default_category": "Payload installation", "to_ids": 1},
            "yara": {"default_category": "Payload installation", "to_ids": 1},
            "vulnerability": {"default_category": "External analysis", "to_ids": 0},
            "attachment": {"default_category": "External analysis", "to_ids": 0},
            "malware-sample": {"default_category": "Payload delivery", "to_ids": 1},
            "link": {"default_category": "External analysis", "to_ids": 0},
            "comment": {"default_category": "Other", "to_ids": 0},
            "text": {"default_category": "Other", "to_ids": 0},
            "other": {"default_category": "Other", "to_ids": 0},
            "named pipe": {"default_category": "Artifacts dropped", "to_ids": 0},
            "mutex": {"default_category": "Artifacts dropped", "to_ids": 1},
            "target-user": {"default_category": "Targeting data", "to_ids": 0},
            "target-email": {"default_category": "Targeting data", "to_ids": 0},
            "target-machine": {"default_category": "Targeting data", "to_ids": 0},
            "target-org": {"default_category": "Targeting data", "to_ids": 0},
            "target-location": {"default_category": "Targeting data", "to_ids": 0},
            "target-external": {"default_category": "Targeting data", "to_ids": 0},
            "btc": {"default_category": "Financial fraud", "to_ids": 1},
            "iban": {"default_category": "Financial fraud", "to_ids": 1},
            "bic": {"default_category": "Financial fraud", "to_ids": 1},
            "bank-account-nr": {"default_category": "Financial fraud", "to_ids": 1},
            "aba-rtn": {"default_category": "Financial fraud", "to_ids": 1},
            "bin": {"default_category": "Financial fraud", "to_ids": 1},
            "cc-number": {"default_category": "Financial fraud", "to_ids": 1},
            "prtn": {"default_category": "Financial fraud", "to_ids": 1},
            "threat-actor": {"default_category": "Attribution", "to_ids": 0},
            "campaign-name": {"default_category": "Attribution", "to_ids": 0},
            "campaign-id": {"default_category": "Attribution", "to_ids": 0},
            "malware-type": {"default_category": "Payload delivery", "to_ids": 0},
            "uri": {"default_category": "Network activity", "to_ids": 1},
            "authentihash": {"default_category": "Payload delivery", "to_ids": 1},
            "ssdeep": {"default_category": "Payload delivery", "to_ids": 1},
            "imphash": {"default_category": "Payload delivery", "to_ids": 1},
            "pehash": {"default_category": "Payload delivery", "to_ids": 1},
            "sha224": {"default_category": "Payload delivery", "to_ids": 1},
            "sha384": {"default_category": "Payload delivery", "to_ids": 1},
            "sha512": {"default_category": "Payload delivery", "to_ids": 1},
            "sha512/224": {"default_category": "Payload delivery", "to_ids": 1},
            "sha512/256": {"default_category": "Payload delivery", "to_ids": 1},
            "tlsh": {"default_category": "Payload delivery", "to_ids": 1},
            "filename|authentihash": {"default_category": "Payload delivery", "to_ids": 1},
            "filename|ssdeep": {"default_category": "Payload delivery", "to_ids": 1},
            "filename|imphash": {"default_category": "Payload delivery", "to_ids": 1},
            "filename|pehash": {"default_category": "Payload delivery", "to_ids": 1},
            "filename|sha224": {"default_category": "Payload delivery", "to_ids": 1},
            "filename|sha384": {"default_category": "Payload delivery", "to_ids": 1},
            "filename|sha512": {"default_category": "Payload delivery", "to_ids": 1},
            "filename|sha512/224": {"default_category": "Payload delivery", "to_ids": 1},
            "filename|sha512/256": {"default_category": "Payload delivery", "to_ids": 1},
            "filename|tlsh": {"default_category": "Payload delivery", "to_ids": 1},
            "windows-scheduled-task": {"default_category": "Artifacts dropped", "to_ids": 0},
            "windows-service-name": {"default_category": "Artifacts dropped", "to_ids": 0},
            "windows-service-displayname": {"default_category": "Artifacts dropped", "to_ids": 0},
            "whois-registrant-email": {"default_category": "Attribution", "to_ids": 0},
            "whois-registrant-phone": {"default_category": "Attribution", "to_ids": 0},
            "whois-registrant-name": {"default_category": "Attribution", "to_ids": 0},
            "whois-registrar": {"default_category": "Attribution", "to_ids": 0},
            "whois-creation-date": {"default_category": "Attribution", "to_ids": 0},
            "x509-fingerprint-sha1": {"default_category": "Network activity", "to_ids": 1}
        },
        "types": ["md5", "sha1", "sha256", "filename", "pdb", "filename|md5", "filename|sha1", "filename|sha256",
                  "ip-src", "ip-dst", "hostname", "domain", "domain|ip", "email-src", "email-dst", "email-subject",
                  "email-attachment", "url", "http-method", "user-agent", "regkey", "regkey|value", "AS", "snort",
                  "pattern-in-file", "pattern-in-traffic", "pattern-in-memory", "yara", "vulnerability", "attachment",
                  "malware-sample", "link", "comment", "text", "other", "named pipe", "mutex", "target-user",
                  "target-email", "target-machine", "target-org", "target-location", "target-external", "btc",
                  "iban", "bic", "bank-account-nr", "aba-rtn", "bin", "cc-number", "prtn", "threat-actor",
                  "campaign-name", "campaign-id", "malware-type", "uri", "authentihash", "ssdeep", "imphash",
                  "pehash", "sha224", "sha384", "sha512", "sha512/224", "sha512/256", "tlsh",
                  "filename|authentihash", "filename|ssdeep", "filename|imphash", "filename|pehash",
                  "filename|sha224", "filename|sha384", "filename|sha512", "filename|sha512/224",
                  "filename|sha512/256", "filename|tlsh", "windows-scheduled-task", "windows-service-name",
                  "windows-service-displayname", "whois-registrant-email", "whois-registrant-phone",
                  "whois-registrant-name", "whois-registrar", "whois-creation-date", "x509-fingerprint-sha1"],
        "categories": ["Internal reference", "Targeting data", "Antivirus detection", "Payload delivery",
                       "Artifacts dropped", "Payload installation", "Persistence mechanism", "Network activity",
                       "Payload type", "Attribution", "External analysis", "Financial fraud", "Other"],
        "category_type_mappings": {
            "Internal reference": ["text", "link", "comment", "other"],
            "Targeting data": ["target-user", "target-email", "target-machine", "target-org", "target-location",
                               "target-external", "comment"],
            "Antivirus detection": ["link", "comment", "text", "attachment", "other"],
            "Payload delivery": ["md5", "sha1", "sha224", "sha256", "sha384", "sha512", "sha512/224", "sha512/256",
                                 "ssdeep", "imphash", "authentihash", "pehash", "tlsh", "filename", "filename|md5",
                                 "filename|sha1", "filename|sha224", "filename|sha256", "filename|sha384",
                                 "filename|sha512", "filename|sha512/224", "filename|sha512/256",
                                 "filename|authentihash", "filename|ssdeep", "filename|tlsh", "filename|imphash",
                                 "filename|pehash", "ip-src", "ip-dst", "hostname", "domain", "email-src",
                                 "email-dst", "email-subject", "email-attachment", "url", "user-agent", "AS",
                                 "pattern-in-file", "pattern-in-traffic", "yara", "attachment", "malware-sample",
                                 "link", "malware-type", "comment", "text", "vulnerability",
                                 "x509-fingerprint-sha1", "other"],
            "Artifacts dropped": ["md5", "sha1", "sha224", "sha256", "sha384", "sha512", "sha512/224", "sha512/256",
                                  "ssdeep", "imphash", "authentihash", "filename", "filename|md5", "filename|sha1",
                                  "filename|sha224", "filename|sha256", "filename|sha384", "filename|sha512",
                                  "filename|sha512/224", "filename|sha512/256", "filename|authentihash",
                                  "filename|ssdeep", "filename|tlsh", "filename|imphash", "filename|pehash",
                                  "regkey", "regkey|value", "pattern-in-file", "pattern-in-memory", "pdb", "yara",
                                  "attachment", "malware-sample", "named pipe", "mutex", "windows-scheduled-task",
                                  "windows-service-name", "windows-service-displayname", "comment", "text",
                                  "x509-fingerprint-sha1", "other"],
            "Payload installation": ["md5", "sha1", "sha224", "sha256", "sha384", "sha512", "sha512/224",
                                     "sha512/256", "ssdeep", "imphash", "authentihash", "pehash", "tlsh",
                                     "filename", "filename|md5", "filename|sha1", "filename|sha224",
                                     "filename|sha256", "filename|sha384", "filename|sha512",
                                     "filename|sha512/224", "filename|sha512/256", "filename|authentihash",
                                     "filename|ssdeep", "filename|tlsh", "filename|imphash", "filename|pehash",
                                     "pattern-in-file", "pattern-in-traffic", "pattern-in-memory", "yara",
                                     "vulnerability", "attachment", "malware-sample", "malware-type", "comment",
                                     "text", "x509-fingerprint-sha1", "other"],
            "Persistence mechanism": ["filename", "regkey", "regkey|value", "comment", "text", "other"],
            "Network activity": ["ip-src", "ip-dst", "hostname", "domain", "domain|ip", "email-dst", "url", "uri",
                                 "user-agent", "http-method", "AS", "snort", "pattern-in-file",
                                 "pattern-in-traffic", "attachment", "comment", "text", "x509-fingerprint-sha1",
                                 "other"],
            "Payload type": ["comment", "text", "other"],
            "Attribution": ["threat-actor", "campaign-name", "campaign-id", "whois-registrant-phone",
                            "whois-registrant-email", "whois-registrant-name", "whois-registrar",
                            "whois-creation-date", "comment", "text", "x509-fingerprint-sha1", "other"],
            "External analysis": ["md5", "sha1", "sha256", "filename", "filename|md5", "filename|sha1",
                                  "filename|sha256", "ip-src", "ip-dst", "hostname", "domain", "domain|ip", "url",
                                  "user-agent", "regkey", "regkey|value", "AS", "snort", "pattern-in-file",
                                  "pattern-in-traffic", "pattern-in-memory", "vulnerability", "attachment",
                                  "malware-sample", "link", "comment", "text", "x509-fingerprint-sha1", "other"],
            "Financial fraud": ["btc", "iban", "bic", "bank-account-nr", "aba-rtn", "bin", "cc-number", "prtn",
                                "comment", "text", "other"],
            "Other": ["comment", "text", "other"]
        }
    }
}
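pymisp/data/describeTypes.json is what MISPAttribute and MISPEvent (further down) consume to validate types and fill in default categories and to_ids flags. A quick sketch of inspecting it directly, assuming a checkout of the repository (the path is relative to the repo root):

```python
import json

with open('pymisp/data/describeTypes.json') as f:
    describe_types = json.load(f)['result']

# sane_defaults drives the fallback category and to_ids flag per type
print(describe_types['sane_defaults']['md5'])
# {'default_category': 'Payload delivery', 'to_ids': 1}

# category_type_mappings constrains which types a category accepts
print('md5' in describe_types['category_type_mappings']['Payload delivery'])  # True
```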
@@ -0,0 +1,321 @@
{
    "$schema": "http://json-schema.org/draft-04/schema#",
    "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json",
    "type": "object",
    "properties": {
        "Event": {
            "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event",
            "type": "object",
            "properties": {
                "id": {"id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/id", "type": "string"},
                "orgc_id": {"id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/orgc_id", "type": "string"},
                "org_id": {"id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/org_id", "type": "string"},
                "date": {"id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/date", "type": "string"},
                "threat_level_id": {"id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/threat_level_id", "type": "string"},
                "info": {"id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/info", "type": "string"},
                "published": {"id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/published", "type": "boolean"},
                "uuid": {"id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/uuid", "type": "string"},
                "attribute_count": {"id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/attribute_count", "type": "string"},
                "analysis": {"id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/analysis", "type": "string"},
                "timestamp": {"id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/timestamp", "type": "string"},
                "distribution": {"id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/distribution", "type": "string"},
                "proposal_email_lock": {"id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/proposal_email_lock", "type": "boolean"},
                "locked": {"id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/locked", "type": "boolean"},
                "publish_timestamp": {"id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/publish_timestamp", "type": "string"},
                "sharing_group_id": {"id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/sharing_group_id", "type": "string"},
                "Org": {
                    "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/Org",
                    "type": "object",
                    "properties": {
                        "id": {"id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/Org/id", "type": "string"},
                        "name": {"id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/Org/name", "type": "string"},
                        "uuid": {"id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/Org/uuid", "type": "string"}
                    }
                },
                "Orgc": {
                    "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/Orgc",
                    "type": "object",
                    "properties": {
                        "id": {"id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/Orgc/id", "type": "string"},
                        "name": {"id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/Orgc/name", "type": "string"},
                        "uuid": {"id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/Orgc/uuid", "type": "string"}
                    }
                },
                "Attribute": {
                    "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/Attribute",
                    "type": "array",
                    "items": {
                        "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/Attribute/17",
                        "type": "object",
                        "properties": {
                            "id": {"id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/Attribute/17/id", "type": "string"},
                            "type": {"id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/Attribute/17/type", "type": "string"},
                            "category": {"id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/Attribute/17/category", "type": "string"},
                            "to_ids": {"id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/Attribute/17/to_ids", "type": "boolean"},
                            "uuid": {"id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/Attribute/17/uuid", "type": "string"},
                            "event_id": {"id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/Attribute/17/event_id", "type": "string"},
                            "distribution": {"id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/Attribute/17/distribution", "type": "string"},
                            "timestamp": {"id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/Attribute/17/timestamp", "type": "string"},
                            "comment": {"id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/Attribute/17/comment", "type": "string"},
                            "sharing_group_id": {"id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/Attribute/17/sharing_group_id", "type": "string"},
                            "value": {"id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/Attribute/17/value", "type": "string"},
                            "SharingGroup": {"id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/Attribute/17/SharingGroup", "type": "array", "items": {}, "additionalItems": false},
                            "ShadowAttribute": {"id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/Attribute/17/ShadowAttribute", "type": "array", "items": {}, "additionalItems": false}
                        }
                    },
                    "additionalItems": false
                },
                "ShadowAttribute": {"id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/ShadowAttribute", "type": "array", "items": {}, "additionalItems": false},
                "RelatedEvent": {
                    "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/RelatedEvent",
                    "type": "array",
                    "items": {
                        "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/RelatedEvent/0",
                        "type": "object",
                        "properties": {
                            "Org": {
                                "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/RelatedEvent/0/Org",
                                "type": "object",
                                "properties": {
                                    "id": {"id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/RelatedEvent/0/Org/id", "type": "string"},
                                    "name": {"id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/RelatedEvent/0/Org/name", "type": "string"},
                                    "uuid": {"id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/RelatedEvent/0/Org/uuid", "type": "string"}
                                }
                            },
                            "Orgc": {
                                "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/RelatedEvent/0/Orgc",
                                "type": "object",
                                "properties": {
                                    "id": {"id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/RelatedEvent/0/Orgc/id", "type": "string"},
                                    "name": {"id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/RelatedEvent/0/Orgc/name", "type": "string"},
                                    "uuid": {"id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/RelatedEvent/0/Orgc/uuid", "type": "string"}
                                }
                            },
                            "Event": {
                                "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/RelatedEvent/0/Event",
                                "type": "object",
                                "items": {
                                    "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/RelatedEvent/0/Event/0",
                                    "type": "object",
                                    "properties": {
                                        "id": {"id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/RelatedEvent/0/Event/0/id", "type": "string"},
                                        "date": {"id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/RelatedEvent/0/Event/0/date", "type": "string"},
                                        "threat_level_id": {"id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/RelatedEvent/0/Event/0/threat_level_id", "type": "string"},
                                        "info": {"id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/RelatedEvent/0/Event/0/info", "type": "string"},
                                        "published": {"id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/RelatedEvent/0/Event/0/published", "type": "boolean"},
                                        "uuid": {"id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/RelatedEvent/0/Event/0/uuid", "type": "string"},
                                        "analysis": {"id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/RelatedEvent/0/Event/0/analysis", "type": "string"},
                                        "timestamp": {"id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/RelatedEvent/0/Event/0/timestamp", "type": "string"},
                                        "distribution": {"id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/RelatedEvent/0/Event/0/distribution", "type": "string"},
                                        "org_id": {"id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/RelatedEvent/0/Event/0/org_id", "type": "string"},
                                        "orgc_id": {"id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/RelatedEvent/0/Event/0/orgc_id", "type": "string"}
                                    }
                                },
                                "additionalItems": false
                            }
                        }
                    },
                    "additionalItems": false
                },
                "Tag": {
                    "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/Tag",
                    "type": "array",
                    "items": {
                        "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/Tag/2",
                        "type": "object",
                        "properties": {
                            "id": {"id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/Tag/2/id", "type": "string"},
                            "name": {"id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/Tag/2/name", "type": "string"},
                            "colour": {"id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/Tag/2/colour", "type": "string"},
                            "exportable": {"id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/Tag/2/exportable", "type": "boolean"}
                        }
                    },
                    "additionalItems": false
                }
            },
            "required": ["info"]
        }
    },
    "required": ["Event"]
}
@@ -0,0 +1,327 @@
{
    "$schema": "http://json-schema.org/draft-04/schema#",
    "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json",
    "type": "object",
    "properties": {
        "Event": {
            "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event",
            "type": "object",
            "properties": {
                "id": {"id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/id", "type": "string"},
                "orgc_id": {"id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/orgc_id", "type": "string"},
                "org_id": {"id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/org_id", "type": "string"},
                "date": {"id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/date", "type": "string"},
                "threat_level_id": {"id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/threat_level_id", "type": "string"},
                "info": {"id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/info", "type": "string"},
                "published": {"id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/published", "type": "boolean"},
                "uuid": {"id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/uuid", "type": "string"},
                "attribute_count": {"id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/attribute_count", "type": "string"},
                "analysis": {"id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/analysis", "type": "string"},
                "timestamp": {"id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/timestamp", "type": "string"},
                "distribution": {"id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/distribution", "type": "string"},
                "proposal_email_lock": {"id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/proposal_email_lock", "type": "boolean"},
                "locked": {"id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/locked", "type": "boolean"},
                "publish_timestamp": {"id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/publish_timestamp", "type": "string"},
                "sharing_group_id": {"id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/sharing_group_id", "type": "string"},
                "Org": {
                    "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/Org",
                    "type": "object",
                    "properties": {
                        "id": {"id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/Org/id", "type": "string"},
                        "name": {"id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/Org/name", "type": "string"},
                        "uuid": {"id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/Org/uuid", "type": "string"}
                    }
                },
                "Orgc": {
                    "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/Orgc",
                    "type": "object",
                    "properties": {
                        "id": {"id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/Orgc/id", "type": "string"},
                        "name": {"id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/Orgc/name", "type": "string"},
                        "uuid": {"id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/Orgc/uuid", "type": "string"}
                    }
                },
                "Attribute": {
                    "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/Attribute",
                    "type": "array",
                    "items": {
                        "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/Attribute/17",
                        "type": "object",
                        "properties": {
                            "id": {"id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/Attribute/17/id", "type": "string"},
                            "type": {"id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/Attribute/17/type", "type": "string"},
                            "category": {"id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/Attribute/17/category", "type": "string"},
                            "to_ids": {"id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/Attribute/17/to_ids", "type": "boolean"},
                            "uuid": {"id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/Attribute/17/uuid", "type": "string"},
                            "event_id": {"id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/Attribute/17/event_id", "type": "string"},
                            "distribution": {"id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/Attribute/17/distribution", "type": "string"},
                            "timestamp": {"id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/Attribute/17/timestamp", "type": "string"},
                            "comment": {"id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/Attribute/17/comment", "type": "string"},
                            "sharing_group_id": {"id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/Attribute/17/sharing_group_id", "type": "string"},
                            "value": {"id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/Attribute/17/value", "type": "string"},
                            "SharingGroup": {"id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/Attribute/17/SharingGroup", "type": "array", "items": {}, "additionalItems": false},
                            "ShadowAttribute": {"id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/Attribute/17/ShadowAttribute", "type": "array", "items": {}, "additionalItems": false}
                        }
                    },
                    "additionalItems": false
                },
                "ShadowAttribute": {"id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/ShadowAttribute", "type": "array", "items": {}, "additionalItems": false},
                "RelatedEvent": {
                    "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/RelatedEvent",
                    "type": "array",
                    "items": {
                        "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/RelatedEvent/0",
                        "type": "object",
                        "properties": {
                            "Org": {
                                "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/RelatedEvent/0/Org",
                                "type": "object",
                                "properties": {
                                    "id": {"id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/RelatedEvent/0/Org/id", "type": "string"},
                                    "name": {"id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/RelatedEvent/0/Org/name", "type": "string"},
                                    "uuid": {"id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/RelatedEvent/0/Org/uuid", "type": "string"}
                                }
                            },
                            "Orgc": {
                                "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/RelatedEvent/0/Orgc",
                                "type": "object",
                                "properties": {
                                    "id": {"id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/RelatedEvent/0/Orgc/id", "type": "string"},
                                    "name": {"id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/RelatedEvent/0/Orgc/name", "type": "string"},
                                    "uuid": {"id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/RelatedEvent/0/Orgc/uuid", "type": "string"}
                                }
                            },
                            "Event": {
                                "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/RelatedEvent/0/Event",
                                "type": "object",
                                "items": {
                                    "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/RelatedEvent/0/Event/0",
                                    "type": "object",
                                    "properties": {
                                        "id": {"id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/RelatedEvent/0/Event/0/id", "type": "string"},
                                        "date": {"id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/RelatedEvent/0/Event/0/date", "type": "string"},
                                        "threat_level_id": {"id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/RelatedEvent/0/Event/0/threat_level_id", "type": "string"},
                                        "info": {"id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/RelatedEvent/0/Event/0/info", "type": "string"},
                                        "published": {"id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/RelatedEvent/0/Event/0/published", "type": "boolean"},
                                        "uuid": {"id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/RelatedEvent/0/Event/0/uuid", "type": "string"},
                                        "analysis": {"id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/RelatedEvent/0/Event/0/analysis", "type": "string"},
                                        "timestamp": {"id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/RelatedEvent/0/Event/0/timestamp", "type": "string"},
                                        "distribution": {"id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/RelatedEvent/0/Event/0/distribution", "type": "string"},
                                        "org_id": {"id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/RelatedEvent/0/Event/0/org_id", "type": "string"},
                                        "orgc_id": {"id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/RelatedEvent/0/Event/0/orgc_id", "type": "string"}
                                    }
                                },
                                "additionalItems": false
                            }
                        }
                    },
                    "additionalItems": false
                },
                "Tag": {
                    "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/Tag",
                    "type": "array",
                    "items": {
                        "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/Tag/2",
                        "type": "object",
                        "properties": {
                            "id": {"id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/Tag/2/id", "type": "string"},
                            "name": {"id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/Tag/2/name", "type": "string"},
                            "colour": {"id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/Tag/2/colour", "type": "string"},
                            "exportable": {"id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/Tag/2/exportable", "type": "boolean"}
                        }
                    },
                    "additionalItems": false
                }
            },
            "required": ["date", "threat_level_id", "info", "published", "analysis", "distribution", "Attribute"]
        }
    },
    "required": ["Event"]
}
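The two files differ only in their `required` list: the first (lax) variant only demands `info`, while the strict one above requires a full, publishable event. MISPEvent (further down) loads both as schema.json and schema-lax.json. A hedged sketch of validating a minimal event against the strict file, assuming a repository checkout and the jsonschema library (values are illustrative):

```python
import json

import jsonschema  # supports draft-04 schemas like these

with open('pymisp/data/schema.json') as f:
    schema = json.load(f)

event = {'Event': {'date': '2016-11-25', 'threat_level_id': '2', 'info': 'Test event',
                   'published': False, 'analysis': '0', 'distribution': '3',
                   'Attribute': []}}
jsonschema.validate(event, schema)  # raises jsonschema.ValidationError on failure
```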
@@ -0,0 +1,31 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-


class PyMISPError(Exception):
    def __init__(self, message):
        super(PyMISPError, self).__init__(message)
        self.message = message


class NewEventError(PyMISPError):
    pass


class NewAttributeError(PyMISPError):
    pass


class SearchError(PyMISPError):
    pass


class MissingDependency(PyMISPError):
    pass


class NoURL(PyMISPError):
    pass


class NoKey(PyMISPError):
    pass
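Because every specific error subclasses PyMISPError, callers can catch as narrowly or as broadly as they like. A minimal sketch (the message text is made up):

```python
from pymisp.exceptions import PyMISPError, NoURL

try:
    raise NoURL('Please provide the URL of your MISP instance.')
except PyMISPError as e:  # the base class catches every subclass above
    print(e.message)
```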
@@ -0,0 +1,571 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-

import datetime
import time
import json
from json import JSONEncoder
import os
import warnings
import base64
from io import BytesIO
from zipfile import ZipFile
import hashlib

try:
    from dateutil.parser import parse
except ImportError:
    pass

try:
    import jsonschema
except ImportError:
    pass

try:
    # pyme renamed to gpg the 2016-10-28
    import gpg
    from gpg.constants.sig import mode
    has_pyme = True
except ImportError:
    try:
        # pyme renamed to gpg the 2016-10-28
        import pyme as gpg
        from pyme.constants.sig import mode
        has_pyme = True
    except ImportError:
        has_pyme = False

from .exceptions import PyMISPError, NewEventError, NewAttributeError

# Least dirty way to support python 2 and 3
try:
    basestring
    unicode
    warnings.warn("You're using python 2, it is strongly recommended to use python >=3.4")
except NameError:
    basestring = str
    unicode = str


class MISPAttribute(object):

    def __init__(self, describe_types):
        self.categories = describe_types['categories']
        self.types = describe_types['types']
        self.category_type_mapping = describe_types['category_type_mappings']
        self.sane_default = describe_types['sane_defaults']
        self._reinitialize_attribute()

    def _reinitialize_attribute(self):
        # Default values
        self.category = None
        self.type = None
        self.value = None
        self.to_ids = False
        self.comment = ''
        self.distribution = 5

        # other possible values
        self.data = None
        self.encrypt = False
        self.id = None
        self.uuid = None
        self.timestamp = None
        self.sharing_group_id = None
        self.deleted = None
        self.sig = None
        self.SharingGroup = []
        self.ShadowAttribute = []
        self.disable_correlation = False
        self.RelatedAttribute = []
        self.Tag = []

    def _serialize(self):
        return '{type}{category}{to_ids}{uuid}{timestamp}{comment}{deleted}{value}'.format(
            type=self.type, category=self.category, to_ids=self.to_ids, uuid=self.uuid, timestamp=self.timestamp,
            comment=self.comment, deleted=self.deleted, value=self.value).encode()

    def sign(self, gpg_uid, passphrase=None):
        if not has_pyme:
            raise Exception('pyme is required, please install: pip install --pre pyme3. You will also need libgpg-error-dev and libgpgme11-dev.')
        to_sign = self._serialize()
        with gpg.Context() as c:
            keys = list(c.keylist(gpg_uid))
            c.signers = keys[:1]
            if passphrase:
                c.set_passphrase_cb(lambda *args: passphrase)
            signed, _ = c.sign(to_sign, mode=mode.DETACH)
            self.sig = base64.b64encode(signed).decode()

    def delete(self):
        self.deleted = True

    def verify(self, gpg_uid):
        if not has_pyme:
            raise Exception('pyme is required, please install: pip install --pre pyme3. You will also need libgpg-error-dev and libgpgme11-dev.')
        signed_data = self._serialize()
        with gpg.Context() as c:
            keys = list(c.keylist(gpg_uid))
            try:
                c.verify(signed_data, signature=base64.b64decode(self.sig), verify=keys[:1])
                return {self.uuid: True}
            except:
                return {self.uuid: False}

    def set_all_values(self, **kwargs):
        if kwargs.get('type') and kwargs.get('category'):
            if kwargs['type'] not in self.category_type_mapping[kwargs['category']]:
                raise NewAttributeError('{} and {} are an invalid combination, type for this category has to be in {}'.format(kwargs['type'], kwargs['category'], (', '.join(self.category_type_mapping[kwargs['category']]))))
        # Required
        if kwargs.get('type'):
            self.type = kwargs['type']
            if self.type not in self.types:
                raise NewAttributeError('{} is invalid, type has to be in {}'.format(self.type, (', '.join(self.types))))
        elif not self.type:
            raise NewAttributeError('The type of the attribute is required.')

        type_defaults = self.sane_default[self.type]

        if kwargs.get('value'):
            self.value = kwargs['value']
        elif not self.value:
            raise NewAttributeError('The value of the attribute is required.')

        # Default values
        if kwargs.get('category'):
            self.category = kwargs['category']
            if self.category not in self.categories:
                raise NewAttributeError('{} is invalid, category has to be in {}'.format(self.category, (', '.join(self.categories))))
        else:
            self.category = type_defaults['default_category']

        if kwargs.get('to_ids'):
            self.to_ids = kwargs['to_ids']
            if not isinstance(self.to_ids, bool):
                raise NewAttributeError('{} is invalid, to_ids has to be True or False'.format(self.to_ids))
        else:
            self.to_ids = bool(int(type_defaults['to_ids']))
        if kwargs.get('comment'):
            self.comment = kwargs['comment']
        if kwargs.get('distribution'):
            self.distribution = int(kwargs['distribution'])
            if self.distribution not in [0, 1, 2, 3, 4, 5]:
                raise NewAttributeError('{} is invalid, the distribution has to be in 0, 1, 2, 3, 4, 5'.format(self.distribution))

        # other possible values
        if kwargs.get('data'):
            self.data = kwargs['data']
            self._load_data()
        if kwargs.get('id'):
            self.id = int(kwargs['id'])
        if kwargs.get('uuid'):
            self.uuid = kwargs['uuid']
        if kwargs.get('timestamp'):
            self.timestamp = datetime.datetime.fromtimestamp(int(kwargs['timestamp']))
        if kwargs.get('sharing_group_id'):
            self.sharing_group_id = int(kwargs['sharing_group_id'])
        if kwargs.get('deleted'):
            self.deleted = kwargs['deleted']
        if kwargs.get('SharingGroup'):
            self.SharingGroup = kwargs['SharingGroup']
        if kwargs.get('ShadowAttribute'):
            self.ShadowAttribute = kwargs['ShadowAttribute']
        if kwargs.get('sig'):
            self.sig = kwargs['sig']
        if kwargs.get('Tag'):
            self.Tag = kwargs['Tag']

        # If the user wants to disable correlation, let them. Defaults to False.
        self.disable_correlation = kwargs.get("disable_correlation", False)
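A hedged usage sketch of set_all_values, assuming describe_types was loaded from pymisp/data/describeTypes.json as shown earlier (the hash value is made up):

```python
import json

from pymisp.mispevent import MISPAttribute

with open('pymisp/data/describeTypes.json') as f:
    describe_types = json.load(f)['result']

attr = MISPAttribute(describe_types)
attr.set_all_values(type='md5', value='0123456789abcdef0123456789abcdef')  # made-up hash
print(attr.category, attr.to_ids)  # Payload delivery True, filled in from sane_defaults
```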
def _prepare_new_malware_sample(self):
|
||||
if '|' in self.value:
|
||||
# Get the filename, ignore the md5, because humans.
|
||||
self.malware_filename, md5 = self.value.split('|')
|
||||
else:
|
||||
# Assuming the user only passed the filename
|
||||
self.malware_filename = self.value
|
||||
m = hashlib.md5()
|
||||
m.update(self.data.getvalue())
|
||||
md5 = m.hexdigest()
|
||||
self.value = '{}|{}'.format(self.malware_filename, md5)
|
||||
self.malware_binary = self.data
|
||||
self.encrypt = True
|
||||
|
||||
def _load_data(self):
|
||||
if not isinstance(self.data, BytesIO):
|
||||
self.data = BytesIO(base64.b64decode(self.data))
|
||||
if self.type == 'malware-sample':
|
||||
try:
|
||||
with ZipFile(self.data) as f:
|
||||
for name in f.namelist():
|
||||
if name.endswith('.txt'):
|
||||
with f.open(name, pwd=b'infected') as unpacked:
|
||||
self.malware_filename = unpacked.read().decode()
|
||||
else:
|
||||
with f.open(name, pwd=b'infected') as unpacked:
|
||||
self.malware_binary = BytesIO(unpacked.read())
|
||||
except:
|
||||
# not a encrypted zip file, assuming it is a new malware sample
|
||||
self._prepare_new_malware_sample()
|
||||
|
||||
    def _json(self):
        to_return = {'type': self.type, 'category': self.category, 'to_ids': self.to_ids,
                     'distribution': self.distribution, 'value': self.value,
                     'comment': self.comment, 'disable_correlation': self.disable_correlation}
        if self.sig:
            to_return['sig'] = self.sig
        if self.sharing_group_id:
            to_return['sharing_group_id'] = self.sharing_group_id
        if self.Tag:
            to_return['Tag'] = self.Tag
        if self.data:
            to_return['data'] = base64.b64encode(self.data.getvalue()).decode()
        if self.encrypt:
            to_return['encrypt'] = self.encrypt
        to_return = _int_to_str(to_return)
        return to_return

    def _json_full(self):
        to_return = self._json()
        if self.id:
            to_return['id'] = self.id
        if self.uuid:
            to_return['uuid'] = self.uuid
        if self.timestamp:
            to_return['timestamp'] = int(time.mktime(self.timestamp.timetuple()))
        if self.deleted is not None:
            to_return['deleted'] = self.deleted
        if self.ShadowAttribute:
            to_return['ShadowAttribute'] = self.ShadowAttribute
        if self.SharingGroup:
            to_return['SharingGroup'] = self.SharingGroup
        to_return = _int_to_str(to_return)
        return to_return
class EncodeUpdate(JSONEncoder):
    def default(self, obj):
        try:
            return obj._json()
        except AttributeError:
            return JSONEncoder.default(self, obj)


class EncodeFull(JSONEncoder):
    def default(self, obj):
        try:
            return obj._json_full()
        except AttributeError:
            return JSONEncoder.default(self, obj)


def _int_to_str(d):
    # Transform all integers back into strings, as the MISP API expects
    for k, v in d.items():
        if isinstance(v, int) and not isinstance(v, bool):
            d[k] = str(v)
    return d
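# Editor's note, a sketch of how these pieces combine: the encoders let MISP objects
# be passed straight to json.dumps, e.g. json.dumps(event, cls=EncodeUpdate) for an
# update payload, or cls=EncodeFull to also keep the server-side fields (id, uuid, ...),
# while _int_to_str converts integers back into the strings the MISP API expects.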
class MISPEvent(object):

    def __init__(self, describe_types=None):
        self.ressources_path = os.path.join(os.path.abspath(os.path.dirname(__file__)), 'data')
        with open(os.path.join(self.ressources_path, 'schema.json'), 'r') as f:
            self.json_schema = json.load(f)
        with open(os.path.join(self.ressources_path, 'schema-lax.json'), 'r') as f:
            self.json_schema_lax = json.load(f)
        if not describe_types:
            with open(os.path.join(self.ressources_path, 'describeTypes.json'), 'r') as f:
                describe_types = json.load(f)['result']
        self.describe_types = describe_types
        self.categories = describe_types['categories']
        self.types = describe_types['types']
        self.category_type_mapping = describe_types['category_type_mappings']
        self.sane_default = describe_types['sane_defaults']
        self.new = True
        self.dump_full = False

        self._reinitialize_event()

    def _reinitialize_event(self):
        # Default values for a valid event to send to a MISP instance
        self.distribution = 3
        self.threat_level_id = 2
        self.analysis = 0
        self.info = None
        self.published = False
        self.date = datetime.date.today()
        self.attributes = []

        # All other keys
        self.sig = None
        self.global_sig = None
        self.id = None
        self.orgc_id = None
        self.org_id = None
        self.uuid = None
        self.attribute_count = None
        self.timestamp = None
        self.proposal_email_lock = None
        self.locked = None
        self.publish_timestamp = None
        self.sharing_group_id = None
        self.Org = None
        self.Orgc = None
        self.ShadowAttribute = []
        self.RelatedEvent = []
        self.Tag = []
        self.Galaxy = None

    def _serialize(self):
        return '{date}{threat_level_id}{info}{uuid}{analysis}{timestamp}'.format(
            date=self.date, threat_level_id=self.threat_level_id, info=self.info,
            uuid=self.uuid, analysis=self.analysis, timestamp=self.timestamp).encode()
    def _serialize_sigs(self):
        all_sigs = self.sig
        for a in self.attributes:
            all_sigs += a.sig
        return all_sigs.encode()

    def sign(self, gpg_uid, passphrase=None):
        if not has_pyme:
            raise Exception('pyme is required, please install: pip install --pre pyme3. You will also need libgpg-error-dev and libgpgme11-dev.')
        to_sign = self._serialize()
        with gpg.Context() as c:
            keys = list(c.keylist(gpg_uid))
            c.signers = keys[:1]
            if passphrase:
                c.set_passphrase_cb(lambda *args: passphrase)
            signed, _ = c.sign(to_sign, mode=mode.DETACH)
            self.sig = base64.b64encode(signed).decode()
        for a in self.attributes:
            a.sign(gpg_uid, passphrase)
        to_sign_global = self._serialize_sigs()
        with gpg.Context() as c:
            keys = list(c.keylist(gpg_uid))
            c.signers = keys[:1]
            if passphrase:
                c.set_passphrase_cb(lambda *args: passphrase)
            signed, _ = c.sign(to_sign_global, mode=mode.DETACH)
            self.global_sig = base64.b64encode(signed).decode()

    def verify(self, gpg_uid):
        if not has_pyme:
            raise Exception('pyme is required, please install: pip install --pre pyme3. You will also need libgpg-error-dev and libgpgme11-dev.')
        to_return = {}
        signed_data = self._serialize()
        with gpg.Context() as c:
            keys = list(c.keylist(gpg_uid))
            try:
                c.verify(signed_data, signature=base64.b64decode(self.sig), verify=keys[:1])
                to_return[self.uuid] = True
            except Exception:
                to_return[self.uuid] = False
        for a in self.attributes:
            to_return.update(a.verify(gpg_uid))
        to_verify_global = self._serialize_sigs()
        with gpg.Context() as c:
            keys = list(c.keylist(gpg_uid))
            try:
                c.verify(to_verify_global, signature=base64.b64decode(self.global_sig), verify=keys[:1])
                to_return['global'] = True
            except Exception:
                to_return['global'] = False
        return to_return
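    # Editor's sketch of the signing round trip (assumes pyme3 is installed and the
    # GnuPG keyring holds a usable key for the uid; uid and passphrase are placeholders):
    #   event.sign('analyst@example.com', passphrase='secret')
    #   event.verify('analyst@example.com')  # -> {event_uuid: True, ..., 'global': True}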
    def load(self, json_event):
        self.new = False
        self.dump_full = True
        if isinstance(json_event, basestring) and os.path.exists(json_event):
            # NOTE: is it a good idea? (possible security issue if an untrusted user calls this method)
            json_event = open(json_event, 'r')
        if hasattr(json_event, 'read'):
            # python2- and python3-compatible check for a file-like object
            json_event = json_event.read()
        if isinstance(json_event, basestring):
            json_event = json.loads(json_event)
        if json_event.get('response'):
            event = json_event.get('response')[0]
        else:
            event = json_event
        if not event:
            raise PyMISPError('Invalid event')
        # Invalid event created by MISP up to 2.4.52 (attribute_count is None instead of '0')
        if event.get('Event') and event.get('Event').get('attribute_count') is None:
            event['Event']['attribute_count'] = '0'
        jsonschema.validate(event, self.json_schema_lax)
        e = event.get('Event')
        self._reinitialize_event()
        self.set_all_values(**e)

    def set_date(self, date, ignore_invalid=False):
        if isinstance(date, basestring) or isinstance(date, unicode):
            self.date = parse(date).date()
        elif isinstance(date, datetime.datetime):
            self.date = date.date()
        elif isinstance(date, datetime.date):
            self.date = date
        else:
            if ignore_invalid:
                self.date = datetime.date.today()
            else:
                raise NewEventError('Invalid format for the date: {} - {}'.format(date, type(date)))
    def set_all_values(self, **kwargs):
        # Required value
        if kwargs.get('info'):
            self.info = kwargs['info']
        elif not self.info:
            raise NewEventError('The info field of the new event is required.')

        # Default values for a valid event to send to a MISP instance
        if kwargs.get('distribution') is not None:
            self.distribution = int(kwargs['distribution'])
            if self.distribution not in [0, 1, 2, 3, 4]:
                raise NewEventError('{} is invalid, the distribution has to be in 0, 1, 2, 3, 4'.format(self.distribution))
        if kwargs.get('threat_level_id') is not None:
            self.threat_level_id = int(kwargs['threat_level_id'])
            if self.threat_level_id not in [1, 2, 3, 4]:
                raise NewEventError('{} is invalid, the threat_level has to be in 1, 2, 3, 4'.format(self.threat_level_id))
        if kwargs.get('analysis') is not None:
            self.analysis = int(kwargs['analysis'])
            if self.analysis not in [0, 1, 2]:
                raise NewEventError('{} is invalid, the analysis has to be in 0, 1, 2'.format(self.analysis))
        if kwargs.get('published') is not None:
            # Respect the passed value instead of unconditionally publishing
            if kwargs['published']:
                self.publish()
            else:
                self.unpublish()
        if kwargs.get('date'):
            self.set_date(kwargs['date'])
        if kwargs.get('Attribute'):
            for a in kwargs['Attribute']:
                attribute = MISPAttribute(self.describe_types)
                attribute.set_all_values(**a)
                self.attributes.append(attribute)

        # All other keys
        if kwargs.get('id'):
            self.id = int(kwargs['id'])
        if kwargs.get('orgc_id'):
            self.orgc_id = int(kwargs['orgc_id'])
        if kwargs.get('org_id'):
            self.org_id = int(kwargs['org_id'])
        if kwargs.get('uuid'):
            self.uuid = kwargs['uuid']
        if kwargs.get('attribute_count'):
            self.attribute_count = int(kwargs['attribute_count'])
        if kwargs.get('timestamp'):
            self.timestamp = datetime.datetime.fromtimestamp(int(kwargs['timestamp']))
        if kwargs.get('proposal_email_lock'):
            self.proposal_email_lock = kwargs['proposal_email_lock']
        if kwargs.get('locked'):
            self.locked = kwargs['locked']
        if kwargs.get('publish_timestamp'):
            self.publish_timestamp = datetime.datetime.fromtimestamp(int(kwargs['publish_timestamp']))
        if kwargs.get('sharing_group_id'):
            self.sharing_group_id = int(kwargs['sharing_group_id'])
        if kwargs.get('Org'):
            self.Org = kwargs['Org']
        if kwargs.get('Orgc'):
            self.Orgc = kwargs['Orgc']
        if kwargs.get('ShadowAttribute'):
            self.ShadowAttribute = kwargs['ShadowAttribute']
        if kwargs.get('RelatedEvent'):
            self.RelatedEvent = []
            for rel_event in kwargs['RelatedEvent']:
                sub_event = MISPEvent()
                sub_event.load(rel_event)
                self.RelatedEvent.append(sub_event)
        if kwargs.get('Galaxy'):
            self.Galaxy = kwargs['Galaxy']
        if kwargs.get('Tag'):
            self.Tag = kwargs['Tag']
        if kwargs.get('sig'):
            self.sig = kwargs['sig']
        if kwargs.get('global_sig'):
            self.global_sig = kwargs['global_sig']
    def _json(self):
        to_return = {'Event': {}}
        to_return['Event'] = {'distribution': self.distribution, 'info': self.info,
                              'date': self.date.isoformat(), 'published': self.published,
                              'threat_level_id': self.threat_level_id,
                              'analysis': self.analysis, 'Attribute': []}
        if self.sig:
            to_return['Event']['sig'] = self.sig
        if self.global_sig:
            to_return['Event']['global_sig'] = self.global_sig
        if self.uuid:
            to_return['Event']['uuid'] = self.uuid
        if self.Tag:
            to_return['Event']['Tag'] = self.Tag
        if self.Orgc:
            to_return['Event']['Orgc'] = self.Orgc
        if self.Galaxy:
            to_return['Event']['Galaxy'] = self.Galaxy
        if self.sharing_group_id:
            to_return['Event']['sharing_group_id'] = self.sharing_group_id
        to_return['Event'] = _int_to_str(to_return['Event'])
        if self.attributes:
            to_return['Event']['Attribute'] = [a._json() for a in self.attributes]
        jsonschema.validate(to_return, self.json_schema)
        return to_return

    def _json_full(self):
        to_return = self._json()
        if self.id:
            to_return['Event']['id'] = self.id
        if self.orgc_id:
            to_return['Event']['orgc_id'] = self.orgc_id
        if self.org_id:
            to_return['Event']['org_id'] = self.org_id
        if self.locked is not None:
            to_return['Event']['locked'] = self.locked
        if self.attribute_count is not None:
            to_return['Event']['attribute_count'] = self.attribute_count
        if self.RelatedEvent:
            to_return['Event']['RelatedEvent'] = []
            for rel_event in self.RelatedEvent:
                to_return['Event']['RelatedEvent'].append(rel_event._json_full())
        if self.Org:
            to_return['Event']['Org'] = self.Org
        if self.sharing_group_id:
            to_return['Event']['sharing_group_id'] = self.sharing_group_id
        if self.ShadowAttribute:
            to_return['Event']['ShadowAttribute'] = self.ShadowAttribute
        if self.proposal_email_lock is not None:
            to_return['Event']['proposal_email_lock'] = self.proposal_email_lock
        if self.publish_timestamp:
            to_return['Event']['publish_timestamp'] = int(time.mktime(self.publish_timestamp.timetuple()))
        if self.timestamp:
            to_return['Event']['timestamp'] = int(time.mktime(self.timestamp.timetuple()))
        to_return['Event'] = _int_to_str(to_return['Event'])
        if self.attributes:
            to_return['Event']['Attribute'] = [a._json_full() for a in self.attributes]
        jsonschema.validate(to_return, self.json_schema)
        return to_return
    def publish(self):
        self.published = True

    def unpublish(self):
        self.published = False

    def delete_attribute(self, attribute_id):
        found = False
        for a in self.attributes:
            if a.id == attribute_id or a.uuid == attribute_id:
                a.delete()
                found = True
                break
        if not found:
            raise Exception('No attribute with UUID/ID {} found.'.format(attribute_id))

    def add_attribute(self, type, value, **kwargs):
        attribute = MISPAttribute(self.describe_types)
        attribute.set_all_values(type=type, value=value, **kwargs)
        self.attributes.append(attribute)
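Taken together, a minimal round trip with the class above could look like the following sketch (the event info, attribute value, and comment are placeholders, not part of the library):

from pymisp import MISPEvent, EncodeUpdate
import json

event = MISPEvent()
event.set_all_values(info='Editor test event', distribution=0, threat_level_id=1, analysis=0)
event.add_attribute('ip-dst', '203.0.113.42', comment='C2 beacon')  # documentation-range IP
print(json.dumps(event, cls=EncodeUpdate, indent=2))  # validated against schema.json on dump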
@@ -0,0 +1,58 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-

import glob
import os
from pymisp import MISPEvent

try:
    from py2neo import authenticate, Graph, Node, Relationship
    has_py2neo = True
except ImportError:
    has_py2neo = False


class Neo4j():

    def __init__(self, host='localhost:7474', username='neo4j', password='neo4j'):
        if not has_py2neo:
            raise Exception('py2neo is required, please install: pip install py2neo')
        authenticate(host, username, password)
        self.graph = Graph("http://{}/db/data/".format(host))

    def load_events_directory(self, directory):
        self.events = []
        for path in glob.glob(os.path.join(directory, '*.json')):
            e = MISPEvent()
            e.load(path)
            self.import_event(e)

    def del_all(self):
        self.graph.delete_all()

    def import_event(self, event):
        tx = self.graph.begin()
        event_node = Node('Event', uuid=event.uuid, name=event.info)
        # event_node['distribution'] = event.distribution
        # event_node['threat_level_id'] = event.threat_level_id
        # event_node['analysis'] = event.analysis
        # event_node['published'] = event.published
        # event_node['date'] = event.date.isoformat()
        tx.create(event_node)
        for a in event.attributes:
            attr_node = Node('Attribute', a.type, uuid=a.uuid)
            attr_node['category'] = a.category
            attr_node['name'] = a.value
            # attr_node['to_ids'] = a.to_ids
            # attr_node['comment'] = a.comment
            # attr_node['distribution'] = a.distribution
            tx.create(attr_node)
            member_rel = Relationship(event_node, "is member", attr_node)
            tx.create(member_rel)
            val = Node('Value', name=a.value)
            ev = Relationship(event_node, "has", val)
            av = Relationship(attr_node, "is", val)
            s = val | ev | av
            tx.merge(s)
            # tx.graph.push(s)
        tx.commit()
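A quick way to exercise this helper, as a sketch: it assumes a local Neo4j server reachable with the default credentials above, and a directory of exported MISP event JSON files (the path is a placeholder):

from pymisp.tools import Neo4j  # assuming the tools package re-exports Neo4j

neo = Neo4j(host='localhost:7474', username='neo4j', password='neo4j')
neo.del_all()  # start from an empty graph
neo.load_events_directory('exported_events/')  # placeholder directory of *.json events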
@@ -0,0 +1,137 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-

from pymisp import MISPEvent
try:
    from bs4 import BeautifulSoup
    has_bs4 = True
except ImportError:
    has_bs4 = False


iocMispMapping = {
    # NOTE: 'DriverItem/DriverName' and 'DnsEntryItem/Host' were each defined twice
    # in this literal; in a Python dict the later entry silently wins, so the
    # superseded earlier entries are kept here as comments only.
    # 'DriverItem/DriverName': {'category': 'Artifacts dropped', 'type': 'other', 'comment': 'DriverName.'},
    # 'DnsEntryItem/Host': {'type': 'domain'},

    'Email/To': {'type': 'target-email'},
    'Email/Date': {'type': 'comment', 'comment': 'EmailDate.'},
    # 'Email/Body': {'type': 'email-subject'},
    'Email/From': {'type': 'email-dst'},
    'Email/Subject': {'type': 'email-subject'},
    'Email/Attachment/Name': {'type': 'email-attachment'},

    'FileItem/Md5sum': {'type': 'md5'},
    'FileItem/Sha1sum': {'type': 'sha1'},
    'FileItem/Sha256sum': {'type': 'sha256'},

    'ServiceItem/serviceDLLmd5sum': {'type': 'md5', 'category': 'Payload installation'},
    'ServiceItem/serviceDLLsha1sum': {'type': 'sha1', 'category': 'Payload installation'},
    'ServiceItem/serviceDLLsha256sum': {'type': 'sha256', 'category': 'Payload installation'},

    'TaskItem/md5sum': {'type': 'md5'},
    'TaskItem/sha1sum': {'type': 'sha1'},
    'TaskItem/Sha256sum': {'type': 'sha256'},

    'FileItem/FileName': {'type': 'filename'},
    'FileItem/FullPath': {'type': 'filename'},
    'FileItem/FilePath': {'type': 'filename'},
    'DriverItem/DriverName': {'type': 'filename'},

    'Network/URI': {'type': 'uri'},
    'Network/DNS': {'type': 'domain'},
    'Network/String': {'type': 'ip-dst'},
    'RouteEntryItem/Destination': {'type': 'ip-dst'},
    'Network/UserAgent': {'type': 'user-agent'},

    'PortItem/localIP': {'type': 'ip-src'},
    'PortItem/remoteIP': {'type': 'ip-dst'},

    'ProcessItem/name': {'type': 'pattern-in-memory', 'comment': 'ProcessName.'},
    'ProcessItem/path': {'type': 'pattern-in-memory', 'comment': 'ProcessPath.'},
    'ProcessItem/Mutex': {'type': 'mutex'},
    'ProcessItem/Pipe/Name': {'type': 'named pipe'},
    'ProcessItem/Mutex/Name': {'type': 'mutex', 'comment': 'MutexName.'},

    'CookieHistoryItem/HostName': {'type': 'hostname'},
    'FormHistoryItem/HostName': {'type': 'hostname'},
    'SystemInfoItem/HostName': {'type': 'hostname'},
    'UrlHistoryItem/HostName': {'type': 'hostname'},
    'DnsEntryItem/RecordName': {'type': 'hostname'},
    'DnsEntryItem/Host': {'type': 'hostname'},

    # Is it the regkey value?
    # 'RegistryItem/Text': {'type': 'regkey', 'comment': 'RegistryText.'},
    'RegistryItem/KeyPath': {'type': 'regkey'},
    'RegistryItem/Path': {'type': 'regkey'},

    'ServiceItem/name': {'type': 'windows-service-name'},
    'ServiceItem/type': {'type': 'pattern-in-memory', 'comment': 'ServiceType.'},

    'Snort/Snort': {'type': 'snort'},
}


def extract_field(report, field_name):
    data = report.find(field_name.lower())
    if data and hasattr(data, 'text'):
        return data.text
    return None


def load_openioc(openioc):
    if not has_bs4:
        raise Exception('You need to install BeautifulSoup: pip install bs4')
    misp_event = MISPEvent()
    with open(openioc, "r") as ioc_file:
        iocreport = BeautifulSoup(ioc_file, "lxml")
    # Set event fields
    info = extract_field(iocreport, 'short_description')
    if info:
        misp_event.info = info
    date = extract_field(iocreport, 'authored_date')
    if date:
        misp_event.set_date(date)
    # Set special attributes
    description = extract_field(iocreport, 'description')
    if description:
        misp_event.add_attribute('comment', description)
    author = extract_field(iocreport, 'authored_by')
    if author:
        misp_event.add_attribute('comment', author)
    misp_event = set_all_attributes(iocreport, misp_event)
    return misp_event


def get_mapping(openioc_type):
    t = openioc_type.lower()
    for k, v in iocMispMapping.items():
        if k.lower() == t:
            return v
    return False


def set_all_attributes(openioc, misp_event):
    for item in openioc.find_all("indicatoritem"):
        attribute_values = {'comment': ''}
        if item.find('context'):
            mapping = get_mapping(item.find('context')['search'])
            if mapping:
                attribute_values.update(mapping)
            else:
                # Unknown mapping, ignoring
                # print(item.find('context'))
                continue
        else:
            continue
        value = extract_field(item, 'Content')
        if value:
            attribute_values['value'] = value
        else:
            # No value, ignoring
            continue
        comment = extract_field(item, 'Comment')
        if comment:
            attribute_values["comment"] = '{} {}'.format(attribute_values["comment"], comment)
        misp_event.add_attribute(**attribute_values)
    return misp_event
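A sketch of driving the loader end to end ('report.ioc' is a placeholder path; requires bs4 and lxml):

from pymisp.tools import load_openioc  # assuming the tools package re-exports load_openioc
from pymisp import EncodeUpdate
import json

event = load_openioc('report.ioc')
print(json.dumps(event, cls=EncodeUpdate, indent=2))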
@@ -0,0 +1,36 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-

try:
    from misp_stix_converter.converters.buildMISPAttribute import buildEvent
    from misp_stix_converter.converters import convert
    from misp_stix_converter.converters.convert import MISPtoSTIX
    has_misp_stix_converter = True
except ImportError:
    has_misp_stix_converter = False


def load_stix(stix, distribution=3, threat_level_id=2, analysis=0):
    '''Returns a MISPEvent object from a STIX package'''
    if not has_misp_stix_converter:
        raise Exception('You need to install misp_stix_converter: pip install git+https://github.com/MISP/MISP-STIX-Converter.git')
    stix = convert.load_stix(stix)
    return buildEvent(stix, distribution=distribution,
                      threat_level_id=threat_level_id, analysis=analysis)


def make_stix_package(misp_event, to_json=False, to_xml=False):
    '''Returns a STIXPackage from a MISPEvent.

    Optionally the package can be returned as JSON or XML.
    '''
    if not has_misp_stix_converter:
        raise Exception('You need to install misp_stix_converter: pip install git+https://github.com/MISP/MISP-STIX-Converter.git')
    package = MISPtoSTIX(misp_event)
    if to_json:
        return package.to_json()
    elif to_xml:
        return package.to_xml()
    else:
        return package
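A sketch of a round trip through these helpers (the file name is a placeholder; needs the optional misp_stix_converter dependency):

from pymisp.tools import load_stix, make_stix_package  # assuming the tools package re-exports them

event = load_stix('package.xml')               # STIX package -> MISPEvent
print(make_stix_package(event, to_json=True))  # MISPEvent -> STIX JSON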
setup.py
@@ -12,17 +12,22 @@ setup(
    maintainer='Raphaël Vinot',
    url='https://github.com/MISP/PyMISP',
    description='Python API for MISP.',
    packages=['pymisp'],
    packages=['pymisp', 'pymisp.tools'],
    classifiers=[
        'License :: OSI Approved :: BSD License',
        'Development Status :: 5 - Production/Stable',
        'Environment :: Console',
        'Operating System :: POSIX :: Linux',
        'Intended Audience :: Science/Research',
        'Intended Audience :: Telecommunications Industry',
        'Programming Language :: Python',
        'Intended Audience :: Information Technology',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Topic :: Security',
        'Topic :: Internet',
    ],
    test_suite="tests",
    install_requires=['requests'],
    install_requires=['requests', 'python-dateutil', 'jsonschema'],
    include_package_data=True,
    package_data={'data': ['schema.json', 'schema-lax.json', 'describeTypes.json']},
)
@@ -0,0 +1 @@
{"Event": {"info": "Ransomware - Xorist", "publish_timestamp": "1472548231", "timestamp": "1472541011", "analysis": "2", "Attribute": [{"category": "External analysis", "comment": "Imported via the Freetext Import Tool - Xchecked via VT: b3c4ae251f8094fa15b510051835c657eaef2a6cea46075d3aec964b14a99f68", "uuid": "57c5300c-0560-4146-bfaa-40e802de0b81", "timestamp": "1472540684", "to_ids": false, "value": "https://www.virustotal.com/file/b3c4ae251f8094fa15b510051835c657eaef2a6cea46075d3aec964b14a99f68/analysis/1469554268/", "type": "link"}, {"category": "External analysis", "comment": "", "uuid": "57c5310b-dc34-43cb-8b8e-4846950d210f", "timestamp": "1472541011", "to_ids": false, "value": "http://www.xylibox.com/2011/06/have-fun-with-trojan-ransomwin32xorist.html", "type": "link"}, {"category": "Other", "comment": "", "uuid": "57c444c0-8004-48fa-9c33-8aca950d210f", "timestamp": "1472480448", "to_ids": false, "value": "UPX packed", "type": "comment"}, {"category": "Other", "comment": "", "uuid": "57c44648-96f4-45d4-a8eb-453e950d210f", "timestamp": "1472480840", "to_ids": false, "value": "Key: 85350044dF4AC3518D185678A9414A7F,\r\nEncryption rounds:8,\r\nStart offset: 64,\r\nAlgorithm: TEA", "type": "text"}, {"category": "Payload delivery", "comment": "Imported via the Freetext Import Tool", "uuid": "57c4448a-fb04-457d-87e7-4127950d210f", "timestamp": "1472480394", "to_ids": true, "value": "3Z4wnG9603it23y.exe", "type": "filename"}, {"category": "Payload delivery", "comment": "Imported via the Freetext Import Tool", "uuid": "57c4448b-454c-4d17-90d1-4d2f950d210f", "timestamp": "1472480395", "to_ids": true, "value": "0749bae92ca336a02c83d126e04ec628", "type": "md5"}, {"category": "Payload delivery", "comment": "Imported via the Freetext Import Tool", "uuid": "57c4448a-bef0-4ba7-a071-444e950d210f", "timestamp": "1472480394", "to_ids": true, "value": "77b0c41b7d340b8a3d903f21347bbf06aa766b5b", "type": "sha1"}, {"category": "Payload delivery", "comment": "Imported via the Freetext Import Tool", "uuid": "57c4448b-3fa4-4d65-9ccc-4afa950d210f", "timestamp": "1472480395", "to_ids": true, "value": "b3c4ae251f8094fa15b510051835c657eaef2a6cea46075d3aec964b14a99f68", "type": "sha256"}, {"category": "Persistence mechanism", "comment": "", "uuid": "57c54b0f-27a4-458b-8e63-4455950d210f", "timestamp": "1472547599", "to_ids": true, "value": "Software\\Wow6432Node\\Microsoft\\Windows\\CurrentVersion\\Run|%TEMP%\\3Z4wnG9603it23y.exe", "type": "regkey|value"}], "Tag": [{"colour": "#ffffff", "exportable": true, "name": "tlp:white"}, {"colour": "#3d7a00", "exportable": true, "name": "circl:incident-classification=\"malware\""}, {"colour": "#420053", "exportable": true, "name": "ms-caro-malware:malware-type=\"Ransom\""}, {"colour": "#2c4f00", "exportable": true, "name": "malware_classification:malware-category=\"Ransomware\""}], "published": true, "date": "2016-08-29", "Orgc": {"name": "CIRCL", "uuid": "55f6ea5e-2c60-40e5-964f-47a8950d210f"}, "threat_level_id": "3", "uuid": "57c4445b-c548-4654-af0b-4be3950d210f"}}
@@ -0,0 +1,78 @@
{
    "Attribute": [
        {
            "ShadowAttribute": [],
            "SharingGroup": [],
            "category": "Payload delivery",
            "comment": "",
            "deleted": false,
            "distribution": "5",
            "event_id": "2",
            "id": "7",
            "sharing_group_id": "0",
            "timestamp": "1465681304",
            "to_ids": false,
            "type": "url",
            "uuid": "575c8598-f1f0-4c16-a94a-0612c0a83866",
            "value": "http://fake.website.com/malware/is/here"
        },
        {
            "ShadowAttribute": [],
            "SharingGroup": [],
            "category": "Payload type",
            "comment": "",
            "deleted": false,
            "distribution": "5",
            "event_id": "2",
            "id": "6",
            "sharing_group_id": "0",
            "timestamp": "1465681801",
            "to_ids": false,
            "type": "text",
            "uuid": "575c8549-9010-4555-8b37-057ac0a83866",
            "value": "Locky"
        }
    ],
    "Org": {
        "id": "1",
        "name": "ORGNAME",
        "uuid": "57586e9a-4a64-4f79-9009-4dc1c0a83866"
    },
    "Orgc": {
        "id": "1",
        "name": "ORGNAME",
        "uuid": "57586e9a-4a64-4f79-9009-4dc1c0a83866"
    },
    "RelatedEvent": [],
    "ShadowAttribute": [],
    "Tag": [
        {
            "colour": "#005a5a",
            "exportable": true,
            "id": "6",
            "name": "ecsirt:malicious-code=\"ransomware\""
        },
        {
            "colour": "#142bf7",
            "exportable": true,
            "id": "1",
            "name": "for_intelmq_processing"
        }
    ],
    "analysis": "0",
    "attribute_count": "2",
    "date": "2016-06-09",
    "distribution": "0",
    "id": "2",
    "info": "A Random Event",
    "locked": false,
    "org_id": "1",
    "orgc_id": "1",
    "proposal_email_lock": false,
    "publish_timestamp": "0",
    "published": false,
    "sharing_group_id": "0",
    "threat_level_id": "1",
    "timestamp": "1465681801",
    "uuid": "5758ebf5-c898-48e6-9fe9-5665c0a83866"
}
@@ -0,0 +1,34 @@
{
    "Event": {
        "uuid": "57c06bb1-625c-4d34-9b9f-4066950d210f",
        "orgc_id": "1",
        "publish_timestamp": "0",
        "RelatedEvent": [],
        "org_id": "1",
        "Org": {
            "uuid": "55f6ea5e-2c60-40e5-964f-47a8950d210f",
            "name": "CIRCL",
            "id": "1"
        },
        "attribute_count": null,
        "distribution": "0",
        "sharing_group_id": "0",
        "threat_level_id": "1",
        "locked": false,
        "Attribute": [],
        "published": false,
        "ShadowAttribute": [],
        "date": "2016-08-26",
        "info": "This is a test",
        "timestamp": "1472228273",
        "Orgc": {
            "uuid": "55f6ea5e-2c60-40e5-964f-47a8950d210f",
            "name": "CIRCL",
            "id": "1"
        },
        "id": "594",
        "proposal_email_lock": false,
        "analysis": "0"
    }
}
@@ -0,0 +1,69 @@
[
    {
        "id": "3",
        "org": "",
        "date": "2016-12-01",
        "info": "Another random Event",
        "published": false,
        "uuid": "5758ebf5-c898-48e6-9fe9-5665c0a83866",
        "attribute_count": "2",
        "analysis": "0",
        "orgc": "",
        "timestamp": "1465681801",
        "distribution": "3",
        "proposal_email_lock": false,
        "locked": false,
        "threat_level_id": "1",
        "publish_timestamp": "0",
        "sharing_group_id": "0",
        "org_id": "1",
        "orgc_id": "1",
        "Org": {
            "id": "1",
            "name": "ORGNAME"
        },
        "Orgc": {
            "id": "1",
            "name": "ORGNAME"
        },
        "EventTag": [
            {
                "id": "9760",
                "event_id": "6028",
                "tag_id": "4",
                "Tag": {
                    "id": "4",
                    "name": "TLP:GREEN",
                    "colour": "#33822d",
                    "exportable": true
                }
            },
            {
                "id": "9801",
                "event_id": "3",
                "tag_id": "1",
                "Tag": {
                    "id": "1",
                    "name": "for_intelmq_processing",
                    "colour": "#00ad1c",
                    "exportable": true
                }
            },
            {
                "id": "9803",
                "event_id": "3",
                "tag_id": "6",
                "Tag": {
                    "id": "6",
                    "name": "ecsirt:malicious-code=\"ransomware\"",
                    "colour": "#005a5a",
                    "exportable": true
                }
            }
        ],
        "SharingGroup": {
            "id": null,
            "name": null
        }
    }
]
@@ -0,0 +1,100 @@
{
    "response": [
        {
            "SharingGroup": {
                "id": "1",
                "name": "PrivateTrustedGroup",
                "description": "",
                "releasability": "",
                "local": true,
                "active": true
            },
            "Organisation": {
                "id": "1",
                "name": "CIRCL",
                "uuid": "55f6ea5e-2c60-40e5-964f-47a8950d210f"
            },
            "SharingGroupOrg": [
                {
                    "id": "1",
                    "sharing_group_id": "1",
                    "org_id": "1",
                    "extend": true,
                    "Organisation": {
                        "name": "CIRCL",
                        "id": "1",
                        "uuid": "55f6ea5e-2c60-40e5-964f-47a8950d210f"
                    }
                },
                {
                    "id": "2",
                    "sharing_group_id": "1",
                    "org_id": "2",
                    "extend": false,
                    "Organisation": {
                        "name": "PifPafPoum",
                        "id": "2",
                        "uuid": "56bf12a7-c19c-4b98-83e7-d9bb02de0b81"
                    }
                }
            ],
            "SharingGroupServer": [
                {
                    "all_orgs": false,
                    "server_id": "0",
                    "sharing_group_id": "1",
                    "Server": []
                }
            ],
            "editable": true
        },
        {
            "SharingGroup": {
                "id": "2",
                "name": "test",
                "description": "",
                "releasability": "",
                "local": true,
                "active": true
            },
            "Organisation": {
                "id": "1",
                "name": "CIRCL",
                "uuid": "55f6ea5e-2c60-40e5-964f-47a8950d210f"
            },
            "SharingGroupOrg": [
                {
                    "id": "3",
                    "sharing_group_id": "2",
                    "org_id": "1",
                    "extend": true,
                    "Organisation": {
                        "name": "CIRCL",
                        "id": "1",
                        "uuid": "55f6ea5e-2c60-40e5-964f-47a8950d210f"
                    }
                },
                {
                    "id": "4",
                    "sharing_group_id": "2",
                    "org_id": "2",
                    "extend": false,
                    "Organisation": {
                        "name": "PifPafPoum",
                        "id": "2",
                        "uuid": "56bf12a7-c19c-4b98-83e7-d9bb02de0b81"
                    }
                }
            ],
            "SharingGroupServer": [
                {
                    "all_orgs": false,
                    "server_id": "0",
                    "sharing_group_id": "2",
                    "Server": []
                }
            ],
            "editable": true
        }
    ]
}
@@ -1,5 +1,6 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import print_function

from pymisp import PyMISP
from keys import url, key

@@ -41,13 +42,14 @@ class TestBasic(unittest.TestCase):
        event = self.misp.new_event(0, 1, 0, "This is a test")
        event_id = self._clean_event(event)
        to_check = {u'Event': {u'info': u'This is a test', u'locked': False,
                               u'attribute_count': None, u'analysis': u'0',
                               u'attribute_count': None, 'disable_correlation': False, u'analysis': u'0',
                               u'ShadowAttribute': [], u'published': False,
                               u'distribution': u'0', u'Attribute': [], u'proposal_email_lock': False,
                               u'Org': {u'name': u'ORGNAME'},
                               u'Orgc': {u'name': u'ORGNAME'},
                               u'Galaxy': [],
                               u'threat_level_id': u'1'}}
        print event
        print(event)
        self.assertEqual(event, to_check, 'Failed at creating a new Event')
        return int(event_id)

@@ -61,6 +63,7 @@ class TestBasic(unittest.TestCase):
                               u'ShadowAttribute': [], u'published': False, u'distribution': u'0',
                               u'Org': {u'name': u'ORGNAME'},
                               u'Orgc': {u'name': u'ORGNAME'},
                               u'Galaxy': [],
                               u'Attribute': [
                                   {u'category': u'Payload installation', u'comment': u'Fanny modules',
                                    u'to_ids': False, u'value': u'dll_installer.dll|0a209ac0de4ac033f31d6ba9191a8f7a',

@@ -84,6 +87,7 @@ class TestBasic(unittest.TestCase):
                               u'ShadowAttribute': [], u'published': True, u'distribution': u'0',
                               u'Org': {u'name': u'ORGNAME'},
                               u'Orgc': {u'name': u'ORGNAME'},
                               u'Galaxy': [],
                               u'Attribute': [
                                   {u'category': u'Payload installation', u'comment': u'Fanny modules',
                                    u'to_ids': False, u'value': u'dll_installer.dll|0a209ac0de4ac033f31d6ba9191a8f7a',

@@ -99,15 +103,19 @@ class TestBasic(unittest.TestCase):

    def delete(self, eventid):
        event = self.misp.delete_event(eventid)
        print event.json()
        print(event)

    def delete_attr(self, attrid):
        event = self.misp.delete_attribute(attrid)
        print event.json()
        print(event)

    def get(self, eventid):
        event = self.misp.get_event(eventid)
        print event.json()
        print(event)

    def get_stix(self, **kwargs):
        event = self.misp.get_stix(kwargs)
        print(event)

    def add(self):
        event = {u'Event': {u'info': u'This is a test', u'locked': False,

@@ -125,7 +133,7 @@ class TestBasic(unittest.TestCase):
                            u'ShadowAttribute': [], u'distribution': u'2', u'type': u'filename|sha256'}],
                            u'proposal_email_lock': False, u'threat_level_id': u'1'}}
        event = self.misp.add_event(event)
        print event.json()
        print(event)

    def test_create_event(self):
        eventid = self.new_event()

@@ -151,6 +159,9 @@ class TestBasic(unittest.TestCase):
        time.sleep(1)
        self.delete(eventid)

    def test_one_or_more(self):
        self.assertEqual(self.misp._one_or_more(1), (1,))
        self.assertEqual(self.misp._one_or_more([1]), [1])

if __name__ == '__main__':
    unittest.main()
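The live tests above expect a reachable MISP instance configured through keys.py. A minimal smoke test in the same spirit (the url and key values are placeholders):

from pymisp import PyMISP

url = 'https://misp.example.com'  # placeholder
key = 'YOUR_API_KEY'              # placeholder
misp = PyMISP(url, key)
print(misp.get_version())         # e.g. {'version': '2.4.56'}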
@@ -0,0 +1,223 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-

import unittest
import requests_mock
import json
import os

import pymisp as pm
from pymisp import PyMISP
# from pymisp import NewEventError
from pymisp import MISPEvent
from pymisp import EncodeUpdate
from pymisp import EncodeFull


@requests_mock.Mocker()
class TestOffline(unittest.TestCase):

    def setUp(self):
        self.maxDiff = None
        self.domain = 'http://misp.local/'
        self.key = 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa'
        with open('tests/misp_event.json', 'r') as f:
            self.event = {'Event': json.load(f)}
        with open('tests/new_misp_event.json', 'r') as f:
            self.new_misp_event = {'Event': json.load(f)}
        self.ressources_path = os.path.join(os.path.abspath(os.path.dirname(__file__)), '../pymisp/data')
        with open(os.path.join(self.ressources_path, 'describeTypes.json'), 'r') as f:
            self.types = json.load(f)
        with open('tests/sharing_groups.json', 'r') as f:
            self.sharing_groups = json.load(f)
        self.auth_error_msg = {"name": "Authentication failed. Please make sure you pass the API key of an API enabled user along in the Authorization header.",
                               "message": "Authentication failed. Please make sure you pass the API key of an API enabled user along in the Authorization header.",
                               "url": "/events/1"}
        with open('tests/search_index_result.json', 'r') as f:
            self.search_index_result = json.load(f)

    def initURI(self, m):
        m.register_uri('GET', self.domain + 'events/1', json=self.auth_error_msg, status_code=403)
        m.register_uri('GET', self.domain + 'servers/getVersion.json', json={"version": "2.4.56"})
        m.register_uri('GET', self.domain + 'sharing_groups.json', json=self.sharing_groups)
        m.register_uri('GET', self.domain + 'attributes/describeTypes.json', json=self.types)
        m.register_uri('GET', self.domain + 'events/2', json=self.event)
        m.register_uri('POST', self.domain + 'events/5758ebf5-c898-48e6-9fe9-5665c0a83866', json=self.event)
        m.register_uri('DELETE', self.domain + 'events/2', json={'message': 'Event deleted.'})
        m.register_uri('DELETE', self.domain + 'events/3', json={'errors': ['Invalid event'], 'message': 'Invalid event', 'name': 'Invalid event', 'url': '/events/3'})
        m.register_uri('DELETE', self.domain + 'attributes/2', json={'message': 'Attribute deleted.'})
        m.register_uri('GET', self.domain + 'events/index/searchtag:1', json=self.search_index_result)
        m.register_uri('GET', self.domain + 'events/index/searchtag:ecsirt:malicious-code=%22ransomware%22', json=self.search_index_result)

    def test_getEvent(self, m):
        self.initURI(m)
        pymisp = PyMISP(self.domain, self.key)
        e1 = pymisp.get_event(2)
        e2 = pymisp.get(2)
        self.assertEqual(e1, e2)
        self.assertEqual(self.event, e2)

    def test_updateEvent(self, m):
        self.initURI(m)
        pymisp = PyMISP(self.domain, self.key)
        e0 = pymisp.update_event('5758ebf5-c898-48e6-9fe9-5665c0a83866', json.dumps(self.event))
        e1 = pymisp.update_event('5758ebf5-c898-48e6-9fe9-5665c0a83866', self.event)
        self.assertEqual(e0, e1)
        e2 = pymisp.update(e0)
        self.assertEqual(e1, e2)
        self.assertEqual(self.event, e2)

    def test_deleteEvent(self, m):
        self.initURI(m)
        pymisp = PyMISP(self.domain, self.key)
        d = pymisp.delete_event(2)
        self.assertEqual(d, {'message': 'Event deleted.'})
        d = pymisp.delete_event(3)
        self.assertEqual(d, {'errors': ['Invalid event'], 'message': 'Invalid event', 'name': 'Invalid event', 'url': '/events/3'})

    def test_deleteAttribute(self, m):
        self.initURI(m)
        pymisp = PyMISP(self.domain, self.key)
        d = pymisp.delete_attribute(2)
        self.assertEqual(d, {'message': 'Attribute deleted.'})

    def test_publish(self, m):
        self.initURI(m)
        pymisp = PyMISP(self.domain, self.key)
        e = pymisp.publish(self.event)  # requests-mock always returns the non-published event
        pub = self.event
        pub['Event']['published'] = True
        # self.assertEqual(e, pub) FIXME: broken test, not-published event returned
        e = pymisp.publish(self.event)
        self.assertEqual(e, {'error': 'Already published'})

    def test_getVersions(self, m):
        self.initURI(m)
        pymisp = PyMISP(self.domain, self.key)
        api_version = pymisp.get_api_version()
        self.assertEqual(api_version, {'version': pm.__version__})
        server_version = pymisp.get_version()
        self.assertEqual(server_version, {"version": "2.4.56"})

    def test_getSharingGroups(self, m):
        self.initURI(m)
        pymisp = PyMISP(self.domain, self.key)
        sharing_groups = pymisp.get_sharing_groups()
        self.assertEqual(sharing_groups[0], self.sharing_groups['response'][0])

    def test_auth_error(self, m):
        self.initURI(m)
        pymisp = PyMISP(self.domain, self.key)
        error = pymisp.get(1)
        response = self.auth_error_msg
        response['errors'] = [response['message']]
        self.assertEqual(error, response)

    def test_newEvent(self, m):
        error_empty_info = {'message': 'The event could not be saved.', 'name': 'Add event failed.', 'errors': {'Event': {'info': ['Info cannot be empty.']}}, 'url': '/events/add'}
        error_empty_info_flatten = {u'message': u'The event could not be saved.', u'name': u'Add event failed.', u'errors': [u"Error in info: Info cannot be empty."], u'url': u'/events/add'}
        self.initURI(m)
        pymisp = PyMISP(self.domain, self.key)
        m.register_uri('POST', self.domain + 'events', json=error_empty_info)
        # TODO Add test exception if info field isn't set
        response = pymisp.new_event(0, 1, 0, 'Foo')
        self.assertEqual(response, error_empty_info_flatten)
        m.register_uri('POST', self.domain + 'events', json=self.new_misp_event)
        response = pymisp.new_event(0, 1, 0, "This is a test.", '2016-08-26', False)
        self.assertEqual(response, self.new_misp_event)

    def test_eventObject(self, m):
        self.initURI(m)
        pymisp = PyMISP(self.domain, self.key)
        misp_event = MISPEvent(pymisp.describe_types)
        with open('tests/57c4445b-c548-4654-af0b-4be3950d210f.json', 'r') as f:
            misp_event.load(f.read())
        json.dumps(misp_event, cls=EncodeUpdate)
        json.dumps(misp_event, cls=EncodeFull)

    def test_searchIndexByTagId(self, m):
        self.initURI(m)
        pymisp = PyMISP(self.domain, self.key)
        response = pymisp.search_index(tag="1")
        self.assertEqual(response['response'], self.search_index_result)

    def test_searchIndexByTagName(self, m):
        self.initURI(m)
        pymisp = PyMISP(self.domain, self.key)
        response = pymisp.search_index(tag='ecsirt:malicious-code="ransomware"')
        self.assertEqual(response['response'], self.search_index_result)

    def test_addAttributes(self, m):
        class MockPyMISP(PyMISP):
            def _send_attributes(self, event, attributes, proposal=False):
                return len(attributes)
        self.initURI(m)
        p = MockPyMISP(self.domain, self.key)
        evt = p.get(1)
        self.assertEqual(3, p.add_hashes(evt, md5='68b329da9893e34099c7d8ad5cb9c940',
                                         sha1='adc83b19e793491b1c6ea0fd8b46cd9f32e592fc',
                                         sha256='01ba4719c80b6fe911b091a7c05124b64eeece964e09c058ef8f9805daca546b',
                                         filename='foobar.exe'))
        self.assertEqual(3, p.add_hashes(evt, md5='68b329da9893e34099c7d8ad5cb9c940',
                                         sha1='adc83b19e793491b1c6ea0fd8b46cd9f32e592fc',
                                         sha256='01ba4719c80b6fe911b091a7c05124b64eeece964e09c058ef8f9805daca546b'))
        p.av_detection_link(evt, 'https://foocorp.com')
        p.add_detection_name(evt, 'WATERMELON')
        p.add_filename(evt, 'foobar.exe')
        p.add_regkey(evt, 'HKLM\\Software\\Microsoft\\Outlook\\Addins\\foobar')
        p.add_regkey(evt, 'HKLM\\Software\\Microsoft\\Outlook\\Addins\\foobar', rvalue='foobar')
        regkeys = {
            'HKLM\\Software\\Microsoft\\Outlook\\Addins\\foo': None,
            'HKLM\\Software\\Microsoft\\Outlook\\Addins\\bar': 'baz',
            'HKLM\\Software\\Microsoft\\Outlook\\Addins\\bae': 0,
        }
        self.assertEqual(3, p.add_regkeys(evt, regkeys))
        p.add_pattern(evt, '.*foobar.*', in_memory=True)
        p.add_pattern(evt, '.*foobar.*', in_file=True)
        self.assertRaises(pm.PyMISPError, p.add_pattern, evt, '.*foobar.*', in_memory=False, in_file=False)
        p.add_pipe(evt, 'foo')
        p.add_pipe(evt, '\\.\\pipe\\foo')
        self.assertEqual(3, p.add_pipe(evt, ['foo', 'bar', 'baz']))
        self.assertEqual(3, p.add_pipe(evt, ['foo', 'bar', '\\.\\pipe\\baz']))
        p.add_mutex(evt, 'foo')
        self.assertEqual(1, p.add_mutex(evt, '\\BaseNamedObjects\\foo'))
        self.assertEqual(3, p.add_mutex(evt, ['foo', 'bar', 'baz']))
        self.assertEqual(3, p.add_mutex(evt, ['foo', 'bar', '\\BaseNamedObjects\\baz']))
        p.add_yara(evt, 'rule Foo {}')
        self.assertEqual(2, p.add_yara(evt, ['rule Foo {}', 'rule Bar {}']))
        p.add_ipdst(evt, '1.2.3.4')
        self.assertEqual(2, p.add_ipdst(evt, ['1.2.3.4', '5.6.7.8']))
        p.add_ipsrc(evt, '1.2.3.4')
        self.assertEqual(2, p.add_ipsrc(evt, ['1.2.3.4', '5.6.7.8']))
        p.add_hostname(evt, 'a.foobar.com')
        self.assertEqual(2, p.add_hostname(evt, ['a.foobar.com', 'a.foobaz.com']))
        p.add_domain(evt, 'foobar.com')
        self.assertEqual(2, p.add_domain(evt, ['foobar.com', 'foobaz.com']))
        p.add_domain_ip(evt, 'foo.com', '1.2.3.4')
        self.assertEqual(2, p.add_domain_ip(evt, 'foo.com', ['1.2.3.4', '5.6.7.8']))
        self.assertEqual(2, p.add_domains_ips(evt, {'foo.com': '1.2.3.4', 'bar.com': '4.5.6.7'}))
        p.add_url(evt, 'https://example.com')
        self.assertEqual(2, p.add_url(evt, ['https://example.com', 'http://foo.com']))
        p.add_useragent(evt, 'Mozilla')
        self.assertEqual(2, p.add_useragent(evt, ['Mozilla', 'Godzilla']))
        p.add_traffic_pattern(evt, 'blabla')
        p.add_snort(evt, 'blaba')
        p.add_net_other(evt, 'blabla')
        p.add_email_src(evt, 'foo@bar.com')
        p.add_email_dst(evt, 'foo@bar.com')
        p.add_email_subject(evt, 'you won the lottery')
        p.add_email_attachment(evt, 'foo.doc')
        p.add_target_email(evt, 'foo@bar.com')
        p.add_target_user(evt, 'foo')
        p.add_target_machine(evt, 'foobar')
        p.add_target_org(evt, 'foobar')
        p.add_target_location(evt, 'foobar')
        p.add_target_external(evt, 'foobar')
        p.add_threat_actor(evt, 'WATERMELON')
        p.add_internal_link(evt, 'foobar')
        p.add_internal_comment(evt, 'foobar')
        p.add_internal_text(evt, 'foobar')
        p.add_internal_other(evt, 'foobar')
        p.add_attachment(evt, "testFile", "Attachment added!")


if __name__ == '__main__':
    unittest.main()
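Since every HTTP call is intercepted by requests_mock, this suite runs without any MISP instance. A sketch of invoking it programmatically instead of through nose:

import unittest

suite = unittest.defaultTestLoader.discover('tests', pattern='test_offline.py')
unittest.TextTestRunner(verbosity=2).run(suite)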