diff --git a/.gitignore b/.gitignore index 4b22c47..39eab06 100644 --- a/.gitignore +++ b/.gitignore @@ -1,6 +1,9 @@ +*.swp +*.pem *.pyc examples/keys.py examples/cudeso.py +examples/feed-generator/output/*.json build/* dist/* pymisp.egg-info/* diff --git a/.travis.yml b/.travis.yml new file mode 100644 index 0000000..fa42b95 --- /dev/null +++ b/.travis.yml @@ -0,0 +1,25 @@ +language: python + +cache: pip + +python: + - "2.7" + - "3.3" + - "3.4" + - "3.5" + - "3.5-dev" + - "nightly" + +install: + - pip install -U nose + - pip install coveralls + - pip install codecov + - pip install requests-mock + - pip install . + +script: + - nosetests --with-coverage --cover-package=pymisp tests/test_offline.py + +after_success: + - codecov + - coveralls diff --git a/MANIFEST.in b/MANIFEST.in new file mode 100644 index 0000000..d1cf49c --- /dev/null +++ b/MANIFEST.in @@ -0,0 +1 @@ +include pymisp/data/* diff --git a/README.md b/README.md index 5a05cdb..204a434 100644 --- a/README.md +++ b/README.md @@ -1,3 +1,10 @@ +README +====== + +[![Documentation Status](https://readthedocs.org/projects/pymisp/badge/?version=master)](http://pymisp.readthedocs.io/en/master/?badge=master) +[![Build Status](https://travis-ci.org/MISP/PyMISP.svg?branch=master)](https://travis-ci.org/MISP/PyMISP) +[![Coverage Status](https://coveralls.io/repos/github/MISP/PyMISP/badge.svg?branch=master)](https://coveralls.io/github/MISP/PyMISP?branch=master) + # PyMISP - Python Library to access MISP PyMISP is a Python library to access [MISP](https://github.com/MISP/MISP) platforms via their REST API. 
@@ -8,40 +15,47 @@ PyMISP allows you to fetch events, add or update events/attributes, add or updat * [requests](http://docs.python-requests.org) -## Install +## Install from pip -~~~~ +``` +pip install pymisp +``` + +## Install the lastest version from repo + +``` +git clone https://github.com/CIRCL/PyMISP.git && cd PyMISP python setup.py install -~~~~ +``` -## Samples and how to use PyMISP +## Samples and how to use PyMISP Various examples and samples scripts are in the [examples/](examples/) directory. In the examples directory, you will need to change the keys.py.sample to enter your MISP url and API key. -~~~~ +``` cd examples cp keys.py.sample keys.py vim keys.py -~~~~ +``` The API key of MISP is available in the Automation section of the MISP web interface. To test if your URL and API keys are correct, you can test with examples/last.py to fetch the last 10 events published. -~~~~ +``` cd examples python last.py -l 10 -~~~~ +``` ## Documentation -[PyMISP API documentation is available](http://www.circl.lu/assets/files/PyMISP.pdf). +[PyMISP API documentation is available](https://media.readthedocs.org/pdf/pymisp/master/pymisp.pdf). Documentation can be generated with epydoc: -~~~~ - epydoc --url https://github.com/CIRCL/PyMISP --graph all --name PyMISP --pdf pymisp -o doc -~~~~ +``` +epydoc --url https://github.com/CIRCL/PyMISP --graph all --name PyMISP --pdf pymisp -o doc +``` diff --git a/docs/Makefile b/docs/Makefile new file mode 100644 index 0000000..fda38db --- /dev/null +++ b/docs/Makefile @@ -0,0 +1,225 @@ +# Makefile for Sphinx documentation +# + +# You can set these variables from the command line. +SPHINXOPTS = +SPHINXBUILD = sphinx-build +PAPER = +BUILDDIR = build + +# Internal variables. 
+PAPEROPT_a4 = -D latex_paper_size=a4 +PAPEROPT_letter = -D latex_paper_size=letter +ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source +# the i18n builder cannot share the environment and doctrees with the others +I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source + +.PHONY: help +help: + @echo "Please use \`make ' where is one of" + @echo " html to make standalone HTML files" + @echo " dirhtml to make HTML files named index.html in directories" + @echo " singlehtml to make a single large HTML file" + @echo " pickle to make pickle files" + @echo " json to make JSON files" + @echo " htmlhelp to make HTML files and a HTML help project" + @echo " qthelp to make HTML files and a qthelp project" + @echo " applehelp to make an Apple Help Book" + @echo " devhelp to make HTML files and a Devhelp project" + @echo " epub to make an epub" + @echo " epub3 to make an epub3" + @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" + @echo " latexpdf to make LaTeX files and run them through pdflatex" + @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx" + @echo " text to make text files" + @echo " man to make manual pages" + @echo " texinfo to make Texinfo files" + @echo " info to make Texinfo files and run them through makeinfo" + @echo " gettext to make PO message catalogs" + @echo " changes to make an overview of all changed/added/deprecated items" + @echo " xml to make Docutils-native XML files" + @echo " pseudoxml to make pseudoxml-XML files for display purposes" + @echo " linkcheck to check all external links for integrity" + @echo " doctest to run all doctests embedded in the documentation (if enabled)" + @echo " coverage to run coverage check of the documentation (if enabled)" + @echo " dummy to check syntax errors of document sources" + +.PHONY: clean +clean: + rm -rf $(BUILDDIR)/* + +.PHONY: html +html: + $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html + @echo + @echo "Build 
finished. The HTML pages are in $(BUILDDIR)/html." + +.PHONY: dirhtml +dirhtml: + $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml + @echo + @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." + +.PHONY: singlehtml +singlehtml: + $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml + @echo + @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." + +.PHONY: pickle +pickle: + $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle + @echo + @echo "Build finished; now you can process the pickle files." + +.PHONY: json +json: + $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json + @echo + @echo "Build finished; now you can process the JSON files." + +.PHONY: htmlhelp +htmlhelp: + $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp + @echo + @echo "Build finished; now you can run HTML Help Workshop with the" \ + ".hhp project file in $(BUILDDIR)/htmlhelp." + +.PHONY: qthelp +qthelp: + $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp + @echo + @echo "Build finished; now you can run "qcollectiongenerator" with the" \ + ".qhcp project file in $(BUILDDIR)/qthelp, like this:" + @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/PyMISP.qhcp" + @echo "To view the help file:" + @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/PyMISP.qhc" + +.PHONY: applehelp +applehelp: + $(SPHINXBUILD) -b applehelp $(ALLSPHINXOPTS) $(BUILDDIR)/applehelp + @echo + @echo "Build finished. The help book is in $(BUILDDIR)/applehelp." + @echo "N.B. You won't be able to view it unless you put it in" \ + "~/Library/Documentation/Help or install it in your application" \ + "bundle." + +.PHONY: devhelp +devhelp: + $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp + @echo + @echo "Build finished." 
+ @echo "To view the help file:" + @echo "# mkdir -p $$HOME/.local/share/devhelp/PyMISP" + @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/PyMISP" + @echo "# devhelp" + +.PHONY: epub +epub: + $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub + @echo + @echo "Build finished. The epub file is in $(BUILDDIR)/epub." + +.PHONY: epub3 +epub3: + $(SPHINXBUILD) -b epub3 $(ALLSPHINXOPTS) $(BUILDDIR)/epub3 + @echo + @echo "Build finished. The epub3 file is in $(BUILDDIR)/epub3." + +.PHONY: latex +latex: + $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex + @echo + @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." + @echo "Run \`make' in that directory to run these through (pdf)latex" \ + "(use \`make latexpdf' here to do that automatically)." + +.PHONY: latexpdf +latexpdf: + $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex + @echo "Running LaTeX files through pdflatex..." + $(MAKE) -C $(BUILDDIR)/latex all-pdf + @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." + +.PHONY: latexpdfja +latexpdfja: + $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex + @echo "Running LaTeX files through platex and dvipdfmx..." + $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja + @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." + +.PHONY: text +text: + $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text + @echo + @echo "Build finished. The text files are in $(BUILDDIR)/text." + +.PHONY: man +man: + $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man + @echo + @echo "Build finished. The manual pages are in $(BUILDDIR)/man." + +.PHONY: texinfo +texinfo: + $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo + @echo + @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." + @echo "Run \`make' in that directory to run these through makeinfo" \ + "(use \`make info' here to do that automatically)." 
+ +.PHONY: info +info: + $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo + @echo "Running Texinfo files through makeinfo..." + make -C $(BUILDDIR)/texinfo info + @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." + +.PHONY: gettext +gettext: + $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale + @echo + @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." + +.PHONY: changes +changes: + $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes + @echo + @echo "The overview file is in $(BUILDDIR)/changes." + +.PHONY: linkcheck +linkcheck: + $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck + @echo + @echo "Link check complete; look for any errors in the above output " \ + "or in $(BUILDDIR)/linkcheck/output.txt." + +.PHONY: doctest +doctest: + $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest + @echo "Testing of doctests in the sources finished, look at the " \ + "results in $(BUILDDIR)/doctest/output.txt." + +.PHONY: coverage +coverage: + $(SPHINXBUILD) -b coverage $(ALLSPHINXOPTS) $(BUILDDIR)/coverage + @echo "Testing of coverage in the sources finished, look at the " \ + "results in $(BUILDDIR)/coverage/python.txt." + +.PHONY: xml +xml: + $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml + @echo + @echo "Build finished. The XML files are in $(BUILDDIR)/xml." + +.PHONY: pseudoxml +pseudoxml: + $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml + @echo + @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml." + +.PHONY: dummy +dummy: + $(SPHINXBUILD) -b dummy $(ALLSPHINXOPTS) $(BUILDDIR)/dummy + @echo + @echo "Build finished. Dummy builder generates no files." 
diff --git a/docs/source/conf.py b/docs/source/conf.py new file mode 100644 index 0000000..eeecc5e --- /dev/null +++ b/docs/source/conf.py @@ -0,0 +1,448 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- +# +# PyMISP documentation build configuration file, created by +# sphinx-quickstart on Fri Aug 26 11:39:17 2016. +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +# +import os +import sys +sys.path.insert(0, os.path.abspath('.')) + +from recommonmark.parser import CommonMarkParser + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +# +# needs_sphinx = '1.0' + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + 'sphinx.ext.autodoc', + 'sphinx.ext.doctest', + 'sphinx.ext.intersphinx', + 'sphinx.ext.todo', + 'sphinx.ext.coverage', + 'sphinx.ext.mathjax', + 'sphinx.ext.ifconfig', + 'sphinx.ext.viewcode', + 'sphinx.ext.napoleon', +] + +napoleon_google_docstring = False +napoleon_use_param = False +napoleon_use_ivar = True + +# Add any paths that contain templates here, relative to this directory. +templates_path = ['_templates'] + +source_parsers = { + '.md': CommonMarkParser, +} + +# The suffix(es) of source filenames. 
+# You can specify multiple suffix as a list of string: +# +# source_suffix = ['.rst', '.md'] +source_suffix = ['.rst', '.md'] + +# The encoding of source files. +# +# source_encoding = 'utf-8-sig' + +# The master toctree document. +master_doc = 'index' + +# General information about the project. +project = 'PyMISP' +copyright = '2016, Raphaël Vinot' +author = 'Raphaël Vinot' + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The short X.Y version. +version = '2.4.50' +# The full version, including alpha/beta/rc tags. +release = '2.4.50' + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. +language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +# +# today = '' +# +# Else, today_fmt is used as the format for a strftime call. +# +# today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +# This patterns also effect to html_static_path and html_extra_path +exclude_patterns = [] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +# +# default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +# +# add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +# +# add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +# +# show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. 
+pygments_style = 'sphinx' + +# A list of ignored prefixes for module index sorting. +# modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +# keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = True + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +# +html_theme = 'alabaster' + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +# +# html_theme_options = {} + +# Add any paths that contain custom themes here, relative to this directory. +# html_theme_path = [] + +# The name for this set of Sphinx documents. +# " v documentation" by default. +# +# html_title = 'PyMISP v2.4.50' + +# A shorter title for the navigation bar. Default is the same as html_title. +# +# html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +# +# html_logo = None + +# The name of an image file (relative to this directory) to use as a favicon of +# the docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +# +# html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ['_static'] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. 
+# +# html_extra_path = [] + +# If not None, a 'Last updated on:' timestamp is inserted at every page +# bottom, using the given strftime format. +# The empty string is equivalent to '%b %d, %Y'. +# +# html_last_updated_fmt = None + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +# +# html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +# +# html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +# +# html_additional_pages = {} + +# If false, no module index is generated. +# +# html_domain_indices = True + +# If false, no index is generated. +# +# html_use_index = True + +# If true, the index is split into individual pages for each letter. +# +# html_split_index = False + +# If true, links to the reST sources are added to the pages. +# +# html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +# +# html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. +# +# html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +# +# html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +# html_file_suffix = None + +# Language to be used for generating the HTML full-text search index. +# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'h', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'r', 'sv', 'tr', 'zh' +# +# html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default. +# 'ja' uses this config value. +# 'zh' user can custom change `jieba` dictionary path. 
+# +# html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +# +# html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder. +htmlhelp_basename = 'PyMISPdoc' + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + # + # 'papersize': 'letterpaper', + + # The font size ('10pt', '11pt' or '12pt'). + # + # 'pointsize': '10pt', + + # Additional stuff for the LaTeX preamble. + # + # 'preamble': '', + + # Latex figure (float) alignment + # + # 'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + (master_doc, 'PyMISP.tex', 'PyMISP Documentation', + 'Raphaël Vinot', 'manual'), +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +# +# latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +# +# latex_use_parts = False + +# If true, show page references after internal links. +# +# latex_show_pagerefs = False + +# If true, show URL addresses after external links. +# +# latex_show_urls = False + +# Documents to append as an appendix to all manuals. +# +# latex_appendices = [] + +# It false, will not define \strong, \code, itleref, \crossref ... but only +# \sphinxstrong, ..., \sphinxtitleref, ... To help avoid clash with user added +# packages. +# +# latex_keep_old_macro_names = True + +# If false, no module index is generated. +# +# latex_domain_indices = True + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. 
List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + (master_doc, 'pymisp', 'PyMISP Documentation', + [author], 1) +] + +# If true, show URL addresses after external links. +# +# man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + (master_doc, 'PyMISP', 'PyMISP Documentation', + author, 'PyMISP', 'One line description of project.', + 'Miscellaneous'), +] + +# Documents to append as an appendix to all manuals. +# +# texinfo_appendices = [] + +# If false, no module index is generated. +# +# texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +# +# texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. +# +# texinfo_no_detailmenu = False + + +# -- Options for Epub output ---------------------------------------------- + +# Bibliographic Dublin Core info. +epub_title = project +epub_author = author +epub_publisher = author +epub_copyright = copyright + +# The basename for the epub file. It defaults to the project name. +# epub_basename = project + +# The HTML theme for the epub output. Since the default themes are not +# optimized for small screen space, using the same theme for HTML and epub +# output is usually not wise. This defaults to 'epub', a theme designed to save +# visual space. +# +# epub_theme = 'epub' + +# The language of the text. It defaults to the language option +# or 'en' if the language is not set. +# +# epub_language = '' + +# The scheme of the identifier. Typical schemes are ISBN or URL. +# epub_scheme = '' + +# The unique identifier of the text. This can be a ISBN number +# or the project homepage. +# +# epub_identifier = '' + +# A unique identification for the text. 
+# +# epub_uid = '' + +# A tuple containing the cover image and cover page html template filenames. +# +# epub_cover = () + +# A sequence of (type, uri, title) tuples for the guide element of content.opf. +# +# epub_guide = () + +# HTML files that should be inserted before the pages created by sphinx. +# The format is a list of tuples containing the path and title. +# +# epub_pre_files = [] + +# HTML files that should be inserted after the pages created by sphinx. +# The format is a list of tuples containing the path and title. +# +# epub_post_files = [] + +# A list of files that should not be packed into the epub file. +epub_exclude_files = ['search.html'] + +# The depth of the table of contents in toc.ncx. +# +# epub_tocdepth = 3 + +# Allow duplicate toc entries. +# +# epub_tocdup = True + +# Choose between 'default' and 'includehidden'. +# +# epub_tocscope = 'default' + +# Fix unsupported image types using the Pillow. +# +# epub_fix_images = False + +# Scale large images. +# +# epub_max_image_width = 0 + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +# +# epub_show_urls = 'inline' + +# If false, no index is generated. +# +# epub_use_index = True + + +# Example configuration for intersphinx: refer to the Python standard library. +intersphinx_mapping = {'https://docs.python.org/': None} diff --git a/docs/source/index.rst b/docs/source/index.rst new file mode 100644 index 0000000..9a68fb7 --- /dev/null +++ b/docs/source/index.rst @@ -0,0 +1,25 @@ +.. PyMISP documentation master file, created by + sphinx-quickstart on Fri Aug 26 11:39:17 2016. + You can adapt this file completely to your liking, but it should at least + contain the root `toctree` directive. + +Welcome to PyMISP's documentation! +================================== + +Contents: + +.. 
toctree:: + :maxdepth: 2 + + readme + modules + + + +Indices and tables +================== + +* :ref:`genindex` +* :ref:`modindex` +* :ref:`search` + diff --git a/docs/source/modules.rst b/docs/source/modules.rst new file mode 100644 index 0000000..1bb98dd --- /dev/null +++ b/docs/source/modules.rst @@ -0,0 +1,7 @@ +pymisp +====== + +.. toctree:: + :maxdepth: 4 + + pymisp diff --git a/docs/source/pymisp.rst b/docs/source/pymisp.rst new file mode 100644 index 0000000..28ca0d9 --- /dev/null +++ b/docs/source/pymisp.rst @@ -0,0 +1,22 @@ +pymisp package +============== + +Submodules +---------- + +pymisp.api module +----------------- + +.. automodule:: pymisp.api + :members: + :undoc-members: + :show-inheritance: + + +Module contents +--------------- + +.. automodule:: pymisp + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/source/readme.rst b/docs/source/readme.rst new file mode 100644 index 0000000..7592303 --- /dev/null +++ b/docs/source/readme.rst @@ -0,0 +1 @@ +.. include:: ../../README.md diff --git a/examples/add_named_attribute.py b/examples/add_named_attribute.py new file mode 100644 index 0000000..43bb5db --- /dev/null +++ b/examples/add_named_attribute.py @@ -0,0 +1,29 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +from pymisp import PyMISP +from keys import misp_url, misp_key +import argparse + +# For python2 & 3 compat, a bit dirty, but it seems to be the least bad one +try: + input = raw_input +except NameError: + pass + + +def init(url, key): + return PyMISP(url, key, True, 'json', debug=True) + +if __name__ == '__main__': + parser = argparse.ArgumentParser(description='Create an event on MISP.') + parser.add_argument("-e", "--event", type=int, help="The id of the event to update.") + parser.add_argument("-t", "--type", help="The type of the added attribute") + parser.add_argument("-v", "--value", help="The value of the attribute") + args = parser.parse_args() + + misp = init(misp_url, misp_key) + + event = 
misp.get_event(args.event) + event = misp.add_named_attribute(event, args.type, args.value) + print(event) diff --git a/examples/add_user.py b/examples/add_user.py new file mode 100755 index 0000000..fbec04e --- /dev/null +++ b/examples/add_user.py @@ -0,0 +1,27 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +from pymisp import PyMISP +from keys import misp_url, misp_key +import argparse + +# For python2 & 3 compat, a bit dirty, but it seems to be the least bad one +try: + input = raw_input +except NameError: + pass + + +def init(url, key): + return PyMISP(url, key, True, 'json') + +if __name__ == '__main__': + parser = argparse.ArgumentParser(description='Add a new user by setting the mandory fields.') + parser.add_argument("-e", "--email", required=True, help="Email linked to the account.") + parser.add_argument("-o", "--org_id", required=True, help="Organisation linked to the user.") + parser.add_argument("-r", "--role_id", required=True, help="Role linked to the user.") + args = parser.parse_args() + + misp = init(misp_url, misp_key) + + print (misp.add_user(args.email, args.org_id, args.role_id)) diff --git a/examples/add_user_json.py b/examples/add_user_json.py new file mode 100755 index 0000000..6f79cc1 --- /dev/null +++ b/examples/add_user_json.py @@ -0,0 +1,28 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +from pymisp import PyMISP +from keys import misp_url, misp_key +import argparse + +# For python2 & 3 compat, a bit dirty, but it seems to be the least bad one +try: + input = raw_input +except NameError: + pass + + +def init(url, key): + return PyMISP(url, key, True, 'json') + +if __name__ == '__main__': + parser = argparse.ArgumentParser(description='Add the user described in the given json. 
If no file is provided, returns a json listing all the fields used to describe a user.') + parser.add_argument("-f", "--json_file", help="The name of the json file describing the user you want to create.") + args = parser.parse_args() + + misp = init(misp_url, misp_key) + + if args.json_file is None: + print (misp.get_add_user_fields_list()) + else: + print(misp.add_user_json(args.json_file)) diff --git a/examples/copy_list.py b/examples/copy_list.py old mode 100644 new mode 100755 index b7a1a55..1e74ccd --- a/examples/copy_list.py +++ b/examples/copy_list.py @@ -1,4 +1,4 @@ -#!/usr/bin/python +#!/usr/bin/env python # -*- coding: utf-8 -*- import sys @@ -27,25 +27,14 @@ def init(cert_to_priv=True): destination = PyMISP(url_cert, cert, cert_cert, 'xml') -def _to_utf8(request): - to_return = None - if 'json' in request.headers['content-type']: - to_return = request.json() - else: - to_return = request.text.encode('utf-8') - return to_return - - def copy_event(event_id): - r_src = source.get_event(event_id) - to_send = _to_utf8(r_src) - return destination.add_event(to_send) + e = source.get_event(event_id) + return destination.add_event(e) def update_event(event_id, event_to_update): - r_src = source.get_event(event_id) - to_send = _to_utf8(r_src) - return destination.update_event(event_to_update, to_send) + e = source.get_event(event_id) + return destination.update_event(event_to_update, e) def list_copy(filename): @@ -83,7 +72,7 @@ def copy(eventid): def export_our_org(): circl = source.search(org='CIRCL') - return _to_utf8(circl) + return circl if __name__ == '__main__': import argparse diff --git a/examples/create_events.py b/examples/create_events.py index 2f0e68a..f780511 100755 --- a/examples/create_events.py +++ b/examples/create_events.py @@ -13,20 +13,17 @@ except NameError: def init(url, key): - return PyMISP(url, key, True, 'json') + return PyMISP(url, key, True, 'json', debug=True) if __name__ == '__main__': - parser = 
argparse.ArgumentParser(description='Send malware sample to MISP.') + parser = argparse.ArgumentParser(description='Create an event on MISP.') parser.add_argument("-d", "--distrib", type=int, help="The distribution setting used for the attributes and for the newly created event, if relevant. [0-3].") parser.add_argument("-i", "--info", help="Used to populate the event info field if no event ID supplied.") parser.add_argument("-a", "--analysis", type=int, help="The analysis level of the newly created event, if applicatble. [0-2]") - parser.add_argument("-t", "--threat", type=int, help="The threat level ID of the newly created event, if applicatble. [0-3]") + parser.add_argument("-t", "--threat", type=int, help="The threat level ID of the newly created event, if applicatble. [1-4]") args = parser.parse_args() misp = init(misp_url, misp_key) event = misp.new_event(args.distrib, args.threat, args.analysis, args.info) - print event - - response = misp.add_mutex(event, 'booh') - print response + print(event) diff --git a/examples/del.py b/examples/del.py new file mode 100755 index 0000000..0577353 --- /dev/null +++ b/examples/del.py @@ -0,0 +1,36 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +from pymisp import PyMISP +from keys import misp_url, misp_key +import argparse + + +# Usage for pipe masters: ./last.py -l 5h | jq . 
+ + +def init(url, key): + return PyMISP(url, key, True, 'json', debug=True) + + +def del_event(m, eventid): + result = m.delete_event(eventid) + print(result) + +def del_attr(m, attrid): + result = m.delete_attribute(attrid) + print(result) + +if __name__ == '__main__': + parser = argparse.ArgumentParser(description='Delete an event from a MISP instance.') + parser.add_argument("-e", "--event", help="Event ID to delete.") + parser.add_argument("-a", "--attribute", help="Attribute ID to delete.") + + args = parser.parse_args() + + misp = init(misp_url, misp_key) + + if args.event: + del_event(misp, args.event) + else: + del_attr(misp, args.attribute) diff --git a/examples/delete_user.py b/examples/delete_user.py new file mode 100755 index 0000000..b6aaf7d --- /dev/null +++ b/examples/delete_user.py @@ -0,0 +1,25 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +from pymisp import PyMISP +from keys import misp_url, misp_key +import argparse + +# For python2 & 3 compat, a bit dirty, but it seems to be the least bad one +try: + input = raw_input +except NameError: + pass + + +def init(url, key): + return PyMISP(url, key, True, 'json') + +if __name__ == '__main__': + parser = argparse.ArgumentParser(description='Delete the user with the given id. 
Keep in mind that disabling users (by setting the disabled flag via an edit) is always prefered to keep user associations to events intact.') + parser.add_argument("-i", "--user_id", help="The id of the user you want to delete.") + args = parser.parse_args() + + misp = init(misp_url, misp_key) + + print(misp.delete_user(args.user_id)) diff --git a/examples/edit_user.py b/examples/edit_user.py new file mode 100755 index 0000000..6d16ea9 --- /dev/null +++ b/examples/edit_user.py @@ -0,0 +1,26 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +from pymisp import PyMISP +from keys import misp_url, misp_key +import argparse + +# For python2 & 3 compat, a bit dirty, but it seems to be the least bad one +try: + input = raw_input +except NameError: + pass + + +def init(url, key): + return PyMISP(url, key, True, 'json') + +if __name__ == '__main__': + parser = argparse.ArgumentParser(description='Edit the email of the user designed by the user_id.') + parser.add_argument("-i", "--user_id", required=True, help="The name of the json file describing the user you want to modify.") + parser.add_argument("-e", "--email", help="Email linked to the account.") + args = parser.parse_args() + + misp = init(misp_url, misp_key) + + print(misp.edit_user(args.user_id, email=args.email)) diff --git a/examples/edit_user_json.py b/examples/edit_user_json.py new file mode 100755 index 0000000..7c5deb8 --- /dev/null +++ b/examples/edit_user_json.py @@ -0,0 +1,29 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +from pymisp import PyMISP +from keys import misp_url, misp_key +import argparse + +# For python2 & 3 compat, a bit dirty, but it seems to be the least bad one +try: + input = raw_input +except NameError: + pass + + +def init(url, key): + return PyMISP(url, key, True, 'json') + +if __name__ == '__main__': + parser = argparse.ArgumentParser(description='Edit the user designed by the user_id. 
If no file is provided, returns a json listing all the fields used to describe a user.') + parser.add_argument("-i", "--user_id", required=True, help="The name of the json file describing the user you want to modify.") + parser.add_argument("-f", "--json_file", help="The name of the json file describing your modifications.") + args = parser.parse_args() + + misp = init(misp_url, misp_key) + + if args.json_file is None: + print (misp.get_edit_user_fields_list(args.user_id)) + else: + print(misp.edit_user_json(args.json_file, args.user_id)) diff --git a/examples/et2misp.py b/examples/et2misp.py new file mode 100755 index 0000000..e45f395 --- /dev/null +++ b/examples/et2misp.py @@ -0,0 +1,126 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# Copy Emerging Threats Block IPs list to several MISP events +# Because of the large size of the list the first run will take a minute +# Running it again will update the MISP events if changes are detected +# +# This script requires PyMISP 2.4.50 or later + +import sys, json, time, requests +from pymisp import PyMISP +from keys import misp_url, misp_key + +et_url = 'https://rules.emergingthreats.net/fwrules/emerging-Block-IPs.txt' +et_str = 'Emerging Threats ' + +def init_misp(): + global mymisp + mymisp = PyMISP(misp_url, misp_key) + +def load_misp_event(eid): + global et_attr + global et_drev + global et_event + et_attr = {} + et_drev = {} + + et_event = mymisp.get(eid) + echeck(et_event) + for a in et_event['Event']['Attribute']: + if a['category'] == 'Network activity': + et_attr[a['value']] = a['id'] + continue + if a['category'] == 'Internal reference': + et_drev = a; + +def init_et(): + global et_data + global et_rev + requests.packages.urllib3.disable_warnings() + s = requests.Session() + r = s.get(et_url) + if r.status_code != 200: + raise Exception('Error getting ET data: {}'.format(r.text)) + name = '' + et_data = {} + et_rev = 0 + for line in r.text.splitlines(): + if line.startswith('# Rev '): + et_rev = 
int(line[6:]) + continue + if line.startswith('#'): + name = line[1:].strip() + if et_rev and not et_data.get(name): + et_data[name] = {} + continue + l = line.rstrip() + if l: + et_data[name][l] = name + +def update_et_event(name): + if et_drev and et_rev and int(et_drev['value']) < et_rev: + # Copy MISP attributes to new dict + et_ips = dict.fromkeys(et_attr.keys()) + + # Weed out attributes still in ET data + for k,v in et_data[name].items(): + et_attr.pop(k, None) + + # Delete the leftover attributes from MISP + for k,v in et_attr.items(): + r = mymisp.delete_attribute(v) + if r.get('errors'): + print "Error deleting attribute {} ({}): {}\n".format(v,k,r['errors']) + + # Weed out ips already in the MISP event + for k,v in et_ips.items(): + et_data[name].pop(k, None) + + # Add new attributes to MISP event + ipdst = [] + for i,k in enumerate(et_data[name].items(), 1-len(et_data[name])): + ipdst.append(k[0]) + if i % 100 == 0: + r = mymisp.add_ipdst(et_event, ipdst) + echeck(r, et_event['Event']['id']) + ipdst = [] + + # Update revision number + et_drev['value'] = et_rev + et_drev.pop('timestamp', None) + attr = [] + attr.append(et_drev) + + # Publish updated MISP event + et_event['Event']['Attribute'] = attr + et_event['Event']['published'] = False + et_event['Event']['date'] = time.strftime('%Y-%m-%d') + r = mymisp.publish(et_event) + echeck(r, et_event['Event']['id']) + +def echeck(r, eid=None): + if r.get('errors'): + if eid: + print "Processing event {} failed: {}".format(eid, r['errors']) + else: + print r['errors'] + sys.exit(1) + +if __name__ == '__main__': + init_misp() + init_et() + + for et_type in set(et_data.keys()): + info = et_str + et_type + r = mymisp.search_index(eventinfo=info) + if r['response']: + eid=r['response'][0]['id'] + else: # event not found, create it + new_event = mymisp.new_event(info=info, distribution=3, threat_level_id=4, analysis=1) + echeck(new_event) + eid=new_event['Event']['id'] + r = mymisp.add_internal_text(new_event, 1, 
comment='Emerging Threats revision number') + echeck(r, eid) + load_misp_event(eid) + update_et_event(et_type) diff --git a/examples/events/README.md b/examples/events/README.md new file mode 100644 index 0000000..e53e6d6 --- /dev/null +++ b/examples/events/README.md @@ -0,0 +1,53 @@ +## Explanation + +This folder contains scripts made to create dummy events in order to test MISP instances. + +* dummy is a text-only file used as the uploaded attachment. +* create\_dummy\_event.py will create a given number of events (default: 1) with a randomly generated domain|ip attribute as well as a copy of dummy file. +* create\_massive\_dummy\_events.py will create a given number of events (default: 1) with a given number of randomly generated attributes (default: 3000). + +### Tools description + +* randomStringGenerator: generate a random string of a given size, characters used to build the string can be chosen, default are characters from string.ascii\_lowercase and string.digits +* randomIpGenerator: generate a random ip + +* floodtxt: add a generated string as attribute of the given event. The added attributes can be of the following category/type: + - Internal reference/comment + - Internal reference/text + - Internal reference/other + - Payload delivery/email-subject + - Artifact dropped/mutex + - Artifact dropped/filename +* floodip: add a generated ip as attribute of the given event. The added attributes can be of the following category/type: + - Network activity/ip-src + - Network activity/ip-dst +* flooddomain: add a generated domain-like string as attribute of the given event. The added attributes can be of the following category/type: + - Network activity/hostname + - Network activity/domain +* flooddomainip: add a generated domain|ip-like string as attribute of the given event. The added attribute is of the following category/type: + - Network activity/domain|ip +* floodemail: add a generated email-like string as attribute of the given event.
The added attributes can be of the following category/type: + - Payload delivery/email-src + - Payload delivery/email-dst +* floodattachment: add a dummy file as attribute of the given event. The added attribute is of the following category/type: + - Payload delivery/attachment + +* create\_dummy\_event: create a dummy event named "dummy event" with these characteristics: + - Distribution: Your organisation only + - Analysis: Initial + - Threat Level: Undefined + - Number of Attributes: 2 + - Attribute: + - category/type: Network activity/domain|ip + - value: Randomly generated + - Attribute: + - category/type: Payload delivery/attachment + - value: 'dummy' file +* create\_massive\_dummy\_events: create a dummy event named "massive dummy event" with these characteristics: + - Distribution: Your organisation only + - Analysis: Initial + - Threat Level: Undefined + - Number of Attributes: Given as argument + - Attribute: + - category/type: Randomly chosen + - value: Randomly generated or dummy file diff --git a/examples/events/create_dummy_event.py b/examples/events/create_dummy_event.py new file mode 100755 index 0000000..63bd581 --- /dev/null +++ b/examples/events/create_dummy_event.py @@ -0,0 +1,23 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +from pymisp import PyMISP +from keys import misp_url, misp_key, misp_verifycert +import argparse +import tools + +def init(url, key): + return PyMISP(url, key, misp_verifycert, 'json') + +if __name__ == '__main__': + parser = argparse.ArgumentParser(description='Create a given number of event containing an domain|ip attribute and an attachment each.') + parser.add_argument("-l", "--limit", type=int, help="Number of events to create (default 1)") + args = parser.parse_args() + + misp = init(misp_url, misp_key) + + if args.limit is None: + args.limit = 1 + + for i in range(args.limit): + tools.create_dummy_event(misp) diff --git a/examples/events/create_massive_dummy_events.py
b/examples/events/create_massive_dummy_events.py new file mode 100755 index 0000000..12a2826 --- /dev/null +++ b/examples/events/create_massive_dummy_events.py @@ -0,0 +1,24 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +from pymisp import PyMISP +from keys import url, key +import argparse +import tools + + +if __name__ == '__main__': + parser = argparse.ArgumentParser(description='Create a given number of event containing a given number of attributes eachh.') + parser.add_argument("-l", "--limit", type=int, help="Number of events to create (default 1)") + parser.add_argument("-a", "--attribute", type=int, help="Number of attributes per event (default 3000)") + args = parser.parse_args() + + misp = PyMISP(url, key, True, 'json') + + if args.limit is None: + args.limit = 1 + if args.attribute is None: + args.attribute = 3000 + + for i in range(args.limit): + tools.create_massive_dummy_events(misp, args.attribute) diff --git a/examples/events/dummy b/examples/events/dummy new file mode 100644 index 0000000..9834857 --- /dev/null +++ b/examples/events/dummy @@ -0,0 +1,21 @@ +DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY +DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY +DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY +DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY +DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY +DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY +DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY +DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY +DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY +DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY +DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY +DUMMY DUMMY DUMMY DUMMY DUMMY 
DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY +DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY +DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY +DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY +DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY +DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY +DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY +DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY +DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY +DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY DUMMY diff --git a/examples/events/tools.py b/examples/events/tools.py new file mode 100644 index 0000000..9d0e3f5 --- /dev/null +++ b/examples/events/tools.py @@ -0,0 +1,74 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +import random +from random import randint +import string + + +def randomStringGenerator(size, chars=string.ascii_lowercase + string.digits): + return ''.join(random.choice(chars) for _ in range(size)) + + +def randomIpGenerator(): + return str(randint(0, 255)) + '.' + str(randint(0, 255)) + '.' + str(randint(0, 255)) + '.' + str(randint(0, 255)) + + +def floodtxt(misp, event, maxlength=255): + text = randomStringGenerator(randint(1, maxlength)) + textfunctions = [misp.add_internal_comment, misp.add_internal_text, misp.add_internal_other, misp.add_email_subject, misp.add_mutex, misp.add_filename] + textfunctions[randint(0, 5)](event, text) + + +def floodip(misp, event): + ip = randomIpGenerator() + ipfunctions = [misp.add_ipsrc, misp.add_ipdst] + ipfunctions[randint(0, 1)](event, ip) + + +def flooddomain(misp, event, maxlength=25): + a = randomStringGenerator(randint(1, maxlength)) + b = randomStringGenerator(randint(2, 3), chars=string.ascii_lowercase) + domain = a + '.' 
+ b + domainfunctions = [misp.add_hostname, misp.add_domain] + domainfunctions[randint(0, 1)](event, domain) + + +def flooddomainip(misp, event, maxlength=25): + a = randomStringGenerator(randint(1, maxlength)) + b = randomStringGenerator(randint(2, 3), chars=string.ascii_lowercase) + domain = a + '.' + b + ip = randomIpGenerator() + misp.add_domain_ip(event, domain, ip) + + +def floodemail(misp, event, maxlength=25): + a = randomStringGenerator(randint(1, maxlength)) + b = randomStringGenerator(randint(1, maxlength)) + c = randomStringGenerator(randint(2, 3), chars=string.ascii_lowercase) + email = a + '@' + b + '.' + c + emailfunctions = [misp.add_email_src, misp.add_email_dst] + emailfunctions[randint(0, 1)](event, email) + + +def floodattachment(misp, eventid, distribution, to_ids, category, comment, info, analysis, threat_level_id): + filename = randomStringGenerator(randint(1, 128)) + misp.upload_sample(filename, 'dummy', eventid, distribution, to_ids, category, comment, info, analysis, threat_level_id) + + +def create_dummy_event(misp): + event = misp.new_event(0, 4, 0, 'dummy event') + flooddomainip(misp, event) + floodattachment(misp, event['Event']['id'], event['Event']['distribution'], False, 'Payload delivery', '', event['Event']['info'], event['Event']['analysis'], event['Event']['threat_level_id']) + + +def create_massive_dummy_events(misp, nbattribute): + event = misp.new_event(0, 4, 0, 'massive dummy event') + eventid = event['Event']['id'] + functions = [floodtxt, floodip, flooddomain, flooddomainip, floodemail, floodattachment] + for i in range(nbattribute): + choice = randint(0, 5) + if choice == 5: + floodattachment(misp, eventid, event['Event']['distribution'], False, 'Payload delivery', '', event['Event']['info'], event['Event']['analysis'], event['Event']['threat_level_id']) + else: + functions[choice](misp, event) diff --git a/examples/feed-generator/generate.py b/examples/feed-generator/generate.py index 13229b9..2188d2a 100755 --- 
a/examples/feed-generator/generate.py +++ b/examples/feed-generator/generate.py @@ -1,53 +1,56 @@ -#!/usr/bin/python +#!/usr/bin/env python # -*- coding: utf-8 -*- import sys import json import os from pymisp import PyMISP -from settings import url, key, ssl, outputdir, filters +from settings import url, key, ssl, outputdir, filters, valid_attribute_distribution_levels -objectsToSave = { - 'Orgc': { - 'fields': ['name', 'uuid'], - 'multiple': False, - }, - 'Tag': { - 'fields': ['name', 'colour', 'exportable'], - 'multiple': True, - }, - 'Attribute': { - 'fields': ['uuid', 'value', 'category', 'type', - 'comment', 'data', 'timestamp', - 'to_ids'], - 'multiple': True, - }, - } +objectsToSave = {'Orgc': {'fields': ['name', 'uuid'], + 'multiple': False, + }, + 'Tag': {'fields': ['name', 'colour', 'exportable'], + 'multiple': True, + }, + 'Attribute': {'fields': ['uuid', 'value', 'category', 'type', + 'comment', 'data', 'timestamp', 'to_ids'], + 'multiple': True, + }, + } fieldsToSave = ['uuid', 'info', 'threat_level_id', 'analysis', 'timestamp', 'publish_timestamp', 'published', 'date'] +valid_attribute_distributions = [] + def init(): - return PyMISP(url, key, ssl, 'json') + # If we have an old settings.py file then this variable won't exist + global valid_attribute_distributions + try: + valid_attribute_distributions = valid_attribute_distribution_levels + except: + valid_attribute_distributions = ['0', '1', '2', '3', '4', '5'] + return PyMISP(url, key, ssl) def saveEvent(misp, uuid): - try: - event = misp.get_event(uuid) - event = __cleanUpEvent(event) - event = json.dumps(event) - eventFile = open(os.path.join(outputdir, uuid + '.json'), 'w') - eventFile.write(event) - eventFile.close() - except: + event = misp.get_event(uuid) + if not event.get('Event'): + print('Error while fetching event: {}'.format(event['message'])) sys.exit('Could not create file for event ' + uuid + '.') + event = __cleanUpEvent(event) + event = json.dumps(event) + eventFile = 
open(os.path.join(outputdir, uuid + '.json'), 'w') + eventFile.write(event) + eventFile.close() def __cleanUpEvent(event): - temp = event.json() + temp = event event = {'Event': {}} __cleanupEventFields(event, temp) __cleanupEventObjects(event, temp) @@ -61,11 +64,20 @@ def __cleanupEventFields(event, temp): return event +def __blockAttributeByDistribution(attribute): + if attribute['distribution'] not in valid_attribute_distributions: + return True + return False + + def __cleanupEventObjects(event, temp): for objectType in objectsToSave.keys(): if objectsToSave[objectType]['multiple'] is True: if objectType in temp['Event']: for objectInstance in temp['Event'][objectType]: + if objectType is 'Attribute': + if __blockAttributeByDistribution(objectInstance): + continue tempObject = {} for field in objectsToSave[objectType]['fields']: if field in objectInstance.keys(): @@ -86,7 +98,8 @@ def saveManifest(manifest): manifestFile = open(os.path.join(outputdir, 'manifest.json'), 'w') manifestFile.write(json.dumps(manifest)) manifestFile.close() - except: + except Exception as e: + print(e) sys.exit('Could not create the manifest file.') @@ -95,8 +108,7 @@ def __addEventToManifest(event): for eventTag in event['EventTag']: tags.append({'name': eventTag['Tag']['name'], 'colour': eventTag['Tag']['colour']}) - return { - 'Orgc': event['Orgc'], + return {'Orgc': event['Orgc'], 'Tag': tags, 'info': event['info'], 'date': event['date'], @@ -108,10 +120,12 @@ def __addEventToManifest(event): if __name__ == '__main__': misp = init() - result = misp.get_index(None, filters) try: - events = result.json() - except: + r = misp.get_index(filters) + events = r['response'] + print(events[0]) + except Exception as e: + print(e) sys.exit("Invalid response received from MISP.") if len(events) == 0: sys.exit("No events returned.") @@ -121,8 +135,7 @@ if __name__ == '__main__': for event in events: saveEvent(misp, event['uuid']) manifest[event['uuid']] = __addEventToManifest(event) - print 
"Event " + str(counter) + "/" + str(total) + " exported." + print("Event " + str(counter) + "/" + str(total) + " exported.") counter += 1 saveManifest(manifest) - print 'Manifest saved. Feed creation completed.' - + print('Manifest saved. Feed creation completed.') diff --git a/examples/feed-generator/settings.py b/examples/feed-generator/settings.default.py similarity index 58% rename from examples/feed-generator/settings.py rename to examples/feed-generator/settings.default.py index 7901a87..b80ba93 100755 --- a/examples/feed-generator/settings.py +++ b/examples/feed-generator/settings.default.py @@ -21,3 +21,19 @@ outputdir = 'output' # tlp:white and/or feed-export but exclude anything tagged privint filters = {} + +# By default all attributes will be included in the feed generation +# Remove the levels that you do not wish to include in the feed +# Use this to further narrow down what gets exported, for example: +# Setting this to ['3', '5'] will exclude any attributes from the feed that +# are not exportable to all or inherit the event +# +# The levels are as follows: +# 0: Your Organisation Only +# 1: This Community Only +# 2: Connected Communities +# 3: All +# 4: Sharing Group +# 5: Inherit Event +valid_attribute_distribution_levels = ['0', '1', '2', '3', '4', '5'] + diff --git a/examples/get.py b/examples/get.py index 5da2d4e..d2be085 100755 --- a/examples/get.py +++ b/examples/get.py @@ -2,7 +2,7 @@ # -*- coding: utf-8 -*- from pymisp import PyMISP -from keys import misp_url, misp_key,misp_verifycert +from keys import misp_url, misp_key, misp_verifycert import argparse import os import json @@ -10,22 +10,28 @@ import json # Usage for pipe masters: ./last.py -l 5h | jq . 
+proxies = { + 'http': 'http://127.0.0.1:8123', + 'https': 'http://127.0.0.1:8123', +} + +proxies = None + def init(url, key): - return PyMISP(url, key, misp_verifycert, 'json') + return PyMISP(url, key, misp_verifycert, 'json', proxies=proxies) def get_event(m, event, out=None): result = m.get_event(event) - r = result.json() if out is None: - print(json.dumps(r) + '\n') + print(json.dumps(result) + '\n') else: with open(out, 'w') as f: - f.write(json.dumps(r) + '\n') - + f.write(json.dumps(result) + '\n') if __name__ == '__main__': + parser = argparse.ArgumentParser(description='Get an event from a MISP instance.') parser.add_argument("-e", "--event", required=True, help="Event ID to get.") parser.add_argument("-o", "--output", help="Output file") diff --git a/examples/get_network_activity.py b/examples/get_network_activity.py index e854393..03a1c1c 100755 --- a/examples/get_network_activity.py +++ b/examples/get_network_activity.py @@ -1,4 +1,4 @@ -#!/usr/bin/python +#!/usr/bin/env python # -*- coding: utf-8 -*- """ @@ -48,41 +48,34 @@ def get_event(event_id): event_id = int(event_id) if event_id > 0: - event = source.get_event(event_id) - if event.status_code == 200: + event_json = source.get_event(event_id) + event_core = event_json["Event"] + # event_threatlevel_id = event_core["threat_level_id"] - try: - event_json = event.json() - except: - return False + # attribute_count = event_core["attribute_count"] + attribute = event_core["Attribute"] - event_core = event_json["Event"] - # event_threatlevel_id = event_core["threat_level_id"] + for attribute in event_core["Attribute"]: + if app_ids_only and not attribute["to_ids"]: + continue - # attribute_count = event_core["attribute_count"] - attribute = event_core["Attribute"] - - for attribute in event_core["Attribute"]: - if app_ids_only and not attribute["to_ids"]: - continue - - value = attribute["value"] - title = event_core["info"] - if app_netflow: - app_printcomment = False - if attribute["type"] == 
"ip-dst" and app_ip_dst: - network_ip_dst.append([build_entry(value, event_id, title, "ip-dst")]) + value = attribute["value"] + title = event_core["info"] + if app_netflow: + app_printcomment = False + if attribute["type"] == "ip-dst" and app_ip_dst: + network_ip_dst.append([build_entry(value, event_id, title, "ip-dst")]) + else: + if attribute["type"] == "ip-src" and app_ip_src: + network_ip_src.append([build_entry(value, event_id, title, "ip-src")]) + elif attribute["type"] == "ip-dst" and app_ip_dst: + network_ip_dst.append([build_entry(value, event_id, title, "ip-dst")]) + elif attribute["type"] == "domain" and app_domain: + network_domain.append([build_entry(value, event_id, title, "domain")]) + elif attribute["type"] == "hostname" and app_hostname: + network_hostname.append([build_entry(value, event_id, title, "hostname")]) else: - if attribute["type"] == "ip-src" and app_ip_src: - network_ip_src.append([build_entry(value, event_id, title, "ip-src")]) - elif attribute["type"] == "ip-dst" and app_ip_dst: - network_ip_dst.append([build_entry(value, event_id, title, "ip-dst")]) - elif attribute["type"] == "domain" and app_domain: - network_domain.append([build_entry(value, event_id, title, "domain")]) - elif attribute["type"] == "hostname" and app_hostname: - network_hostname.append([build_entry(value, event_id, title, "hostname")]) - else: - continue + continue else: print("Not a valid ID") return @@ -121,8 +114,8 @@ def print_events(): if firsthost: firsthost = False else: - print " or " - print "host %s" % ip[0] + print(" or ") + print("host %s" % ip[0]) else: if app_ip_src: for ip in network_ip_src: diff --git a/examples/graphdb/make_neo4j.py b/examples/graphdb/make_neo4j.py new file mode 100755 index 0000000..6393813 --- /dev/null +++ b/examples/graphdb/make_neo4j.py @@ -0,0 +1,45 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +from pymisp import PyMISP +from pymisp import Neo4j +from pymisp import MISPEvent +from keys import misp_url, misp_key 
+import argparse + +""" +Sample Neo4J query: + + +MATCH ()-[r:has]->(n) +WITH n, count(r) as rel_cnt +WHERE rel_cnt > 5 +MATCH (m)-[r:has]->(n) +RETURN m, n LIMIT 200; +""" + +if __name__ == '__main__': + parser = argparse.ArgumentParser(description='Get all the events matching a value.') + parser.add_argument("-s", "--search", required=True, help="String to search.") + parser.add_argument("--host", default='localhost:7474', help="Host where neo4j is running.") + parser.add_argument("-u", "--user", default='neo4j', help="User on neo4j.") + parser.add_argument("-p", "--password", default='neo4j', help="Password on neo4j.") + parser.add_argument("-d", "--deleteall", action="store_true", default=False, help="Delete all nodes from the database") + args = parser.parse_args() + + neo4j = Neo4j(args.host, args.user, args.password) + if args.deleteall: + neo4j.del_all() + misp = PyMISP(misp_url, misp_key) + result = misp.search_all(args.search) + for json_event in result['response']: + if not json_event['Event']: + print(json_event) + continue + print('Importing', json_event['Event']['info'], json_event['Event']['id']) + try: + misp_event = MISPEvent() + misp_event.load(json_event) + neo4j.import_event(misp_event) + except: + print('broken') diff --git a/examples/ioc-2-misp/README.md b/examples/ioc-2-misp/README.md new file mode 100644 index 0000000..60412f6 --- /dev/null +++ b/examples/ioc-2-misp/README.md @@ -0,0 +1,25 @@ +### Description + +Python script for ioc import to misp + +### requires + +> python 2.7 +> PyMISP +> BeautifulSoup (apt-get install python-bs4 python-lxml) + +### Usage + +```bash +python ioc2misp.py -i myioc -t "tag:mytag='sample','tag:other='foo'" +``` + +```bash +time find /iocsample -type f|while read line ;do python ioc2misp.py -i ${line};done +``` + +### Conf + + * rename keys.py.sample as keys.py + * add your url and api key in keys.py + * use command in terminal diff --git a/examples/ioc-2-misp/ioc2misp.py b/examples/ioc-2-misp/ioc2misp.py new 
file mode 100755 index 0000000..a7bc458 --- /dev/null +++ b/examples/ioc-2-misp/ioc2misp.py @@ -0,0 +1,337 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# Format description +# @variables : camelCase +# @functions : snake_case + +from keys import mispUrl, mispKey, csvTaxonomyFile, iocMispMapping + +try: + from pymisp import PyMISP +except: + print("you need pymisp form github") + import sys + sys.exit(1) + +import os +import argparse + +try: + from bs4 import BeautifulSoup +except: + print("install BeautifulSoup : sudo apt-get install python-bs4 python-lxml") + import sys + sys.exit(1) + + +def misp_init(url, key): + return PyMISP(url, key, False, 'json') + + +def check_valid_ioc(): + + (filepath, filename) = os.path.split(iocDescriptions["iocfile"]) + (shortname, extension) = os.path.splitext(filename) + + if (("ioc" in extension)) and (sum(1 for _ in open(iocDescriptions["iocfile"])) > 1): + iocDescriptions['filename'] = filename + return True + return False + + +def get_parse_ioc_file(): + return BeautifulSoup(open(iocDescriptions["iocfile"]), "lxml") + + +def parse_ioc_search_content(iocContextSearch): + for k, v in iocMispMapping.items(): + if str(k).lower() == str(iocContextSearch).lower(): + return v + return False + + +def create_attribute_json(iocContextSearch, attributeValue, attributeComment, force=False): + ##################################### + # force used for description to upload + if force: + parseResult = ("Other", "comment") + else: + parseResult = parse_ioc_search_content(iocContextSearch) + + if parseResult is False: + + print("/!\ Not implemented :: {0} :: {1} :: Item add as 'Other','Comment'. 
Add it in your keys.py".format(iocContextSearch, attributeValue)) + ######################################## + # force import to misp + parseResult = ("Other", "comment") + + comment = "" + try: + comment = parseResult[2] + attributeComment + except: + comment = attributeComment + + attribute = {"category": parseResult[0], + "type": parseResult[1], + "value": attributeValue, + "timestamp": "0", + "to_ids": "0", + "distribution": "0", + "comment": comment + } + return attribute + + +def create_attributes_from_ioc_json(soup): + attributes = [] + + IndicatorItemValues = {} + for item in soup.find_all("indicatoritem"): + + if item.find('context'): + IndicatorItemValues["context"] = str(item.find('context')['search']) + else: + IndicatorItemValues["context"] = "" + if item.find('content'): + IndicatorItemValues["content"] = str(item.find('content').text) + else: + IndicatorItemValues["content"] = "" + if item.find('comment'): + IndicatorItemValues["comment"] = str(item.find('comment').text) + else: + IndicatorItemValues["comment"] = "" + + jsonAttribute = create_attribute_json(IndicatorItemValues["context"], IndicatorItemValues["content"], IndicatorItemValues["comment"]) + attributes.append(jsonAttribute) + + return attributes + + +def create_misp_event_json(attributes): + import time + if iocDescriptions["authored_by"]: + attributes.append(create_attribute_json(None, "authored_by", iocDescriptions["authored_by"], True)) + if iocDescriptions["authored_date"]: + attributes.append(create_attribute_json(None, "authored_date", iocDescriptions["authored_date"], True)) + + ################################################## + # make short-description in "info field + # if not exist make description + # if "info"="short-description" make descrption as comment + mispInfoFild = "" + if iocDescriptions["short_description"]: + mispInfoFild = iocDescriptions["short_description"] + if iocDescriptions["description"]: + attributes.append(create_attribute_json(None, "description", 
iocDescriptions["description"], True)) + else: + if iocDescriptions["description"]: + mispInfoFild = iocDescriptions["description"] + else: + mispInfoFild = "No description or short_description from IOC find." + + eventJson = {"Event": {"info": mispInfoFild, + "timestamp": "1", + "attribute_count": 0, + "analysis": "0", + "date": time.strftime("%Y-%m-%d"), + "org": "", + "distribution": "0", + "Attribute": [], + "proposal_email_lock": False, + "threat_level_id": "4", + }} + + eventJson["Event"]["Attribute"] = attributes + + return eventJson + + +def get_descriptions(soup, description): + if soup.find(description.lower()): + return soup.find(description.lower()).text + return "" + + +def save_ioc_description(soup): + list_description = ["short_description", "authored_by", "authored_date", "description"] + + for description in list_description: + iocDescriptions[description] = get_descriptions(soup, description) + + return + + +def get_taxonomy(soup): + import csv + taxonomy = [] + reader = csv.reader(open(csvTaxonomyFile, 'rb'), delimiter=';') + ##################################### + # save file in a dict + # r[0] = @link from csv + # r[1] = @value from csv + # = value + # r[2] = @keep + # 0 : don't creat tag + # 1 : tag created + # r[3] = @taxonomy + + csvdic = {i: r for i, r in enumerate(reader)} + + ######################################### + # find all link with soup + for n in soup.find_all('link', rel=True): + rel = str(n.attrs['rel'][0]).lower() + + ########################## + # build special taxo + # special string because link if a html value + relValue = str(n.next_sibling).strip() + if rel == 'family': + if len(relValue) > 0: + taxonomy.append("malware_classification:malware-family='" + relValue + "'") + elif rel == 'threatgroup': + if len(relValue) > 0: + taxonomy.append("malware_classification:malware-threatgroup='" + relValue + "'") + + ######################### + # build taxo from csv match + else: + taxo = [r[3] for r in {i: r for i, r in 
csvdic.items() if r[0].lower() == rel and str(r[2]) == "1"}.values() if r[1].lower() == relValue.lower() and str(r[2]) == "1"] + + # taxo find in correspondance file + if (len(taxo) > 0 and taxo[0] != ''): + taxonomy.append(taxo[0]) + # not find + return taxonomy + + +def custum_color_tag(tagg): + color = "#00ace6" + if ":amber" in tagg: + color = "#ffc200" + if ":green:" in tagg: + color = "#009933" + if "tlp:green" in tagg: + color = "#009933" + if ":red:" in tagg: + color = "#ff0000" + if "tlp:red" in tagg: + color = "#ff0000" + if "tlp:white" in tagg: + color = "#fafafa" + return color + + +def push_event_to_misp(jsonEvent): + global misp + + #################### + # upload json event + event = misp.add_event(jsonEvent) + + # save event id for file upload and tagg + iocDescriptions["misp_event_id"] = event["Event"]["id"] + + return + + +def upload_file(): + + # filename,path, eid, distrib, ids, categ, info, ids, analysis, threat + misp.upload_sample(iocDescriptions['filename'], + iocDescriptions["iocfile"], + iocDescriptions["misp_event_id"], + "0", + False, + "External analysis", + iocDescriptions["short_description"], + None, + "1", + "4", + ) + return + + +def update_tag(listOfTagg): + for tagg in listOfTagg: + color = custum_color_tag(tagg) + + ############################# + # creatz tag in MISP + + misp.new_tag(str(tagg), str(color)) + ############################# + # link tag to MISP event + toPost = {} + toPost['Event'] = {'id': iocDescriptions["misp_event_id"]} + misp.add_tag(toPost, str(tagg)) + return + + +def main(): + global misp + global iocDescriptions + iocDescriptions = {} + + ################################ + # parse for valid argments + parser = argparse.ArgumentParser(description='Get an event from a MISP instance.') + parser.add_argument("-i", "--input", required=True, help="Input file") + parser.add_argument("-t", "--tag", help="Add custom tags 'tlp:red,cossi:tmp=test'") + args = parser.parse_args() + + iocDescriptions["iocfile"] = 
os.path.abspath(args.input) + + ################################ + # check if file have ioc extention and if he is not empty + if check_valid_ioc(): + + ################################ + # Try to parse file + iocfileparse = get_parse_ioc_file() + else: + print("/!\ Bad format {0}".format(iocDescriptions["iocfile"])) + return + + ################################ + # save description for create event + save_ioc_description(iocfileparse) + + ################################ + # parse ioc and buid json attributes + jsonAttributes = create_attributes_from_ioc_json(iocfileparse) + + ################################ + # create a json misp event and append attributes + jsonEvent = create_misp_event_json(jsonAttributes) + + ################################ + # try connection + try: + misp = misp_init(mispUrl, mispKey) + except: + print("/!\ Connection fail, bad url ({0}) or API key : {1}".format(mispUrl, mispKey)) + return + + ################################ + # Add event to MSIP + push_event_to_misp(jsonEvent) + + ################################ + # Upload the IOC file and close tmpfile + upload_file() + + ################################ + # Update MISP Event with tag from IOC + update_tag(get_taxonomy(iocfileparse)) + + ################################ + # Add custom Tag (-t) + if args.tag: + customTag = args.tag + update_tag(customTag.split(",")) + + +if __name__ == '__main__': + main() diff --git a/examples/ioc-2-misp/keys.py.sample b/examples/ioc-2-misp/keys.py.sample new file mode 100644 index 0000000..5b73563 --- /dev/null +++ b/examples/ioc-2-misp/keys.py.sample @@ -0,0 +1,94 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +mispUrl = '' +mispKey = '' + +############################### +# file use for internal tag +# some sample can be find here : +# https://github.com/eset/malware-ioc +# https://github.com/fireeye/iocs +csvTaxonomyFile = "taxonomy.csv" + +# csv delimiter : ";" with quotechar : " + +############################### +# link sample + #~ + #~ 
APT + #~ APT12 + #~ Backdoor + #~ Apache 2.0 + #~ + +# @link from csv +# = rel attribut from +# @value from csv +# = value +# @keep +# 0 : don't create tag +# 1 : tag created +# @taxonomy +# define tag for misp +# @comment +# litte description but not use + + +######################################### +# https://www.circl.lu/doc/misp/categories-and-types/index.html +# /\ +# || +# || +# \/ +# http://schemas.mandiant.com/ + +# @index = Context/search form ioc +# @(1, 2, 3) +# 1. categorie mapping +# 2. type mapping +# 3. optionnal comment + + +iocMispMapping = { + + ('DriverItem/DriverName') : (u'Artifacts dropped',u'other', u'DriverName. '), + + ('DnsEntryItem/Host') : (u'Network activity',u'domain'), + + ('Email/To') : (u'Targeting data',u'target-email'), + ('Email/Date') : (u'Other',u'comment',u'EmailDate. '), + ('Email/Body') : (u'Payload delivery',u'email-subject'), + ('Email/From') : (u'Payload delivery',u'email-dst'), + ('Email/Subject') : (u'Payload delivery',u'email-subject'), + ('Email/Attachment/Name') : (u'Payload delivery',u'email-attachment'), + + ('FileItem/Md5sum') : (u'External analysis',u'md5'), + ('FileItem/Sha1sum') : (u'External analysis',u'sha1'), + ('FileItem/FileName') : (u'External analysis',u'filename'), + ('FileItem/FullPath') : (u'External analysis',u'filename'), + ('FileItem/FilePath') : (u'External analysis',u'filename'), + ('FileItem/Sha256sum') : (u'External analysis',u'sha256'), + + ('Network/URI') : (u'Network activity',u'uri'), + ('Network/DNS') : (u'Network activity',u'domain'), + ('Network/String') : (u'Network activity',u'ip-dst'), + ('Network/UserAgent') : (u'Network activity',u'user-agent'), + + ('PortItem/localIP') : (u'Network activity',u'ip-dst'), + + ('ProcessItem/name') : (u'External analysis',u'pattern-in-memory', u'ProcessName. '), + ('ProcessItem/path') : (u'External analysis',u'pattern-in-memory', u'ProcessPath. 
'), + ('ProcessItem/Mutex') : (u'Artifacts dropped',u'mutex', u'mutex'), + ('ProcessItem/Pipe/Name') : (u'Artifacts dropped',u'named pipe'), + ('ProcessItem/Mutex/Name') : (u'Artifacts dropped',u'mutex', u'MutexName. '), + + ('RegistryItem/Text') : (u'Artifacts dropped',u'regkey', u'RegistryText. '), + ('RegistryItem/Path') : (u'Artifacts dropped',u'regkey', u'RegistryPath. '), + + ('ServiceItem/name') : (u'Artifacts dropped',u'windows-service-name'), + ('ServiceItem/type') : (u'Artifacts dropped',u'pattern-in-memory', u'ServiceType. '), + + ('Snort/Snort') : (u'Network activity',u'snort'), + + } diff --git a/examples/ioc-2-misp/taxonomy.csv b/examples/ioc-2-misp/taxonomy.csv new file mode 100644 index 0000000..73ac977 --- /dev/null +++ b/examples/ioc-2-misp/taxonomy.csv @@ -0,0 +1,12 @@ +link,value,keep,taxonomy,comment +classification,TLP AMBER,1,tlp:amber, +classification,TLP GREEN,1,tlp:green, +confidential,TLP-AMBER,1,tlp:amber, +confidential,TLP GREEN,1,tlp:green, +confidential,TLP-GREEN,1,tlp:green, +confidential,TLP RED,1,tlp:red, +exportable,Yes,0,, +family,APT,1,malware_classification:malware-category='APT', +family,APT3,1,malware_classification:malware-category='APT3',https://github.com/fireeye/iocs/tree/master/APT3 +license,Apache 2.0,0,, +threatcategory,APT3,1,malware_classification:malware-category='APT3',https://github.com/fireeye/iocs/tree/master/APT3 diff --git a/examples/last.py b/examples/last.py index 5eab820..75f8162 100755 --- a/examples/last.py +++ b/examples/last.py @@ -2,7 +2,7 @@ # -*- coding: utf-8 -*- from pymisp import PyMISP -from keys import misp_url, misp_key,misp_verifycert +from keys import misp_url, misp_key, misp_verifycert import argparse import os import json diff --git a/examples/sharing_groups.py b/examples/sharing_groups.py new file mode 100644 index 0000000..3bf4fa9 --- /dev/null +++ b/examples/sharing_groups.py @@ -0,0 +1,25 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +from pymisp import PyMISP +from keys import 
misp_url, misp_key +import argparse + +# For python2 & 3 compat, a bit dirty, but it seems to be the least bad one +try: + input = raw_input +except NameError: + pass + + +def init(url, key): + return PyMISP(url, key, True, 'json') + +if __name__ == '__main__': + parser = argparse.ArgumentParser(description='Get a list of the sharing groups from the MISP instance.') + + misp = init(misp_url, misp_key) + + sharing_groups = misp.get_sharing_groups() + print sharing_groups + diff --git a/examples/sighting.json b/examples/sighting.json new file mode 100644 index 0000000..9191a5b --- /dev/null +++ b/examples/sighting.json @@ -0,0 +1,2 @@ +{"values":["www.google.com", "8.8.8.8"], "timestamp":1460558710} + diff --git a/examples/sighting.py b/examples/sighting.py new file mode 100755 index 0000000..10bd72d --- /dev/null +++ b/examples/sighting.py @@ -0,0 +1,25 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +from pymisp import PyMISP +from keys import misp_url, misp_key +import argparse + +# For python2 & 3 compat, a bit dirty, but it seems to be the least bad one +try: + input = raw_input +except NameError: + pass + + +def init(url, key): + return PyMISP(url, key, True, 'json') + +if __name__ == '__main__': + parser = argparse.ArgumentParser(description='Add sighting.') + parser.add_argument("-f", "--json_file", required=True, help="The name of the json file describing the attribute you want to add sighting to.") + args = parser.parse_args() + + misp = init(misp_url, misp_key) + + misp.sighting_per_json(args.json_file) diff --git a/examples/situational-awareness/README.md b/examples/situational-awareness/README.md new file mode 100644 index 0000000..d481f76 --- /dev/null +++ b/examples/situational-awareness/README.md @@ -0,0 +1,33 @@ +## Explanation + +* treemap.py is a script that will generate an interactive svg (attribute\_treemap.svg) containing a treepmap representing the distribution of attributes in a sample (data) fetched from the instance using "last" or 
"searchall" examples. +* It will also generate a html document with a table (attribute\_table.html) containing count for each type of attribute. +* test\_attribute\_treemap.html is a quick page made to visualize both treemap and table at the same time. + +* tags\_count.py is a script that count the number of occurences of every tags in a fetched sample of Events in a given period of time. +* tag\_search.py is a script that count the number of occurences of a given tag in a fetched sample of Events in a given period of time. + * Events will be fetched from _days_ days ago to today. + * _begindate_ is the beginning of the studied period. If it is later than today, an error will be raised. + * _enddate_ is the end of the studied period. If it is earlier than _begindate_, an error will be raised. + * tag\_search.py allows research for multiple tags is possible by separating each tag by the | symbol. + * Partial research is also possible with tag\_search.py. For instance, search for "ransom" will also return tags containin "ransomware". + +* tags\_to\_graphs.py is a script that will generate several plots to visualise tags distribution. + * The studied _period_ can be either the 7, 28 or 360 last days + * _accuracy_ allows to get smallers splits of data instead of the default values + * _order_ define the accuracy of the curve fitting. Default value is 3 + * It will generate two plots comparing all the tags: + * tags_repartition_plot that present the raw data + * tags_repartition_trend_plot that present the general evolution for each tag + * Then each taxonomies will be represented in three plots: + * Raw datas: in "plot" folder, named with the name of the corresponding taxonomy + * Trend: in "plot" folder, named _taxonomy_\_trend. general evolution of the data (linear fitting, curve fitting at order 1) + * Curve fitting: in "plotlib" folder, name as the taxonomy it presents. 
+ * In order to visualize the last plots, a html file is also generated automaticaly (might be improved in the future) + +:warning: These scripts are not time optimised + +## Requierements + +* [Pygal](https://github.com/Kozea/pygal/) +* [Matplotlib](https://github.com/matplotlib/matplotlib) diff --git a/examples/situational-awareness/attribute_treemap.py b/examples/situational-awareness/attribute_treemap.py new file mode 100755 index 0000000..33ab6b5 --- /dev/null +++ b/examples/situational-awareness/attribute_treemap.py @@ -0,0 +1,31 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +from pymisp import PyMISP +from keys import misp_url, misp_key, misp_verifycert +import argparse +import tools + + +if __name__ == '__main__': + parser = argparse.ArgumentParser(description='Take a sample of events (based on last.py of searchall.py) and create a treemap epresenting the distribution of attributes in this sample.') + parser.add_argument("-f", "--function", required=True, help='The parameter can be either set to "last" or "searchall". If the parameter is not valid, "last" will be the default setting.') + parser.add_argument("-a", "--argument", required=True, help='if function is "last", time can be defined in days, hours, minutes (for example 5d or 12h or 30m). 
Otherwise, this argument is the string to search') + + args = parser.parse_args() + + misp = PyMISP(misp_url, misp_key, misp_verifycert, 'json') + + if args.function == "searchall": + result = misp.search_all(args.argument) + else: + result = misp.download_last(args.argument) + + if 'response' in result: + events = tools.eventsListBuildFromArray(result) + attributes = tools.attributesListBuild(events) + temp = tools.getNbAttributePerEventCategoryType(attributes) + temp = temp.groupby(level=['category', 'type']).sum() + tools.createTreemap(temp, 'Attributes Distribution', 'attribute_treemap.svg', 'attribute_table.html') + else: + print ('There is no event answering the research criteria') diff --git a/examples/situational-awareness/style.css b/examples/situational-awareness/style.css new file mode 100644 index 0000000..ce23448 --- /dev/null +++ b/examples/situational-awareness/style.css @@ -0,0 +1,50 @@ +body +{ + /*font-family: "Helvetica Neue", Helvetica, Arial, sans-serif;*/ + font-family: Consolas, "Liberation Mono", Menlo, Courier, monospace; +} + +h1 +{ + font-size: 16px; + width: 290px; + text-align:center; +} + +/*** Stats Tables ***/ + +table +{ + border-collapse: collapse; + border-spacing: 0; + border: 1px solid #cbcbcb; +} + +tbody +{ + font-size:12px; +} + +table td +{ + border-left: 1px solid #cbcbcb; + border-width: 0 0 0 1px; + width: 500px; + margin: 0; + padding: 0.5em 1em; +} + +.test +{ + width: 500px; +} + +table tr:nth-child(2n-1) td +{ + background-color: #f2f2f2; +} + +table tr td:first-child +{ + font-weight: bold; +} diff --git a/examples/situational-awareness/style2.css b/examples/situational-awareness/style2.css new file mode 100644 index 0000000..6fcec41 --- /dev/null +++ b/examples/situational-awareness/style2.css @@ -0,0 +1,41 @@ +body +{ + /*font-family: "Helvetica Neue", Helvetica, Arial, sans-serif;*/ + font-family: Consolas, "Liberation Mono", Menlo, Courier, monospace; +} + +h1 +{ + font-size: 16px; + width: 290px; + 
text-align:center; +} + +/*** Stats Tables ***/ + +table +{ + border-collapse: collapse; + border-spacing: 0; + table-layout: fixed; + width: 6000px; + border: 1px solid #cbcbcb; +} + +tbody +{ + font-size:12px; +} + +td +{ + border-left: 1px solid #cbcbcb; + border-width: 0 0 0 1px; + margin: 0; + padding: 0.5em 1em; +} + +table tr td:first-child +{ + font-weight: bold; +} diff --git a/examples/situational-awareness/tag_search.py b/examples/situational-awareness/tag_search.py new file mode 100644 index 0000000..20d422d --- /dev/null +++ b/examples/situational-awareness/tag_search.py @@ -0,0 +1,69 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +from pymisp import PyMISP +from keys import misp_url, misp_key, misp_verifycert +from datetime import datetime +import argparse +import tools + + +def init(url, key): + return PyMISP(url, key, misp_verifycert, 'json') + +# ######### fetch data ########## + + +if __name__ == '__main__': + parser = argparse.ArgumentParser(description='Take a sample of events (based on last.py) and give the number of occurrence of the given tag in this sample.') + parser.add_argument("-t", "--tag", required=True, help="tag to search (search for multiple tags is possible by using |. example : \"osint|OSINT\")") + parser.add_argument("-d", "--days", type=int, help="number of days before today to search. 
If not define, default value is 7") + parser.add_argument("-b", "--begindate", help="The research will look for tags attached to events posted at or after the given startdate (format: yyyy-mm-dd): If no date is given, default time is epoch time (1970-1-1)") + parser.add_argument("-e", "--enddate", help="The research will look for tags attached to events posted at or before the given enddate (format: yyyy-mm-dd): If no date is given, default time is now()") + + args = parser.parse_args() + + misp = init(misp_url, misp_key) + + if args.days is None: + args.days = 7 + result = misp.search(last='{}d'.format(args.days), metadata=True) + + tools.checkDateConsistancy(args.begindate, args.enddate, tools.getLastdate(args.days)) + + if args.begindate is None: + args.begindate = tools.getLastdate(args.days) + else: + args.begindate = tools.setBegindate(tools.toDatetime(args.begindate), tools.getLastdate(args.days)) + + if args.enddate is None: + args.enddate = datetime.now() + else: + args.enddate = tools.setEnddate(tools.toDatetime(args.enddate)) + + if 'response' in result: + events = tools.selectInRange(tools.eventsListBuildFromArray(result), begin=args.begindate, end=args.enddate) + totalPeriodEvents = tools.getNbitems(events) + tags = tools.tagsListBuild(events) + result = tools.isTagIn(tags, args.tag) + totalPeriodTags = len(result) + + text = 'Studied pediod: from ' + if args.begindate is None: + text = text + '1970-01-01' + else: + text = text + str(args.begindate.date()) + text = text + ' to ' + if args.enddate is None: + text = text + str(datetime.now().date()) + else: + text = text + str(args.enddate.date()) + + print('\n========================================================') + print(text) + print('During the studied pediod, ' + str(totalPeriodTags) + ' events out of ' + str(totalPeriodEvents) + ' contains at least one tag with ' + args.tag + '.') + if totalPeriodEvents != 0: + print('It represents {}% of the events in this period.'.format(round(100 * 
totalPeriodTags / totalPeriodEvents, 3))) + else: + print ('There is no event answering the research criteria') + diff --git a/examples/situational-awareness/tags_count.py b/examples/situational-awareness/tags_count.py new file mode 100644 index 0000000..c58ca5b --- /dev/null +++ b/examples/situational-awareness/tags_count.py @@ -0,0 +1,63 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +from pymisp import PyMISP +from keys import misp_url, misp_key, misp_verifycert +from datetime import datetime +import argparse +import tools + + +def init(url, key): + return PyMISP(url, key, misp_verifycert, 'json') + +# ######### fetch data ########## + + +if __name__ == '__main__': + parser = argparse.ArgumentParser(description='Take a sample of events (based on last.py) and give the repartition of tags in this sample.') + parser.add_argument("-d", "--days", type=int, help="number of days before today to search. If not define, default value is 7") + parser.add_argument("-b", "--begindate", default='1970-01-01', help="The research will look for tags attached to events posted at or after the given startdate (format: yyyy-mm-dd): If no date is given, default time is epoch time (1970-1-1)") + parser.add_argument("-e", "--enddate", help="The research will look for tags attached to events posted at or before the given enddate (format: yyyy-mm-dd): If no date is given, default time is now()") + + args = parser.parse_args() + + misp = init(misp_url, misp_key) + + if args.days is None: + args.days = 7 + result = misp.search(last='{}d'.format(args.days), metadata=True) + + tools.checkDateConsistancy(args.begindate, args.enddate, tools.getLastdate(args.days)) + + if args.begindate is None: + args.begindate = tools.getLastdate(args.days) + else: + args.begindate = tools.setBegindate(tools.toDatetime(args.begindate), tools.getLastdate(args.days)) + + if args.enddate is None: + args.enddate = datetime.now() + else: + args.enddate = tools.setEnddate(tools.toDatetime(args.enddate)) + + if 
'response' in result: + events = tools.selectInRange(tools.eventsListBuildFromArray(result), begin=args.begindate, end=args.enddate) + tags = tools.tagsListBuild(events) + result = tools.getNbOccurenceTags(tags) + else: + result = 'There is no event during the studied period' + + text = 'Studied pediod: from ' + if args.begindate is None: + text = text + '1970-01-01' + else: + text = text + str(args.begindate.date()) + text = text + ' to ' + if args.enddate is None: + text = text + str(datetime.now().date()) + else: + text = text + str(args.enddate.date()) + + print('\n========================================================') + print(text) + print(result) diff --git a/examples/situational-awareness/tags_to_graphs.py b/examples/situational-awareness/tags_to_graphs.py new file mode 100644 index 0000000..76464a4 --- /dev/null +++ b/examples/situational-awareness/tags_to_graphs.py @@ -0,0 +1,94 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +from pymisp import PyMISP +from keys import misp_url, misp_key, misp_verifycert +import argparse +import tools + + +def formattingDataframe(dataframe, dates, NanValue): + dataframe.reverse() + dates.reverse() + dataframe = tools.concat(dataframe) + dataframe = tools.renameColumns(dataframe, dates) + dataframe = tools.replaceNaN(dataframe, 0) + return dataframe + +if __name__ == '__main__': + parser = argparse.ArgumentParser(description='Show the evolution of trend of tags.') + parser.add_argument("-p", "--period", help='Define the studied period. Can be the past year (y), month (m) or week (w). Week is the default value if no valid value is given.') + parser.add_argument("-a", "--accuracy", help='Define the accuracy of the splits on the studied period. Can be per month (m) -for year only-, week (w) -month only- or day (d). The default value is always the biggest available.') + parser.add_argument("-o", "--order", type=int, help='Define the accuracy of the curve fitting. 
Default value is 3') + + args = parser.parse_args() + + misp = PyMISP(misp_url, misp_key, misp_verifycert) + + if args.period == "y": + if args.accuracy == "d": + split = 360 + size = 1 + else: + split = 12 + size = 30 + last = '360d' + title = 'Tags repartition over the last 360 days' + elif args.period == "m": + if args.accuracy == "d": + split = 28 + size = 1 + else: + split = 4 + size = 7 + last = '28d' + title = 'Tags repartition over the last 28 days' + else: + split = 7 + size = 1 + last = '7d' + title = 'Tags repartition over the last 7 days' + + result = misp.search(last=last, metadata=True) + if 'response' in result: + events = tools.eventsListBuildFromArray(result) + result = [] + dates = [] + enddate = tools.getToday() + colourDict = {} + faketag = False + + for i in range(split): + begindate = tools.getNDaysBefore(enddate, size) + dates.append(str(enddate.date())) + eventstemp = tools.selectInRange(events, begin=begindate, end=enddate) + if eventstemp is not None: + tags = tools.tagsListBuild(eventstemp) + if tags is not None: + tools.createDictTagsColour(colourDict, tags) + result.append(tools.getNbOccurenceTags(tags)) + else: + result.append(tools.createFakeEmptyTagsSeries()) + faketag = True + else: + result.append(tools.createFakeEmptyTagsSeries()) + faketag = True + enddate = begindate + + result = formattingDataframe(result, dates, 0) + if faketag: + result = tools.removeFaketagRow(result) + + taxonomies, emptyOther = tools.getTaxonomies(tools.getCopyDataframe(result)) + + tools.tagsToLineChart(tools.getCopyDataframe(result), title, dates, colourDict) + tools.tagstrendToLineChart(tools.getCopyDataframe(result), title, dates, split, colourDict) + tools.tagsToTaxoLineChart(tools.getCopyDataframe(result), title, dates, colourDict, taxonomies, emptyOther) + tools.tagstrendToTaxoLineChart(tools.getCopyDataframe(result), title, dates, split, colourDict, taxonomies, emptyOther) + if args.order is None: + args.order = 3 + 
tools.tagsToPolyChart(tools.getCopyDataframe(result), split, colourDict, taxonomies, emptyOther, args.order) + tools.createVisualisation(taxonomies) + + else: + print('There is no event during the studied period') diff --git a/examples/situational-awareness/test_attribute_treemap.html b/examples/situational-awareness/test_attribute_treemap.html new file mode 100644 index 0000000..0bc9c72 --- /dev/null +++ b/examples/situational-awareness/test_attribute_treemap.html @@ -0,0 +1,26 @@ + + + + + + + + +
+ + + diff --git a/examples/situational-awareness/tools.py b/examples/situational-awareness/tools.py new file mode 100644 index 0000000..694eb2b --- /dev/null +++ b/examples/situational-awareness/tools.py @@ -0,0 +1,495 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +from json import JSONDecoder +import random +import pygal +from pygal.style import Style +import pandas +from datetime import datetime +from datetime import timedelta +from dateutil.parser import parse +import numpy +from scipy import stats +from pytaxonomies import Taxonomies +import re +import matplotlib.pyplot as plt +from matplotlib import pylab +import os + + +class DateError(Exception): + def __init__(self, value): + self.value = value + + def __str__(self): + return repr(self.value) + + +# ############### Date Tools ################ + +def dateInRange(datetimeTested, begin=None, end=None): + if begin is None: + begin = datetime(1970, 1, 1) + if end is None: + end = datetime.now() + return begin <= datetimeTested <= end + + +def toDatetime(date): + return parse(date) + + +def checkDateConsistancy(begindate, enddate, lastdate): + if begindate is not None and enddate is not None: + if begindate > enddate: + raise DateError('begindate ({}) cannot be after enddate ({})'.format(begindate, enddate)) + + if enddate is not None: + if toDatetime(enddate) < lastdate: + raise DateError('enddate ({}) cannot be before lastdate ({})'.format(enddate, lastdate)) + + if begindate is not None: + if toDatetime(begindate) > datetime.now(): + raise DateError('begindate ({}) cannot be after today ({})'.format(begindate, datetime.now().date())) + + +def setBegindate(begindate, lastdate): + return max(begindate, lastdate) + + +def setEnddate(enddate): + return min(enddate, datetime.now()) + + +def getLastdate(last): + return (datetime.now() - timedelta(days=int(last))).replace(hour=0, minute=0, second=0, microsecond=0) + + +def getNDaysBefore(date, days): + return (date - timedelta(days=days)).replace(hour=0, 
minute=0, second=0, microsecond=0) + + +def getToday(): + return (datetime.now()).replace(hour=0, minute=0, second=0, microsecond=0) + + +# ############### Tools ################ + + +def getTaxonomies(dataframe): + taxonomies = Taxonomies() + taxonomies = list(taxonomies.keys()) + notInTaxo = [] + count = 0 + for taxonomy in taxonomies: + empty = True + for it in dataframe.iterrows(): + if it[0].startswith(taxonomy): + empty = False + dataframe = dataframe.drop([it[0]]) + count = count + 1 + if empty is True: + notInTaxo.append(taxonomy) + if dataframe.empty: + emptyOther = True + else: + emptyOther = False + for taxonomy in notInTaxo: + taxonomies.remove(taxonomy) + return taxonomies, emptyOther + + +def buildDoubleIndex(index1, index2, datatype): + it = -1 + newindex1 = [] + for index in index2: + if index == 0: + it += 1 + newindex1.append(index1[it]) + arrays = [newindex1, index2] + tuples = list(zip(*arrays)) + return pandas.MultiIndex.from_tuples(tuples, names=['event', datatype]) + + +def buildNewColumn(index2, column): + it = -1 + newcolumn = [] + for index in index2: + if index == 0: + it += 1 + newcolumn.append(column[it]) + return newcolumn + + +def addColumn(dataframe, columnList, columnName): + dataframe.loc[:, columnName] = pandas.Series(columnList, index=dataframe.index) + + +def concat(data): + return pandas.concat(data, axis=1) + + +def createFakeEmptyTagsSeries(): + return pandas.Series({'Faketag': 0}) + + +def removeFaketagRow(dataframe): + return dataframe.drop(['Faketag']) + + +def getCopyDataframe(dataframe): + return dataframe.copy() + + +def createDictTagsColour(colourDict, tags): + temp = tags.groupby(['name', 'colour']).count()['id'] + levels_name = temp.index.levels[0] + levels_colour = temp.index.levels[1] + labels_name = temp.index.labels[0] + labels_colour = temp.index.labels[1] + + for i in range(len(labels_name)): + colourDict[levels_name[labels_name[i]]] = levels_colour[labels_colour[i]] + + +def createTagsPlotStyle(dataframe, 
colourDict, taxonomy=None): + colours = [] + if taxonomy is not None: + for it in dataframe.iterrows(): + if it[0].startswith(taxonomy): + colours.append(colourDict[it[0]]) + else: + for it in dataframe.iterrows(): + colours.append(colourDict[it[0]]) + + style = Style(background='transparent', + plot_background='#eeeeee', + foreground='#111111', + foreground_strong='#111111', + foreground_subtle='#111111', + opacity='.6', + opacity_hover='.9', + transition='400ms ease-in', + colors=tuple(colours)) + return style + +# ############### Formatting ################ + + +def eventsListBuildFromList(filename): + with open(filename, 'r') as myfile: + s = myfile.read().replace('\n', '') + decoder = JSONDecoder() + s_len = len(s) + Events = [] + end = 0 + while end != s_len: + Event, end = decoder.raw_decode(s, idx=end) + Events.append(Event) + data = [] + for e in Events: + data.append(pandas.DataFrame.from_dict(e, orient='index')) + Events = pandas.concat(data) + for it in range(Events['attribute_count'].size): + if Events['attribute_count'][it] is None: + Events['attribute_count'][it] = '0' + else: + Events['attribute_count'][it] = int(Events['attribute_count'][it]) + Events = Events.set_index('id') + return Events + + +def eventsListBuildFromArray(jdata): + ''' + returns a structure listing all primary events in the sample + ''' + data = [pandas.DataFrame.from_dict(e, orient='index') for e in jdata['response']] + events = pandas.concat(data) + events = events.set_index(['id']) + return events + + +def attributesListBuild(events): + attributes = [pandas.DataFrame(attribute) for attribute in events['Attribute']] + return pandas.concat(attributes) + + +def tagsListBuild(Events): + Tags = [] + if 'Tag' in Events.columns: + for Tag in Events['Tag']: + if type(Tag) is not list: + continue + Tags.append(pandas.DataFrame(Tag)) + if Tags: + Tags = pandas.concat(Tags) + columnDate = buildNewColumn(Tags.index, Events['date']) + addColumn(Tags, columnDate, 'date') + index = 
buildDoubleIndex(Events.index, Tags.index, 'tag') + Tags = Tags.set_index(index) + else: + Tags = None + return Tags + + +def selectInRange(Events, begin=None, end=None): + inRange = [] + for i, Event in Events.iterrows(): + if dateInRange(parse(Event['date']), begin, end): + inRange.append(Event.tolist()) + inRange = pandas.DataFrame(inRange) + temp = Events.columns.tolist() + if inRange.empty: + return None + inRange.columns = temp + return inRange + + +def isTagIn(dataframe, tag): + temp = dataframe[dataframe['name'].str.contains(tag)].index.tolist() + index = [] + for i in range(len(temp)): + if temp[i][0] not in index: + index.append(temp[i][0]) + return index + + +def renameColumns(dataframe, namelist): + dataframe.columns = namelist + return dataframe + + +def replaceNaN(dataframe, value): + return dataframe.fillna(value) + +# ############### Basic Stats ################ + + +def getNbitems(dataframe): + return len(dataframe.index) + + +def getNbAttributePerEventCategoryType(attributes): + return attributes.groupby(['event_id', 'category', 'type']).count()['id'] + + +def getNbOccurenceTags(Tags): + return Tags.groupby('name').count()['id'] + +# ############### Charts ################ + + +def createTable(colors, categ_types_hash, tablename='attribute_table.html'): + with open(tablename, 'w') as target: + target.write('\n\n\n\n\n') + for categ_name, types in categ_types_hash.items(): + table = pygal.Treemap(pretty_print=True) + target.write('\n

{}

\n'.format(colors[categ_name], categ_name)) + for d in types: + table.add(d['label'], d['value']) + target.write(table.render_table(transpose=True)) + target.write('\n\n') + + +def createTreemap(data, title, treename='attribute_treemap.svg', tablename='attribute_table.html'): + labels_categ = data.index.labels[0] + labels_types = data.index.labels[1] + names_categ = data.index.levels[0] + names_types = data.index.levels[1] + categ_types_hash = {} + for categ_id, type_val, total in zip(labels_categ, labels_types, data): + if not categ_types_hash.get(names_categ[categ_id]): + categ_types_hash[names_categ[categ_id]] = [] + dict_to_print = {'label': names_types[type_val], 'value': total} + categ_types_hash[names_categ[categ_id]].append(dict_to_print) + + colors = {categ: "#%06X" % random.randint(0, 0xFFFFFF) for categ in categ_types_hash.keys()} + style = Style(background='transparent', + plot_background='#FFFFFF', + foreground='#111111', + foreground_strong='#111111', + foreground_subtle='#111111', + opacity='.6', + opacity_hover='.9', + transition='400ms ease-in', + colors=tuple(colors.values())) + + treemap = pygal.Treemap(pretty_print=True, legend_at_bottom=True, style=style) + treemap.title = title + treemap.print_values = True + treemap.print_labels = True + + for categ_name, types in categ_types_hash.items(): + treemap.add(categ_name, types) + + createTable(colors, categ_types_hash) + treemap.render_to_file(treename) + + +def tagsToLineChart(dataframe, title, dates, colourDict): + style = createTagsPlotStyle(dataframe, colourDict) + line_chart = pygal.Line(x_label_rotation=20, style=style, show_legend=False) + line_chart.title = title + line_chart.x_labels = dates + for it in dataframe.iterrows(): + line_chart.add(it[0], it[1].tolist()) + line_chart.render_to_file('tags_repartition_plot.svg') + + +def tagstrendToLineChart(dataframe, title, dates, split, colourDict): + style = createTagsPlotStyle(dataframe, colourDict) + line_chart = 
pygal.Line(x_label_rotation=20, style=style, show_legend=False) + line_chart.title = title + line_chart.x_labels = dates + xi = numpy.arange(split) + for it in dataframe.iterrows(): + slope, intercept, r_value, p_value, std_err = stats.linregress(xi, it[1]) + line = slope * xi + intercept + line_chart.add(it[0], line, show_dots=False) + line_chart.render_to_file('tags_repartition_trend_plot.svg') + + +def tagsToTaxoLineChart(dataframe, title, dates, colourDict, taxonomies, emptyOther): + style = createTagsPlotStyle(dataframe, colourDict) + line_chart = pygal.Line(x_label_rotation=20, style=style) + line_chart.title = title + line_chart.x_labels = dates + for taxonomy in taxonomies: + taxoStyle = createTagsPlotStyle(dataframe, colourDict, taxonomy) + taxo_line_chart = pygal.Line(x_label_rotation=20, style=taxoStyle) + taxo_line_chart.title = title + ': ' + taxonomy + taxo_line_chart.x_labels = dates + for it in dataframe.iterrows(): + if it[0].startswith(taxonomy): + taxo_line_chart.add(re.sub(taxonomy + ':', '', it[0]), it[1].tolist()) + dataframe = dataframe.drop([it[0]]) + taxo_line_chart.render_to_file('plot/' + taxonomy + '.svg') + + if not emptyOther: + taxoStyle = createTagsPlotStyle(dataframe, colourDict) + taxo_line_chart = pygal.Line(x_label_rotation=20, style=taxoStyle) + taxo_line_chart.title = title + ': other' + taxo_line_chart.x_labels = dates + for it in dataframe.iterrows(): + taxo_line_chart.add(it[0], it[1].tolist()) + taxo_line_chart.render_to_file('plot/other.svg') + + +def tagstrendToTaxoLineChart(dataframe, title, dates, split, colourDict, taxonomies, emptyOther): + style = createTagsPlotStyle(dataframe, colourDict) + line_chart = pygal.Line(x_label_rotation=20, style=style) + line_chart.title = title + line_chart.x_labels = dates + xi = numpy.arange(split) + for taxonomy in taxonomies: + taxoStyle = createTagsPlotStyle(dataframe, colourDict, taxonomy) + taxo_line_chart = pygal.Line(x_label_rotation=20, style=taxoStyle) + taxo_line_chart.title 
= title + ': ' + taxonomy + taxo_line_chart.x_labels = dates + for it in dataframe.iterrows(): + if it[0].startswith(taxonomy): + slope, intercept, r_value, p_value, std_err = stats.linregress(xi, it[1]) + line = slope * xi + intercept + taxo_line_chart.add(re.sub(taxonomy + ':', '', it[0]), line, show_dots=False) + dataframe = dataframe.drop([it[0]]) + taxo_line_chart.render_to_file('plot/' + taxonomy + '_trend.svg') + + if not emptyOther: + taxoStyle = createTagsPlotStyle(dataframe, colourDict) + taxo_line_chart = pygal.Line(x_label_rotation=20, style=taxoStyle) + taxo_line_chart.title = title + ': other' + taxo_line_chart.x_labels = dates + for it in dataframe.iterrows(): + slope, intercept, r_value, p_value, std_err = stats.linregress(xi, it[1]) + line = slope * xi + intercept + taxo_line_chart.add(it[0], line, show_dots=False) + taxo_line_chart.render_to_file('plot/other_trend.svg') + + +def tagsToPolyChart(dataframe, split, colourDict, taxonomies, emptyOther, order): + for taxonomy in taxonomies: + for it in dataframe.iterrows(): + if it[0].startswith(taxonomy): + points = [] + for i in range(split): + points.append((i, it[1][i])) + color = colourDict[it[0]] + label = re.sub(taxonomy + ':', '', it[0]) + points = numpy.array(points) + dataframe = dataframe.drop([it[0]]) + + # get x and y vectors + x = points[:, 0] + y = points[:, 1] + + # calculate polynomial + z = numpy.polyfit(x, y, order) + f = numpy.poly1d(z) + + # calculate new x's and y's + x_new = numpy.linspace(x[0], x[-1], 50) + y_new = f(x_new) + + plt.plot(x, y, '.', color=color) + plt.plot(x_new, y_new, color=color, label=label + 'trend') + + pylab.title('Polynomial Fit with Matplotlib: ' + taxonomy) + pylab.legend(loc='center left', bbox_to_anchor=(1, 0.5)) + ax = plt.gca() + # ax.set_facecolor((0.898, 0.898, 0.898)) + box = ax.get_position() + ax.set_position([box.x0 - 0.01, box.y0, box.width * 0.78, box.height]) + fig = plt.gcf() + fig.set_size_inches(20, 15) + fig.savefig('plotlib/' + taxonomy 
+ '.png') + fig.clf() + + if not emptyOther: + for it in dataframe.iterrows(): + points = [] + for i in range(split): + points.append((i, it[1][i])) + + color = colourDict[it[0]] + label = it[0] + points = numpy.array(points) + + # get x and y vectors + x = points[:, 0] + y = points[:, 1] + + # calculate polynomial + z = numpy.polyfit(x, y, order) + f = numpy.poly1d(z) + + # calculate new x's and y's + x_new = numpy.linspace(x[0], x[-1], 50) + y_new = f(x_new) + + plt.plot(x, y, '.', color=color, label=label) + plt.plot(x_new, y_new, color=color, label=label + 'trend') + + pylab.title('Polynomial Fit with Matplotlib: other') + pylab.legend(loc='center left', bbox_to_anchor=(1, 0.5)) + ax = plt.gca() + #cax.set_facecolor((0.898, 0.898, 0.898)) + box = ax.get_position() + ax.set_position([box.x0 - 0.01, box.y0, box.width * 0.78, box.height]) + fig = plt.gcf() + fig.set_size_inches(20, 15) + fig.savefig('plotlib/other.png') + + +def createVisualisation(taxonomies): + chain = '\n\n\t\n\t\t\n\t\n\t' + chain = chain + '' + for taxonomy in taxonomies: + chain = chain + '\n' + + chain = chain + '\n' + chain = chain + '
graph
graph
' + chain = chain + '\n\t\n' + + with open('test_tags_trend.html', 'w') as target: + target.write(chain) diff --git a/examples/stats.py b/examples/stats.py new file mode 100755 index 0000000..41d6b28 --- /dev/null +++ b/examples/stats.py @@ -0,0 +1,19 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +from pymisp import PyMISP +from keys import misp_url, misp_key, misp_verifycert +import argparse + + +def init(url, key): + return PyMISP(url, key, misp_verifycert, 'json') + +if __name__ == '__main__': + parser = argparse.ArgumentParser(description='Output attributes statistics from a MISP instance.') + args = parser.parse_args() + + misp = init(misp_url, misp_key) + + print (misp.get_attributes_statistics(misp, percentage=True)) + print (misp.get_attributes_statistics(context='category', percentage=True)) diff --git a/examples/suricata.py b/examples/suricata.py index 7fd8d78..b1616e8 100755 --- a/examples/suricata.py +++ b/examples/suricata.py @@ -7,7 +7,7 @@ import argparse def init(url, key): - return PyMISP(url, key, True, 'json') + return PyMISP(url, key, True) def fetch(m, all_events, event): diff --git a/examples/tags.py b/examples/tags.py new file mode 100755 index 0000000..adf5a8d --- /dev/null +++ b/examples/tags.py @@ -0,0 +1,27 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +from pymisp import PyMISP +from keys import misp_url, misp_key +import argparse +import json + + +def init(url, key): + return PyMISP(url, key, True, 'json', True) + + +def get_tags(m): + result = m.get_all_tags(True) + r = result + print(json.dumps(r) + '\n') + + +if __name__ == '__main__': + parser = argparse.ArgumentParser(description='Get tags from MISP instance.') + + args = parser.parse_args() + + misp = init(misp_url, misp_key) + + get_tags(misp) diff --git a/examples/tagstatistics.py b/examples/tagstatistics.py new file mode 100644 index 0000000..4f9fe76 --- /dev/null +++ b/examples/tagstatistics.py @@ -0,0 +1,28 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + 
+from pymisp import PyMISP +from keys import misp_url, misp_key, misp_verifycert +import argparse +import json + +# For python2 & 3 compat, a bit dirty, but it seems to be the least bad one +try: + input = raw_input +except NameError: + pass + + +def init(url, key): + return PyMISP(url, key, misp_verifycert, 'json') + +if __name__ == '__main__': + parser = argparse.ArgumentParser(description='Get statistics from tags.') + parser.add_argument("-p", "--percentage", action='store_true', default=None, help="An optional field, if set, it will return the results in percentages, otherwise it returns exact count.") + parser.add_argument("-n", "--namesort", action='store_true', default=None, help="An optional field, if set, values are sort by the namespace, otherwise the sorting will happen on the value.") + args = parser.parse_args() + + misp = init(misp_url, misp_key) + + stats = misp.get_tags_statistics(args.percentage, args.namesort) + print(json.dumps(stats)) diff --git a/examples/test_sign.py b/examples/test_sign.py new file mode 100755 index 0000000..106aa29 --- /dev/null +++ b/examples/test_sign.py @@ -0,0 +1,19 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +import argparse + +from pymisp import mispevent + + +if __name__ == '__main__': + parser = argparse.ArgumentParser(description='Sign & verify a MISP event.') + parser.add_argument("-i", "--input", required=True, help="Json file") + parser.add_argument("-u", "--uid", required=True, help="GPG UID") + args = parser.parse_args() + + me = mispevent.MISPEvent() + me.load(args.input) + + me.sign(args.uid) + me.verify(args.uid) diff --git a/examples/up.py b/examples/up.py index f1b5d45..cdca33e 100755 --- a/examples/up.py +++ b/examples/up.py @@ -10,13 +10,13 @@ import argparse def init(url, key): - return PyMISP(url, key, True, 'json') + return PyMISP(url, key, True, 'json', debug=True) def up_event(m, event, content): with open(content, 'r') as f: result = m.update_event(event, f.read()) - print result.text + 
print(result) if __name__ == '__main__': parser = argparse.ArgumentParser(description='Get an event from a MISP instance.') diff --git a/examples/upload.py b/examples/upload.py index 4f49f20..4c6708d 100755 --- a/examples/upload.py +++ b/examples/upload.py @@ -2,7 +2,7 @@ # -*- coding: utf-8 -*- from pymisp import PyMISP -from keys import misp_url, misp_key,misp_verifycert +from keys import misp_url, misp_key, misp_verifycert import argparse import os import glob @@ -12,8 +12,8 @@ def init(url, key): return PyMISP(url, key, misp_verifycert, 'json') -def upload_files(m, eid, paths, distrib, ids, categ, info, analysis, threat): - out = m.upload_samplelist(paths, eid, distrib, ids, categ, info, analysis, threat) +def upload_files(m, eid, paths, distrib, ids, categ, comment, info, analysis, threat): + out = m.upload_samplelist(paths, eid, distrib, ids, categ, comment, info, analysis, threat) print(out) if __name__ == '__main__': @@ -26,6 +26,7 @@ if __name__ == '__main__': parser.add_argument("-i", "--info", help="Used to populate the event info field if no event ID supplied.") parser.add_argument("-a", "--analysis", type=int, help="The analysis level of the newly created event, if applicatble. [0-2]") parser.add_argument("-t", "--threat", type=int, help="The threat level ID of the newly created event, if applicatble. 
[1-4]") + parser.add_argument("-co", "--comment", type=str, help="Comment for the uploaded file(s).") args = parser.parse_args() misp = init(misp_url, misp_key) @@ -39,4 +40,4 @@ if __name__ == '__main__': print('invalid file') exit(0) - upload_files(misp, args.event, files, args.distrib, args.ids, args.categ, args.info, args.analysis, args.threat) + upload_files(misp, args.event, files, args.distrib, args.ids, args.categ, args.comment, args.info, args.analysis, args.threat) diff --git a/examples/user_sample.json b/examples/user_sample.json new file mode 100644 index 0000000..626b519 --- /dev/null +++ b/examples/user_sample.json @@ -0,0 +1,6 @@ +{ + "email":"maaiil@domain.lu", + "org_id":1, + "role_id":1, + "autoalert":1 +} diff --git a/examples/users_list.py b/examples/users_list.py new file mode 100644 index 0000000..78620ee --- /dev/null +++ b/examples/users_list.py @@ -0,0 +1,24 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +from pymisp import PyMISP +from keys import misp_url, misp_key +import argparse + +# For python2 & 3 compat, a bit dirty, but it seems to be the least bad one +try: + input = raw_input +except NameError: + pass + + +def init(url, key): + return PyMISP(url, key, True, 'json') + +if __name__ == '__main__': + parser = argparse.ArgumentParser(description='Get a list of the sharing groups from the MISP instance.') + + misp = init(misp_url, misp_key) + + users_list = misp.get_users_list() + print (users_list) diff --git a/pymisp/__init__.py b/pymisp/__init__.py index d93ebf1..e168ce7 100644 --- a/pymisp/__init__.py +++ b/pymisp/__init__.py @@ -1,3 +1,7 @@ -__version__ = '2.2' +__version__ = '2.4.56' -from .api import PyMISP, PyMISPError, NewEventError, NewAttributeError, MissingDependency, NoURL, NoKey +from .exceptions import PyMISPError, NewEventError, NewAttributeError, MissingDependency, NoURL, NoKey +from .api import PyMISP +from .mispevent import MISPEvent, MISPAttribute, EncodeUpdate, EncodeFull +from .tools.neo4j import Neo4j +from 
.tools import stix diff --git a/pymisp/api.py b/pymisp/api.py index 8c95c25..572af35 100644 --- a/pymisp/api.py +++ b/pymisp/api.py @@ -1,22 +1,23 @@ #!/usr/bin/env python # -*- coding: utf-8 -*- -""" Python API using the REST interface of MISP """ +"""Python API using the REST interface of MISP""" +import sys import json import datetime import os import base64 import re +import warnings try: from urllib.parse import urljoin except ImportError: from urlparse import urljoin + warnings.warn("You're using python 2, it is strongly recommended to use python >=3.4") from io import BytesIO import zipfile -import warnings -import functools try: import requests @@ -25,71 +26,63 @@ except ImportError: HAVE_REQUESTS = False from . import __version__ +from .exceptions import PyMISPError, SearchError, MissingDependency, NoURL, NoKey +from .mispevent import MISPEvent, MISPAttribute, EncodeUpdate + # Least dirty way to support python 2 and 3 try: basestring + warnings.warn("You're using python 2, it is strongly recommended to use python >=3.4") except NameError: basestring = str -class PyMISPError(Exception): - def __init__(self, message): - super(PyMISPError, self).__init__(message) - self.message = message +class distributions(object): + """Enumeration of the available distributions.""" + your_organization = 0 + this_community = 1 + connected_communities = 2 + all_communities = 3 + sharing_group = 4 -class NewEventError(PyMISPError): - pass +class threat_level(object): + """Enumeration of the available threat levels.""" + high = 1 + medium = 2 + low = 3 + undefined = 4 -class NewAttributeError(PyMISPError): - pass - - -class MissingDependency(PyMISPError): - pass - - -class NoURL(PyMISPError): - pass - - -class NoKey(PyMISPError): - pass - - -def deprecated(func): - '''This is a decorator which can be used to mark functions - as deprecated. 
It will result in a warning being emitted - when the function is used.''' - - @functools.wraps(func) - def new_func(*args, **kwargs): - warnings.warn_explicit( - "Call to deprecated function {}.".format(func.__name__), - category=DeprecationWarning, - filename=func.__code__.co_filename, - lineno=func.__code__.co_firstlineno + 1 - ) - return func(*args, **kwargs) - return new_func +class analysis(object): + """Enumeration of the available analysis statuses.""" + initial = 0 + ongoing = 1 + completed = 2 class PyMISP(object): - """ - Python API for MISP + """Python API for MISP - :param url: URL of the MISP instance you want to connect to - :param key: API key of the user you want to use - :param ssl: can be True or False (to check ot not the validity - of the certificate. Or a CA_BUNDLE in case of self - signed certiifcate (the concatenation of all the - *.crt of the chain) - :param out_type: Type of object (json or xml) + :param url: URL of the MISP instance you want to connect to + :param key: API key of the user you want to use + :param ssl: can be True or False (to check ot not the validity + of the certificate. Or a CA_BUNDLE in case of self + signed certiifcate (the concatenation of all the + *.crt of the chain) + :param out_type: Type of object (json) NOTE: XML output isn't supported anymore, keeping the flag for compatibility reasons. + :param debug: print all the messages received from the server + :param proxies: Proxy dict as describes here: http://docs.python-requests.org/en/master/user/advanced/#proxies + :param cert: Client certificate, as described there: http://docs.python-requests.org/en/master/user/advanced/#ssl-cert-verification """ - def __init__(self, url, key, ssl=True, out_type='json', debug=False): + # So it can may be accessed from the misp object. 
+ distributions = distributions + threat_level = threat_level + analysis = analysis + + def __init__(self, url, key, ssl=True, out_type='json', debug=False, proxies=None, cert=None): if not url: raise NoURL('Please provide the URL of your MISP instance.') if not key: @@ -98,48 +91,64 @@ class PyMISP(object): self.root_url = url self.key = key self.ssl = ssl - self.out_type = out_type + self.proxies = proxies + self.cert = cert + self.ressources_path = os.path.join(os.path.abspath(os.path.dirname(__file__)), 'data') + if out_type != 'json': + raise PyMISPError('The only output type supported by PyMISP is JSON. If you still rely on XML, use PyMISP v2.4.49') self.debug = debug - self.categories = ['Internal reference', 'Targeting data', 'Antivirus detection', - 'Payload delivery', 'Payload installation', 'Artifacts dropped', - 'Persistence mechanism', 'Network activity', 'Payload type', - 'Attribution', 'External analysis', 'Other'] - self.types = ['md5', 'sha1', 'sha256', 'filename', 'filename|md5', 'filename|sha1', - 'filename|sha256', 'ip-src', 'ip-dst', 'hostname', 'domain', 'url', - 'user-agent', 'http-method', 'regkey', 'regkey|value', 'AS', 'snort', - 'pattern-in-file', 'pattern-in-traffic', 'pattern-in-memory', 'named pipe', - 'mutex', 'vulnerability', 'attachment', 'malware-sample', 'link', 'comment', - 'text', 'email-src', 'email-dst', 'email-subject', 'email-attachment', - 'yara', 'target-user', 'target-email', 'target-machine', 'target-org', - 'target-location', 'target-external', 'other'] - try: # Make sure the MISP instance is working and the URL is valid - self.get_version() + response = self.get_version() + misp_version = response['version'].split('.') + pymisp_version = __version__.split('.') + for a, b in zip(misp_version, pymisp_version): + if a == b: + continue + elif a < b: + warnings.warn("Remote MISP instance (v{}) older than PyMISP (v{}). 
You should update your MISP instance, or install an older PyMISP version.".format(response['version'], __version__)) + else: # a > b + # NOTE: That can happen and should not be blocking + warnings.warn("Remote MISP instance (v{}) newer than PyMISP (v{}). Please check if a newer version of PyMISP is available.".format(response['version'], __version__)) + continue + except Exception as e: raise PyMISPError('Unable to connect to MISP ({}). Please make sure the API key and the URL are correct (http/https is required): {}'.format(self.root_url, e)) - def __prepare_session(self, force_out=None): - """ - Prepare the headers of the session + try: + session = self.__prepare_session() + response = session.get(urljoin(self.root_url, 'attributes/describeTypes.json')) + describe_types = self._check_response(response) + if describe_types.get('error'): + for e in describe_types.get('error'): + raise PyMISPError('Failed: {}'.format(e)) + self.describe_types = describe_types['result'] + if not self.describe_types.get('sane_defaults'): + raise PyMISPError('The MISP server your are trying to reach is outdated (<2.4.52). 
Please use PyMISP v2.4.51.1 (pip install -I PyMISP==v2.4.51.1) and/or contact your administrator.') + except: + describe_types = json.load(open(os.path.join(self.ressources_path, 'describeTypes.json'), 'r')) + self.describe_types = describe_types['result'] - :param force_out: force the type of the expect output - (overwrite the constructor) + self.categories = self.describe_types['categories'] + self.types = self.describe_types['types'] + self.category_type_mapping = self.describe_types['category_type_mappings'] + self.sane_default = self.describe_types['sane_defaults'] + + def __prepare_session(self, output='json'): + """Prepare the headers of the session""" - """ if not HAVE_REQUESTS: raise MissingDependency('Missing dependency, install requests (`pip install requests`)') - if force_out is not None: - out = force_out - else: - out = self.out_type session = requests.Session() session.verify = self.ssl + session.proxies = self.proxies + session.cert = self.cert session.headers.update( {'Authorization': self.key, - 'Accept': 'application/' + out, - 'content-type': 'application/' + out}) + 'Accept': 'application/{}'.format(output), + 'content-type': 'application/{}'.format(output), + 'User-Agent': 'PyMISP {} - Python {}.{}.{}'.format(__version__, *sys.version_info)}) return session def flatten_error_messages(self, response): @@ -153,10 +162,23 @@ class PyMISP(object): elif response.get('errors'): if isinstance(response['errors'], dict): for where, errors in response['errors'].items(): - for e in errors: - for type_e, msgs in e.items(): - for m in msgs: - messages.append('Error in {}: {}'.format(where, m)) + if isinstance(errors, dict): + for where, msg in errors.items(): + if isinstance(msg, list): + for m in msg: + messages.append('Error in {}: {}'.format(where, m)) + else: + messages.append('Error in {}: {}'.format(where, msg)) + else: + for e in errors: + if not e: + continue + if isinstance(e, str): + messages.append(e) + continue + for type_e, msgs in e.items(): 
+ for m in msgs: + messages.append('Error in {}: {}'.format(where, m)) return messages def _check_response(self, response): @@ -170,6 +192,8 @@ class PyMISP(object): raise PyMISPError('Unknown error: {}'.format(response.text)) errors = [] + if isinstance(to_return, list): + to_return = {'response': to_return} if to_return.get('error'): if not isinstance(to_return['error'], list): errors.append(to_return['error']) @@ -192,186 +216,209 @@ class PyMISP(object): # ############### Simple REST API ################ # ################################################ - def get_index(self, force_out=None, filters=None): - """ - Return the index. + def get_index(self, filters=None): + """Return the index. - Warning, there's a limit on the number of results + Warning, there's a limit on the number of results """ - session = self.__prepare_session(force_out) + session = self.__prepare_session() url = urljoin(self.root_url, 'events/index') if filters is not None: filters = json.dumps(filters) - print filters - return session.post(url, data=filters) + response = session.post(url, data=filters) else: - return session.get(url) + response = session.get(url) + return self._check_response(response) - def get_event(self, event_id, force_out=None): - """ - Get an event + def get_event(self, event_id): + """Get an event - :param event_id: Event id to get + :param event_id: Event id to get """ - session = self.__prepare_session(force_out) + session = self.__prepare_session() url = urljoin(self.root_url, 'events/{}'.format(event_id)) - return session.get(url) + response = session.get(url) + return self._check_response(response) - def add_event(self, event, force_out=None): - """ - Add a new event + def get_stix_event(self, event_id=None, with_attachments=False, from_date=False, to_date=False, tags=False): + """Get an event/events in STIX format""" + if tags: + if isinstance(tags, list): + tags = "&&".join(tags) - :param event: Event as JSON object / string or XML to add + session = 
self.__prepare_session() + url = urljoin(self.root_url, "/events/stix/download/{}/{}/{}/{}/{}".format( + event_id, with_attachments, tags, from_date, to_date)) + if self.debug: + print("Getting STIX event from {}".format(url)) + response = session.get(url) + return self._check_response(response) + + def add_event(self, event): + """Add a new event + + :param event: Event as JSON object / string or XML to add """ - session = self.__prepare_session(force_out) + session = self.__prepare_session() url = urljoin(self.root_url, 'events') - if self.out_type == 'json': - if isinstance(event, basestring): - return session.post(url, data=event) - else: - return session.post(url, data=json.dumps(event)) + if isinstance(event, basestring): + response = session.post(url, data=event) else: - return session.post(url, data=event) + response = session.post(url, data=json.dumps(event)) + return self._check_response(response) - def update_event(self, event_id, event, force_out=None): - """ - Update an event + def update_event(self, event_id, event): + """Update an event - :param event_id: Event id to update - :param event: Event as JSON object / string or XML to add + :param event_id: Event id to update + :param event: Event as JSON object / string or XML to add """ - session = self.__prepare_session(force_out) + session = self.__prepare_session() url = urljoin(self.root_url, 'events/{}'.format(event_id)) - if self.out_type == 'json': - if isinstance(event, basestring): - return session.post(url, data=event) - else: - return session.post(url, data=json.dumps(event)) + if isinstance(event, basestring): + response = session.post(url, data=event) else: - return session.post(url, data=event) + response = session.post(url, data=json.dumps(event)) + return self._check_response(response) - def delete_event(self, event_id, force_out=None): - """ - Delete an event + def delete_event(self, event_id): + """Delete an event - :param event_id: Event id to delete + :param event_id: Event id to 
delete """ - session = self.__prepare_session(force_out) + session = self.__prepare_session() url = urljoin(self.root_url, 'events/{}'.format(event_id)) - return session.delete(url) + response = session.delete(url) + return self._check_response(response) - def delete_attribute(self, attribute_id, force_out=None): - session = self.__prepare_session(force_out) + def delete_attribute(self, attribute_id): + session = self.__prepare_session() url = urljoin(self.root_url, 'attributes/{}'.format(attribute_id)) - return session.delete(url) + response = session.delete(url) + return self._check_response(response) # ############################################## # ######### Event handling (Json only) ######### # ############################################## - def _prepare_full_event(self, distribution, threat_level_id, analysis, info, date=None, published=False): - to_return = {'Event': {}} - # Setup details of a new event - if distribution not in [0, 1, 2, 3]: - raise NewEventError('{} is invalid, the distribution has to be in 0, 1, 2, 3'.format(distribution)) - if threat_level_id not in [1, 2, 3, 4]: - raise NewEventError('{} is invalid, the threat_level_id has to be in 1, 2, 3, 4'.format(threat_level_id)) - if analysis not in [0, 1, 2]: - raise NewEventError('{} is invalid, the analysis has to be in 0, 1, 2'.format(analysis)) - if date is None: - date = datetime.date.today().isoformat() - if published not in [True, False]: - raise NewEventError('{} is invalid, published has to be True or False'.format(published)) - to_return['Event'] = {'distribution': distribution, 'info': info, 'date': date, 'published': published, - 'threat_level_id': threat_level_id, 'analysis': analysis} - return to_return + def _prepare_full_event(self, distribution, threat_level_id, analysis, info, date=None, published=False, orgc_id=None, org_id=None, sharing_group_id=None): + misp_event = MISPEvent(self.describe_types) + misp_event.set_all_values(info=info, distribution=distribution, 
threat_level_id=threat_level_id, + analysis=analysis, date=date, orgc_id=orgc_id, org_id=org_id, sharing_group_id=sharing_group_id) + if published: + misp_event.publish() + return misp_event - def _prepare_full_attribute(self, category, type_value, value, to_ids, comment=None, distribution=None): - to_return = {} - if category not in self.categories: - raise NewAttributeError('{} is invalid, category has to be in {}'.format(category, (', '.join(self.categories)))) - to_return['category'] = category + def _prepare_full_attribute(self, category, type_value, value, to_ids, comment=None, distribution=5, **kwargs): + misp_attribute = MISPAttribute(self.describe_types) + misp_attribute.set_all_values(type=type_value, value=value, category=category, + to_ids=to_ids, comment=comment, distribution=distribution, **kwargs) + return misp_attribute - if type_value not in self.types: - raise NewAttributeError('{} is invalid, type_value has to be in {}'.format(type_value, (', '.join(self.types)))) - to_return['type'] = type_value - - if to_ids not in [True, False]: - raise NewAttributeError('{} is invalid, to_ids has to be True or False'.format(to_ids)) - to_return['to_ids'] = to_ids - - if distribution is not None: - distribution = int(distribution) - # If None: take the default value of the event - if distribution not in [None, 0, 1, 2, 3]: - raise NewAttributeError('{} is invalid, the distribution has to be in 0, 1, 2, 3 or None'.format(distribution)) - if distribution is not None: - to_return['distribution'] = distribution - - to_return['value'] = value - - if comment is not None: - to_return['comment'] = comment - - return to_return - - def _prepare_update(self, event): - # Cleanup the received event to make it publishable - event['Event'].pop('locked', None) - event['Event'].pop('attribute_count', None) - event['Event'].pop('RelatedEvent', None) - event['Event'].pop('orgc', None) - event['Event'].pop('ShadowAttribute', None) - event['Event'].pop('org', None) - 
event['Event'].pop('proposal_email_lock', None) - event['Event'].pop('publish_timestamp', None) - event['Event'].pop('published', None) - event['Event'].pop('timestamp', None) - event['Event']['id'] = int(event['Event']['id']) - return event + def _one_or_more(self, value): + """Returns a list/tuple of one or more items, regardless of input.""" + return value if isinstance(value, (tuple, list)) else (value,) # ########## Helpers ########## + def _make_mispevent(self, event): + if not isinstance(event, MISPEvent): + e = MISPEvent(self.describe_types) + e.load(event) + else: + e = event + return e + def get(self, eid): - response = self.get_event(int(eid), 'json') - return self._check_response(response) + return self.get_event(eid) + + def get_stix(self, **kwargs): + return self.get_stix_event(**kwargs) def update(self, event): - eid = event['Event']['id'] - response = self.update_event(eid, event, 'json') - return self._check_response(response) - - def new_event(self, distribution=None, threat_level_id=None, analysis=None, info=None, date=None, published=False): - data = self._prepare_full_event(distribution, threat_level_id, analysis, info, date, published) - response = self.add_event(data, 'json') - return self._check_response(response) + e = self._make_mispevent(event) + if e.uuid: + eid = e.uuid + else: + eid = e.id + return self.update_event(eid, json.dumps(e, cls=EncodeUpdate)) def publish(self, event): - if event['Event']['published']: + e = self._make_mispevent(event) + if e.published: return {'error': 'Already published'} - event = self._prepare_update(event) - event['Event']['published'] = True - response = self.update_event(event['Event']['id'], event, 'json') + e.publish() + return self.update(event) + + def change_threat_level(self, event, threat_level_id): + e = self._make_mispevent(event) + e.threat_level_id = threat_level_id + return self.update(event) + + def change_sharing_group(self, event, sharing_group_id): + e = self._make_mispevent(event) + 
e.distribution = 4 # Needs to be 'Sharing group' + e.sharing_group_id = sharing_group_id + return self.update(event) + + def new_event(self, distribution=None, threat_level_id=None, analysis=None, info=None, date=None, published=False, orgc_id=None, org_id=None, sharing_group_id=None): + misp_event = self._prepare_full_event(distribution, threat_level_id, analysis, info, date, published, orgc_id, org_id, sharing_group_id) + return self.add_event(json.dumps(misp_event, cls=EncodeUpdate)) + + def add_tag(self, event, tag, attribute=False): + # FIXME: this is dirty, this function needs to be deprecated with something tagging a UUID + session = self.__prepare_session() + if attribute: + to_post = {'request': {'Attribute': {'id': event['id'], 'tag': tag}}} + path = 'attributes/addTag' + else: + to_post = {'request': {'Event': {'id': event['id'], 'tag': tag}}} + path = 'events/addTag' + response = session.post(urljoin(self.root_url, path), data=json.dumps(to_post)) return self._check_response(response) + def remove_tag(self, event, tag, attribute=False): + # FIXME: this is dirty, this function needs to be deprecated with something removing the tag to a UUID + session = self.__prepare_session() + if attribute: + to_post = {'request': {'Attribute': {'id': event['id'], 'tag': tag}}} + path = 'attributes/addTag' + else: + to_post = {'request': {'Event': {'id': event['Event']['id'], 'tag': tag}}} + path = 'events/addTag' + response = session.post(urljoin(self.root_url, path), data=json.dumps(to_post)) + return self._check_response(response) + + def _valid_uuid(self, uuid): + """Test if uuid is valid + Will test against CakeText's RFC 4122, i.e + "the third group must start with a 4, + and the fourth group must start with 8, 9, a or b." 
+ + :param uuid: an uuid + """ + regex = re.compile('^[a-f0-9]{8}-?[a-f0-9]{4}-?4[a-f0-9]{3}-?[89ab][a-f0-9]{3}-?[a-f0-9]{12}\Z', re.I) + match = regex.match(uuid) + return bool(match) # ##### File attributes ##### def _send_attributes(self, event, attributes, proposal=False): if proposal: response = self.proposal_add(event['Event']['id'], attributes) else: - event = self._prepare_update(event) - for a in attributes: - if a.get('distribution') is None: - a['distribution'] = event['Event']['distribution'] - event['Event']['Attribute'] = attributes - response = self.update_event(event['Event']['id'], event, 'json') - return self._check_response(response) + e = MISPEvent(self.describe_types) + e.load(event) + e.attributes += attributes + response = self.update(event) + return response - def add_hashes(self, event, category='Artifacts dropped', filename=None, md5=None, sha1=None, sha256=None, comment=None, to_ids=True, distribution=None, proposal=False): - categories = ['Payload delivery', 'Artifacts dropped', 'Payload installation', 'External analysis'] - if category not in categories: - raise NewAttributeError('{} is invalid, category has to be in {}'.format(category, (', '.join(categories)))) + def add_named_attribute(self, event, type_value, value, category=None, to_ids=False, comment=None, distribution=None, proposal=False, **kwargs): + attributes = [] + for value in self._one_or_more(value): + attributes.append(self._prepare_full_attribute(category, type_value, value, to_ids, comment, distribution, **kwargs)) + return self._send_attributes(event, attributes, proposal) + + def add_hashes(self, event, category='Artifacts dropped', filename=None, md5=None, sha1=None, sha256=None, ssdeep=None, comment=None, to_ids=True, distribution=None, proposal=False): attributes = [] type_value = '{}' @@ -388,12 +435,52 @@ class PyMISP(object): if sha256: attributes.append(self._prepare_full_attribute(category, type_value.format('sha256'), value.format(sha256), to_ids, comment, 
distribution)) + if ssdeep: + attributes.append(self._prepare_full_attribute(category, type_value.format('ssdeep'), value.format(ssdeep), + to_ids, comment, distribution)) return self._send_attributes(event, attributes, proposal) + def av_detection_link(self, event, link, category='Antivirus detection', to_ids=False, comment=None, distribution=None, proposal=False): + return self.add_named_attribute(event, 'link', link, category, to_ids, comment, distribution, proposal) + + def add_detection_name(self, event, name, category='Antivirus detection', to_ids=False, comment=None, distribution=None, proposal=False): + return self.add_named_attribute(event, 'text', name, category, to_ids, comment, distribution, proposal) + + def add_filename(self, event, filename, category='Artifacts dropped', to_ids=False, comment=None, distribution=None, proposal=False): + return self.add_named_attribute(event, 'filename', filename, category, to_ids, comment, distribution, proposal) + + def add_attachment(self, event, filename, attachment=None, category='Artifacts dropped', to_ids=False, comment=None, distribution=None, proposal=False): + """Add an attachment to the MISP event + + :param event: The event to add an attachment to + :param filename: The name you want to store the file under + :param attachment: Either a file handle or a path to a file - will be uploaded + """ + + if hasattr(attachment, "read"): + # It's a file handle - we can read it + fileData = attachment.read() + + elif isinstance(attachment, str): + # It can either be the b64 encoded data or a file path + if os.path.exists(attachment): + # It's a path! 
+ with open(attachment, "r") as f: + fileData = f.read() + else: + # We have to assume it's the actual data + fileData = attachment + + # by now we have a string for the file + # we just need to b64 encode it and send it on its way + # also, just decode it to utf-8 to avoid the b'string' format + encodedData = base64.b64encode(fileData.encode("utf-8")).decode("utf-8") + + # Send it on its way + return self.add_named_attribute(event, 'attachment', filename, category, to_ids, comment, distribution, proposal, data=encodedData) + def add_regkey(self, event, regkey, rvalue=None, category='Artifacts dropped', to_ids=True, comment=None, distribution=None, proposal=False): - type_value = '{}' - value = '{}' if rvalue: type_value = 'regkey|value' value = '{}|{}'.format(regkey, rvalue) @@ -405,147 +492,142 @@ class PyMISP(object): attributes.append(self._prepare_full_attribute(category, type_value, value, to_ids, comment, distribution)) return self._send_attributes(event, attributes, proposal) - def add_pattern(self, event, pattern, in_file=True, in_memory=False, category='Artifacts dropped', to_ids=True, comment=None, distribution=None, proposal=False): + def add_regkeys(self, event, regkeys_values, category='Artifacts dropped', to_ids=True, comment=None, distribution=None, proposal=False): attributes = [] - if in_file: - attributes.append(self._prepare_full_attribute(category, 'pattern-in-file', pattern, to_ids, comment, distribution)) - if in_memory: - attributes.append(self._prepare_full_attribute(category, 'pattern-in-memory', pattern, to_ids, comment, distribution)) + for regkey, rvalue in regkeys_values.items(): + if rvalue is not None: + type_value = 'regkey|value' + value = '{}|{}'.format(regkey, rvalue) + else: + type_value = 'regkey' + value = regkey + + attributes.append(self._prepare_full_attribute(category, type_value, value, to_ids, comment, distribution)) return self._send_attributes(event, attributes, proposal) + def add_pattern(self, event, pattern, 
in_file=True, in_memory=False, category='Artifacts dropped', to_ids=True, comment=None, distribution=None, proposal=False): + if not (in_file or in_memory): + raise PyMISPError('Invalid pattern type: please use in_memory=True or in_file=True') + itemtype = 'pattern-in-file' if in_file else 'pattern-in-memory' + return self.add_named_attribute(event, itemtype, pattern, category, to_ids, comment, distribution, proposal) + def add_pipe(self, event, named_pipe, category='Artifacts dropped', to_ids=True, comment=None, distribution=None, proposal=False): - attributes = [] - if not named_pipe.startswith('\\.\\pipe\\'): - named_pipe = '\\.\\pipe\\{}'.format(named_pipe) - attributes.append(self._prepare_full_attribute(category, 'named pipe', named_pipe, to_ids, comment, distribution)) - return self._send_attributes(event, attributes, proposal) + def scrub(s): + if not s.startswith('\\.\\pipe\\'): + s = '\\.\\pipe\\{}'.format(s) + return s + attributes = list(map(scrub, self._one_or_more(named_pipe))) + return self.add_named_attribute(event, 'named pipe', attributes, category, to_ids, comment, distribution, proposal) def add_mutex(self, event, mutex, category='Artifacts dropped', to_ids=True, comment=None, distribution=None, proposal=False): - attributes = [] - if not mutex.startswith('\\BaseNamedObjects\\'): - mutex = '\\BaseNamedObjects\\{}'.format(mutex) - attributes.append(self._prepare_full_attribute(category, 'mutex', mutex, to_ids, comment, distribution)) - return self._send_attributes(event, attributes, proposal) + def scrub(s): + if not s.startswith('\\BaseNamedObjects\\'): + s = '\\BaseNamedObjects\\{}'.format(s) + return s + attributes = list(map(scrub, self._one_or_more(mutex))) + return self.add_named_attribute(event, 'mutex', attributes, category, to_ids, comment, distribution, proposal) + + def add_yara(self, event, yara, category='Payload delivery', to_ids=False, comment=None, distribution=None, proposal=False): + return self.add_named_attribute(event, 
'yara', yara, category, to_ids, comment, distribution, proposal) # ##### Network attributes ##### def add_ipdst(self, event, ipdst, category='Network activity', to_ids=True, comment=None, distribution=None, proposal=False): - attributes = [] - attributes.append(self._prepare_full_attribute(category, 'ip-dst', ipdst, to_ids, comment, distribution)) - return self._send_attributes(event, attributes, proposal) + return self.add_named_attribute(event, 'ip-dst', ipdst, category, to_ids, comment, distribution, proposal) def add_ipsrc(self, event, ipsrc, category='Network activity', to_ids=True, comment=None, distribution=None, proposal=False): - attributes = [] - attributes.append(self._prepare_full_attribute(category, 'ip-src', ipsrc, to_ids, comment, distribution)) - return self._send_attributes(event, attributes, proposal) + return self.add_named_attribute(event, 'ip-src', ipsrc, category, to_ids, comment, distribution, proposal) def add_hostname(self, event, hostname, category='Network activity', to_ids=True, comment=None, distribution=None, proposal=False): - attributes = [] - attributes.append(self._prepare_full_attribute(category, 'hostname', hostname, to_ids, comment, distribution)) - return self._send_attributes(event, attributes, proposal) + return self.add_named_attribute(event, 'hostname', hostname, category, to_ids, comment, distribution, proposal) def add_domain(self, event, domain, category='Network activity', to_ids=True, comment=None, distribution=None, proposal=False): - attributes = [] - attributes.append(self._prepare_full_attribute(category, 'domain', domain, to_ids, comment, distribution)) - return self._send_attributes(event, attributes, proposal) + return self.add_named_attribute(event, 'domain', domain, category, to_ids, comment, distribution, proposal) + + def add_domain_ip(self, event, domain, ip, category='Network activity', to_ids=True, comment=None, distribution=None, proposal=False): + composed = list(map(lambda x: '%s|%s' % (domain, x), 
ip)) + return self.add_named_attribute(event, 'domain|ip', composed, category, to_ids, comment, distribution, proposal) + + def add_domains_ips(self, event, domain_ips, category='Network activity', to_ids=True, comment=None, distribution=None, proposal=False): + composed = list(map(lambda x: '%s|%s' % (x[0], x[1]), domain_ips.items())) + return self.add_named_attribute(event, 'domain|ip', composed, category, to_ids, comment, distribution, proposal) def add_url(self, event, url, category='Network activity', to_ids=True, comment=None, distribution=None, proposal=False): - attributes = [] - attributes.append(self._prepare_full_attribute(category, 'url', url, to_ids, comment, distribution)) - return self._send_attributes(event, attributes, proposal) + return self.add_named_attribute(event, 'url', url, category, to_ids, comment, distribution, proposal) def add_useragent(self, event, useragent, category='Network activity', to_ids=True, comment=None, distribution=None, proposal=False): - attributes = [] - attributes.append(self._prepare_full_attribute(category, 'user-agent', useragent, to_ids, comment, distribution)) - return self._send_attributes(event, attributes, proposal) + return self.add_named_attribute(event, 'user-agent', useragent, category, to_ids, comment, distribution, proposal) def add_traffic_pattern(self, event, pattern, category='Network activity', to_ids=True, comment=None, distribution=None, proposal=False): - attributes = [] - attributes.append(self._prepare_full_attribute(category, 'pattern-in-traffic', pattern, to_ids, comment, distribution)) - return self._send_attributes(event, attributes, proposal) + return self.add_named_attribute(event, 'pattern-in-traffic', pattern, category, to_ids, comment, distribution, proposal) def add_snort(self, event, snort, category='Network activity', to_ids=True, comment=None, distribution=None, proposal=False): - attributes = [] - attributes.append(self._prepare_full_attribute(category, 'snort', snort, to_ids, 
comment, distribution)) - return self._send_attributes(event, attributes, proposal) + return self.add_named_attribute(event, 'snort', snort, category, to_ids, comment, distribution, proposal) + + def add_net_other(self, event, netother, category='Network activity', to_ids=True, comment=None, distribution=None, proposal=False): + return self.add_named_attribute(event, 'other', netother, category, to_ids, comment, distribution, proposal) # ##### Email attributes ##### - def add_email_src(self, event, email, to_ids=True, comment=None, distribution=None, proposal=False): - attributes = [] - attributes.append(self._prepare_full_attribute('Payload delivery', 'email-src', email, to_ids, comment, distribution)) - return self._send_attributes(event, attributes, proposal) + def add_email_src(self, event, email, category='Payload delivery', to_ids=True, comment=None, distribution=None, proposal=False): + return self.add_named_attribute(event, 'email-src', email, category, to_ids, comment, distribution, proposal) def add_email_dst(self, event, email, category='Payload delivery', to_ids=True, comment=None, distribution=None, proposal=False): - categories = ['Payload delivery', 'Network activity'] - if category not in categories: - raise NewAttributeError('{} is invalid, category has to be in {}'.format(category, (', '.join(categories)))) - attributes = [] - attributes.append(self._prepare_full_attribute(category, 'email-dst', email, to_ids, comment, distribution)) - return self._send_attributes(event, attributes, proposal) + return self.add_named_attribute(event, 'email-dst', email, category, to_ids, comment, distribution, proposal) - def add_email_subject(self, event, email, to_ids=True, comment=None, distribution=None, proposal=False): - attributes = [] - attributes.append(self._prepare_full_attribute('Payload delivery', 'email-subject', email, to_ids, comment, distribution)) - return self._send_attributes(event, attributes, proposal) + def add_email_subject(self, event, 
email, category='Payload delivery', to_ids=True, comment=None, distribution=None, proposal=False): + return self.add_named_attribute(event, 'email-subject', email, category, to_ids, comment, distribution, proposal) - def add_email_attachment(self, event, email, to_ids=True, comment=None, distribution=None, proposal=False): - attributes = [] - attributes.append(self._prepare_full_attribute('Payload delivery', 'email-attachment', email, to_ids, comment, distribution)) - return self._send_attributes(event, attributes, proposal) + def add_email_attachment(self, event, email, category='Payload delivery', to_ids=True, comment=None, distribution=None, proposal=False): + return self.add_named_attribute(event, 'email-attachment', email, category, to_ids, comment, distribution, proposal) # ##### Target attributes ##### - def add_target_email(self, event, target, to_ids=True, comment=None, distribution=None, proposal=False): - attributes = [] - attributes.append(self._prepare_full_attribute('Targeting data', 'target-email', target, to_ids, comment, distribution)) - return self._send_attributes(event, attributes, proposal) + def add_target_email(self, event, target, category='Targeting data', to_ids=True, comment=None, distribution=None, proposal=False): + return self.add_named_attribute(event, 'target-email', target, category, to_ids, comment, distribution, proposal) - def add_target_user(self, event, target, to_ids=True, comment=None, distribution=None, proposal=False): - attributes = [] - attributes.append(self._prepare_full_attribute('Targeting data', 'target-user', target, to_ids, comment, distribution)) - return self._send_attributes(event, attributes, proposal) + def add_target_user(self, event, target, category='Targeting data', to_ids=True, comment=None, distribution=None, proposal=False): + return self.add_named_attribute(event, 'target-user', target, category, to_ids, comment, distribution, proposal) - def add_target_machine(self, event, target, to_ids=True, 
comment=None, distribution=None, proposal=False): - attributes = [] - attributes.append(self._prepare_full_attribute('Targeting data', 'target-machine', target, to_ids, comment, distribution)) - return self._send_attributes(event, attributes, proposal) + def add_target_machine(self, event, target, category='Targeting data', to_ids=True, comment=None, distribution=None, proposal=False): + return self.add_named_attribute(event, 'target-machine', target, category, to_ids, comment, distribution, proposal) - def add_target_org(self, event, target, to_ids=True, comment=None, distribution=None, proposal=False): - attributes = [] - attributes.append(self._prepare_full_attribute('Targeting data', 'target-org', target, to_ids, comment, distribution)) - return self._send_attributes(event, attributes, proposal) + def add_target_org(self, event, target, category='Targeting data', to_ids=True, comment=None, distribution=None, proposal=False): + return self.add_named_attribute(event, 'target-org', target, category, to_ids, comment, distribution, proposal) - def add_target_location(self, event, target, to_ids=True, comment=None, distribution=None, proposal=False): - attributes = [] - attributes.append(self._prepare_full_attribute('Targeting data', 'target-location', target, to_ids, comment, distribution)) - return self._send_attributes(event, attributes, proposal) + def add_target_location(self, event, target, category='Targeting data', to_ids=True, comment=None, distribution=None, proposal=False): + return self.add_named_attribute(event, 'target-location', target, category, to_ids, comment, distribution, proposal) - def add_target_external(self, event, target, to_ids=True, comment=None, distribution=None, proposal=False): - attributes = [] - attributes.append(self._prepare_full_attribute('Targeting data', 'target-external', target, to_ids, comment, distribution)) - return self._send_attributes(event, attributes, proposal) + def add_target_external(self, event, target, 
category='Targeting data', to_ids=True, comment=None, distribution=None, proposal=False): + return self.add_named_attribute(event, 'target-external', target, category, to_ids, comment, distribution, proposal) + + # ##### Attribution attributes ##### + + def add_threat_actor(self, event, target, category='Attribution', to_ids=True, comment=None, distribution=None, proposal=False): + return self.add_named_attribute(event, 'threat-actor', target, category, to_ids, comment, distribution, proposal) + + # ##### Internal reference attributes ##### + + def add_internal_link(self, event, reference, category='Internal reference', to_ids=False, comment=None, distribution=None, proposal=False): + return self.add_named_attribute(event, 'link', reference, category, to_ids, comment, distribution, proposal) + + def add_internal_comment(self, event, reference, category='Internal reference', to_ids=False, comment=None, distribution=None, proposal=False): + return self.add_named_attribute(event, 'comment', reference, category, to_ids, comment, distribution, proposal) + + def add_internal_text(self, event, reference, category='Internal reference', to_ids=False, comment=None, distribution=None, proposal=False): + return self.add_named_attribute(event, 'text', reference, category, to_ids, comment, distribution, proposal) + + def add_internal_other(self, event, reference, category='Internal reference', to_ids=False, comment=None, distribution=None, proposal=False): + return self.add_named_attribute(event, 'other', reference, category, to_ids, comment, distribution, proposal) # ################################################## # ######### Upload samples through the API ######### # ################################################## - def _create_event(self, distribution, threat_level_id, analysis, info): - # Setup details of a new event - if distribution not in [0, 1, 2, 3]: - raise NewEventError('{} is invalid, the distribution has to be in 0, 1, 2, 3'.format(distribution)) - if 
threat_level_id not in [0, 1, 2, 3]: - raise NewEventError('{} is invalid, the threat_level_id has to be in 0, 1, 2, 3'.format(threat_level_id)) - if analysis not in [0, 1, 2]: - raise NewEventError('{} is invalid, the analysis has to be in 0, 1, 2'.format(analysis)) - return {'distribution': int(distribution), 'info': info, - 'threat_level_id': int(threat_level_id), 'analysis': analysis} - - def prepare_attribute(self, event_id, distribution, to_ids, category, info, - analysis, threat_level_id): + def _prepare_upload(self, event_id, distribution, to_ids, category, comment, info, + analysis, threat_level_id): to_post = {'request': {}} - authorized_categs = ['Payload delivery', 'Artifacts dropped', 'Payload Installation', 'External Analysis'] if event_id is not None: try: @@ -554,35 +636,43 @@ class PyMISP(object): pass if not isinstance(event_id, int): # New event - to_post['request'] = self._create_event(distribution, threat_level_id, analysis, info) + misp_event = self._prepare_full_event(distribution, threat_level_id, analysis, info) + to_post['request']['distribution'] = misp_event.distribution + to_post['request']['info'] = misp_event.info + to_post['request']['analysis'] = misp_event.analysis + to_post['request']['threat_level_id'] = misp_event.threat_level_id else: to_post['request']['event_id'] = int(event_id) - if to_ids not in [True, False]: - raise NewAttributeError('{} is invalid, to_ids has to be True or False'.format(to_ids)) + default_values = self.sane_default['malware-sample'] + if to_ids is None or not isinstance(to_ids, bool): + to_ids = bool(int(default_values['to_ids'])) to_post['request']['to_ids'] = to_ids - if category not in authorized_categs: - raise NewAttributeError('{} is invalid, category has to be in {}'.format(category, (', '.join(authorized_categs)))) + if category is None or category not in self.categories: + category = default_values['default_category'] to_post['request']['category'] = category + to_post['request']['comment'] = 
comment return to_post def _encode_file_to_upload(self, path): with open(path, 'rb') as f: - return base64.b64encode(f.read()) + return str(base64.b64encode(f.read())) - def upload_sample(self, filename, filepath, event_id, distribution, to_ids, - category, info, analysis, threat_level_id): - to_post = self.prepare_attribute(event_id, distribution, to_ids, category, - info, analysis, threat_level_id) + def upload_sample(self, filename, filepath, event_id, distribution=None, + to_ids=True, category=None, comment=None, info=None, + analysis=None, threat_level_id=None): + to_post = self._prepare_upload(event_id, distribution, to_ids, category, + comment, info, analysis, threat_level_id) to_post['request']['files'] = [{'filename': filename, 'data': self._encode_file_to_upload(filepath)}] return self._upload_sample(to_post) - def upload_samplelist(self, filepaths, event_id, distribution, to_ids, category, - info, analysis, threat_level_id): - to_post = self.prepare_attribute(event_id, distribution, to_ids, category, - info, analysis, threat_level_id) + def upload_samplelist(self, filepaths, event_id, distribution=None, + to_ids=True, category=None, comment=None, info=None, + analysis=None, threat_level_id=None): + to_post = self._prepare_upload(event_id, distribution, to_ids, category, + comment, info, analysis, threat_level_id) files = [] for path in filepaths: if not os.path.isfile(path): @@ -592,7 +682,7 @@ class PyMISP(object): return self._upload_sample(to_post) def _upload_sample(self, to_post): - session = self.__prepare_session('json') + session = self.__prepare_session() url = urljoin(self.root_url, 'events/upload_sample') response = session.post(url, data=json.dumps(to_post)) return self._check_response(response) @@ -602,22 +692,18 @@ class PyMISP(object): # ############################ def __query_proposal(self, session, path, id, attribute=None): - path = path.strip('/') url = urljoin(self.root_url, 'shadow_attributes/{}/{}'.format(path, id)) - query = None 
if path in ['add', 'edit']: query = {'request': {'ShadowAttribute': attribute}} - if path == 'view': + response = session.post(url, data=json.dumps(query)) + elif path == 'view': response = session.get(url) - else: - if query is not None: - response = session.post(url, data=json.dumps(query)) - else: - response = session.post(url) + else: # accept or discard + response = session.post(url) return self._check_response(response) def proposal_view(self, event_id=None, proposal_id=None): - session = self.__prepare_session('json') + session = self.__prepare_session() if proposal_id is not None and event_id is not None: return {'error': 'You can only view an event ID or a proposal ID'} if event_id is not None: @@ -627,44 +713,112 @@ class PyMISP(object): return self.__query_proposal(session, 'view', id) def proposal_add(self, event_id, attribute): - session = self.__prepare_session('json') + session = self.__prepare_session() return self.__query_proposal(session, 'add', event_id, attribute) def proposal_edit(self, attribute_id, attribute): - session = self.__prepare_session('json') + session = self.__prepare_session() return self.__query_proposal(session, 'edit', attribute_id, attribute) def proposal_accept(self, proposal_id): - session = self.__prepare_session('json') + session = self.__prepare_session() return self.__query_proposal(session, 'accept', proposal_id) def proposal_discard(self, proposal_id): - session = self.__prepare_session('json') + session = self.__prepare_session() return self.__query_proposal(session, 'discard', proposal_id) + # ############################## + # ###### Attribute update ###### + # ############################## + + def change_toids(self, attribute_uuid, to_ids): + if to_ids not in [0, 1]: + raise Exception('to_ids can only be 0 or 1') + query = {"to_ids": to_ids} + session = self.__prepare_session() + return self.__query(session, 'edit/{}'.format(attribute_uuid), query, controller='attributes') + # ############################## # 
######## REST Search ######### # ############################## - def __query(self, session, path, query): + def __query(self, session, path, query, controller='events'): if query.get('error') is not None: return query - url = urljoin(self.root_url, 'events/{}'.format(path.lstrip('/'))) - query = {'request': query} + if controller not in ['events', 'attributes']: + raise Exception('Invalid controller. Can only be {}'.format(', '.join(['events', 'attributes']))) + url = urljoin(self.root_url, '{}/{}'.format(controller, path.lstrip('/'))) + if self.debug: + print('URL: ', url) + print('Query: ', query) response = session.post(url, data=json.dumps(query)) return self._check_response(response) + def search_index(self, published=None, eventid=None, tag=None, datefrom=None, + dateto=None, eventinfo=None, threatlevel=None, distribution=None, + analysis=None, attribute=None, org=None, to_ids=False, deleted=False): + """Search only at the index level. Use ! infront of value as NOT, default OR + + :param published: Published (0,1) + :param eventid: Evend ID(s) | str or list + :param tag: Tag(s) | str or list + :param datefrom: First date, in format YYYY-MM-DD + :param dateto: Last date, in format YYYY-MM-DD + :param eventinfo: Event info(s) to match | str or list + :param threatlevel: Threat level(s) (1,2,3,4) | str or list + :param distribution: Distribution level(s) (0,1,2,3) | str or list + :param analysis: Analysis level(s) (0,1,2) | str or list + :param org: Organisation(s) | str or list + :param to_ids: + - false (default): include all attributes, no matter the to_ids flag + - true: include only to_ids attributes + - "exclude": exclude attributes marked to_ids + :param deleted: + - false (default): only include non deleted attributes + - true: include deleted attributes + - "only": ONLY include deleted attributes + """ + allowed = {'published': published, 'eventid': eventid, 'tag': tag, 'Dateto': dateto, + 'Datefrom': datefrom, 'eventinfo': eventinfo, 'threatlevel': 
threatlevel, + 'distribution': distribution, 'analysis': analysis, 'attribute': attribute, + 'org': org, 'to_ids': to_ids, 'deleted': deleted} + rule_levels = {'distribution': ["0", "1", "2", "3", "!0", "!1", "!2", "!3"], + 'threatlevel': ["1", "2", "3", "4", "!1", "!2", "!3", "!4"], + 'analysis': ["0", "1", "2", "!0", "!1", "!2"], + 'to_ids': ['True', 'False', 'exclude'], + 'deleted': ['True', 'False', 'only'], + } + buildup_url = "events/index" + + for rule in allowed.keys(): + if allowed[rule] is not None: + if not isinstance(allowed[rule], list): + allowed[rule] = [allowed[rule]] + allowed[rule] = [x for x in map(str, allowed[rule])] + if rule in rule_levels: + if not set(allowed[rule]).issubset(rule_levels[rule]): + raise SearchError('Values in your {} are invalid, has to be in {}'.format(rule, ', '.join(str(x) for x in rule_levels[rule]))) + if type(allowed[rule]) == list: + joined = '|'.join(str(x) for x in allowed[rule]) + buildup_url += '/search{}:{}'.format(rule, joined) + else: + buildup_url += '/search{}:{}'.format(rule, allowed[rule]) + session = self.__prepare_session() + url = urljoin(self.root_url, buildup_url) + response = session.get(url) + return self._check_response(response) + def search_all(self, value): query = {'value': value, 'searchall': 1} - session = self.__prepare_session('json') + session = self.__prepare_session() return self.__query(session, 'restSearch/download', query) def __prepare_rest_search(self, values, not_values): - """ - Prepare a search, generate the chain processed by the server + """Prepare a search, generate the chain processed by the server - :param values: Values to search - :param not_values: Values that should not be in the response + :param values: Values to search + :param not_values: Values that should not be in the response """ to_return = '' if values is not None: @@ -685,24 +839,24 @@ class PyMISP(object): def search(self, values=None, not_values=None, type_attribute=None, category=None, org=None, tags=None, 
not_tags=None, date_from=None, - date_to=None, last=None): - """ - Search via the Rest API - - :param values: values to search for - :param not_values: values *not* to search for - :param type_attribute: Type of attribute - :param category: Category to search - :param org: Org reporting the event - :param tags: Tags to search for - :param not_tags: Tags *not* to search for - :param date_from: First date - :param date_to: Last date - :param last: Last updated events (for example 5d or 12h or 30m) + date_to=None, last=None, metadata=None, uuid=None, controller='events'): + """Search via the Rest API + :param values: values to search for + :param not_values: values *not* to search for + :param type_attribute: Type of attribute + :param category: Category to search + :param org: Org reporting the event + :param tags: Tags to search for + :param not_tags: Tags *not* to search for + :param date_from: First date + :param date_to: Last date + :param last: Last updated events (for example 5d or 12h or 30m) + :param metadata: return onlymetadata if True + :param uuid: a valid uuid """ - val = self.__prepare_rest_search(values, not_values).replace('/', '|') - tag = self.__prepare_rest_search(tags, not_tags).replace(':', ';') + val = self.__prepare_rest_search(values, not_values) + tag = self.__prepare_rest_search(tags, not_tags) query = {} if len(val) != 0: query['value'] = val @@ -726,24 +880,30 @@ class PyMISP(object): query['to'] = date_to if last is not None: query['last'] = last + if metadata is not None: + query['metadata'] = metadata + if uuid is not None: + if self._valid_uuid(uuid): + query['uuid'] = uuid + else: + return {'error': 'You must enter a valid uuid.'} - session = self.__prepare_session('json') - return self.__query(session, 'restSearch/download', query) + session = self.__prepare_session() + return self.__query(session, 'restSearch/download', query, controller) - def get_attachement(self, event_id): - """ - Get attachement of an event (not sample) + def 
get_attachment(self, event_id): + """Get attachement of an event (not sample) - :param event_id: Event id from where the attachements will - be fetched + :param event_id: Event id from where the attachements will be fetched """ attach = urljoin(self.root_url, 'attributes/downloadAttachment/download/{}'.format(event_id)) - session = self.__prepare_session('json') - return session.get(attach) + session = self.__prepare_session() + response = session.get(attach) + return self._check_response(response) def get_yara(self, event_id): to_post = {'request': {'eventid': event_id, 'type': 'yara'}} - session = self.__prepare_session('json') + session = self.__prepare_session() response = session.post(urljoin(self.root_url, 'attributes/restSearch'), data=json.dumps(to_post)) result = self._check_response(response) if result.get('error') is not None: @@ -755,7 +915,7 @@ class PyMISP(object): def download_samples(self, sample_hash=None, event_id=None, all_samples=False): to_post = {'request': {'hash': sample_hash, 'eventID': event_id, 'allSamples': all_samples}} - session = self.__prepare_session('json') + session = self.__prepare_session() response = session.post(urljoin(self.root_url, 'attributes/downloadSample'), data=json.dumps(to_post)) result = self._check_response(response) if result.get('error') is not None: @@ -770,10 +930,10 @@ class PyMISP(object): archive = zipfile.ZipFile(zipped) try: # New format - unzipped = BytesIO(archive.open(f['md5'], pwd='infected').read()) + unzipped = BytesIO(archive.open(f['md5'], pwd=b'infected').read()) except KeyError: # Old format - unzipped = BytesIO(archive.open(f['filename'], pwd='infected').read()) + unzipped = BytesIO(archive.open(f['filename'], pwd=b'infected').read()) details.append([f['event_id'], f['filename'], unzipped]) except zipfile.BadZipfile: # In case the sample isn't zipped @@ -782,37 +942,35 @@ class PyMISP(object): return True, details def download_last(self, last): - """ - Download the last updated events. 
+ """Download the last updated events. - :param last: can be defined in days, hours, minutes (for example 5d or 12h or 30m) + :param last: can be defined in days, hours, minutes (for example 5d or 12h or 30m) """ return self.search(last=last) # ############## Suricata ############### def download_all_suricata(self): - """ - Download all suricata rules events. - """ + """Download all suricata rules events.""" suricata_rules = urljoin(self.root_url, 'events/nids/suricata/download') session = self.__prepare_session('rules') - return session.get(suricata_rules) + response = session.get(suricata_rules) + return response def download_suricata_rule_event(self, event_id): - """ - Download one suricata rule event. + """Download one suricata rule event. - :param event_id: ID of the event to download (same as get) + :param event_id: ID of the event to download (same as get) """ template = urljoin(self.root_url, 'events/nids/suricata/download/{}'.format(event_id)) session = self.__prepare_session('rules') - return session.get(template) + response = session.get(template) + return response # ########## Tags ########## def get_all_tags(self, quiet=False): - session = self.__prepare_session('json') + session = self.__prepare_session() url = urljoin(self.root_url, 'tags') response = session.get(url) r = self._check_response(response) @@ -824,18 +982,21 @@ class PyMISP(object): to_return.append(tag['name']) return to_return + def new_tag(self, name=None, colour="#00ace6", exportable=False): + to_post = {'Tag': {'name': name, 'colour': colour, 'exportable': exportable}} + session = self.__prepare_session() + url = urljoin(self.root_url, 'tags/add') + response = session.post(url, data=json.dumps(to_post)) + return self._check_response(response) + # ########## Version ########## def get_api_version(self): - """ - Returns the current version of PyMISP installed on the system - """ + """Returns the current version of PyMISP installed on the system""" return {'version': __version__} def 
get_api_version_master(self): - """ - Get the most recent version of PyMISP from github - """ + """Get the most recent version of PyMISP from github""" r = requests.get('https://raw.githubusercontent.com/MISP/PyMISP/master/pymisp/__init__.py') if r.status_code == 200: version = re.findall("__version__ = '(.*)'", r.text) @@ -844,47 +1005,206 @@ class PyMISP(object): return {'error': 'Impossible to retrieve the version of the master branch.'} def get_version(self): - """ - Returns the version of the instance. - """ - session = self.__prepare_session('json') - url = urljoin(self.root_url, 'servers/getVersion') + """Returns the version of the instance.""" + session = self.__prepare_session() + url = urljoin(self.root_url, 'servers/getVersion.json') response = session.get(url) return self._check_response(response) def get_version_master(self): - """ - Get the most recent version from github - """ - r = requests.get('https://raw.githubusercontent.com/MISP/MISP/master/VERSION.json') + """Get the most recent version from github""" + r = requests.get('https://raw.githubusercontent.com/MISP/MISP/2.4/VERSION.json') if r.status_code == 200: master_version = json.loads(r.text) return {'version': '{}.{}.{}'.format(master_version['major'], master_version['minor'], master_version['hotfix'])} else: return {'error': 'Impossible to retrieve the version of the master branch.'} - # ############## Deprecated (Pure XML API should not be used) ################## + # ############## Export Attributes in text #################################### - @deprecated - def download_all(self): - """ - Download all event from the instance - """ - xml = urljoin(self.root_url, 'events/xml/download') - session = self.__prepare_session('xml') - return session.get(xml) + def get_all_attributes_txt(self, type_attr): + """Get all attributes from a specific type as plain text. 
Only published and IDS flagged attributes are exported.""" + session = self.__prepare_session('txt') + url = urljoin(self.root_url, 'attributes/text/download/%s' % type_attr) + response = session.get(url) + return response - @deprecated - def download(self, event_id, with_attachement=False): - """ - Download one event in XML + # ############## Statistics ################## - :param event_id: Event id of the event to download (same as get) - """ - if with_attachement: - attach = 'true' + def get_attributes_statistics(self, context='type', percentage=None): + """Get attributes statistics from the MISP instance""" + session = self.__prepare_session() + if (context != 'category'): + context = 'type' + if percentage is not None: + url = urljoin(self.root_url, 'attributes/attributeStatistics/{}/{}'.format(context, percentage)) else: - attach = 'false' - template = urljoin(self.root_url, 'events/xml/download/{}/{}'.format(event_id, attach)) - session = self.__prepare_session('xml') - return session.get(template) + url = urljoin(self.root_url, 'attributes/attributeStatistics/{}'.format(context)) + response = session.get(url) + return self._check_response(response) + + def get_tags_statistics(self, percentage=None, name_sort=None): + """Get tags statistics from the MISP instance""" + session = self.__prepare_session() + if percentage is not None: + percentage = 'true' + else: + percentage = 'false' + if name_sort is not None: + name_sort = 'true' + else: + name_sort = 'false' + url = urljoin(self.root_url, 'tags/tagStatistics/{}/{}'.format(percentage, name_sort)) + response = session.get(url) + return self._check_response(response) + + # ############## Sightings ################## + + def sighting_per_id(self, attribute_id): + session = self.__prepare_session() + url = urljoin(self.root_url, 'sightings/add/{}'.format(attribute_id)) + response = session.post(url) + return self._check_response(response) + + def sighting_per_uuid(self, attribute_uuid): + session = 
self.__prepare_session() + url = urljoin(self.root_url, 'sightings/add/{}'.format(attribute_uuid)) + response = session.post(url) + return self._check_response(response) + + def sighting_per_json(self, json_file): + session = self.__prepare_session() + jdata = json.load(open(json_file)) + url = urljoin(self.root_url, 'sightings/add/') + response = session.post(url, data=json.dumps(jdata)) + return self._check_response(response) + + # ############## Sharing Groups ################## + + def get_sharing_groups(self): + session = self.__prepare_session() + url = urljoin(self.root_url, 'sharing_groups.json') + response = session.get(url) + return self._check_response(response)['response'] + + # ############## Users ################## + + def _set_user_parameters(self, email, org_id, role_id, password, external_auth_required, + external_auth_key, enable_password, nids_sid, server_id, + gpgkey, certif_public, autoalert, contactalert, disabled, + change_pw, termsaccepted, newsread): + user = {} + if email is not None: + user['email'] = email + if org_id is not None: + user['org_id'] = org_id + if role_id is not None: + user['role_id'] = role_id + if password is not None: + user['password'] = password + if external_auth_required is not None: + user['external_auth_required'] = external_auth_required + if external_auth_key is not None: + user['external_auth_key'] = external_auth_key + if enable_password is not None: + user['enable_password'] = enable_password + if nids_sid is not None: + user['nids_sid'] = nids_sid + if server_id is not None: + user['server_id'] = server_id + if gpgkey is not None: + user['gpgkey'] = gpgkey + if certif_public is not None: + user['certif_public'] = certif_public + if autoalert is not None: + user['autoalert'] = autoalert + if contactalert is not None: + user['contactalert'] = contactalert + if disabled is not None: + user['disabled'] = disabled + if change_pw is not None: + user['change_pw'] = change_pw + if termsaccepted is not None: + 
user['termsaccepted'] = termsaccepted + if newsread is not None: + user['newsread'] = newsread + return user + + def get_users_list(self): + session = self.__prepare_session() + url = urljoin(self.root_url, 'admin/users') + response = session.get(url) + return self._check_response(response)['response'] + + def get_user(self, user_id): + session = self.__prepare_session() + url = urljoin(self.root_url, 'admin/users/view/{}'.format(user_id)) + response = session.get(url) + return self._check_response(response) + + def add_user(self, email, org_id, role_id, password=None, + external_auth_required=None, external_auth_key=None, + enable_password=None, nids_sid=None, server_id=None, + gpgkey=None, certif_public=None, autoalert=None, + contactalert=None, disabled=None, change_pw=None, + termsaccepted=None, newsread=None): + new_user = self._set_user_parameters(email, org_id, role_id, password, + external_auth_required, external_auth_key, + enable_password, nids_sid, server_id, + gpgkey, certif_public, autoalert, + contactalert, disabled, change_pw, + termsaccepted, newsread) + session = self.__prepare_session() + url = urljoin(self.root_url, 'admin/users/add/') + response = session.post(url, data=json.dumps(new_user)) + return self._check_response(response) + + def add_user_json(self, json_file): + session = self.__prepare_session() + jdata = json.load(open(json_file)) + url = urljoin(self.root_url, 'admin/users/add/') + response = session.post(url, data=json.dumps(jdata)) + return self._check_response(response) + + def get_add_user_fields_list(self): + session = self.__prepare_session() + url = urljoin(self.root_url, 'admin/users/add/') + response = session.get(url) + return self._check_response(response) + + def edit_user(self, user_id, email=None, org_id=None, role_id=None, + password=None, external_auth_required=None, + external_auth_key=None, enable_password=None, nids_sid=None, + server_id=None, gpgkey=None, certif_public=None, + autoalert=None, contactalert=None, 
disabled=None, + change_pw=None, termsaccepted=None, newsread=None): + edit_user = self._set_user_parameters(email, org_id, role_id, password, + external_auth_required, external_auth_key, + enable_password, nids_sid, server_id, + gpgkey, certif_public, autoalert, + contactalert, disabled, change_pw, + termsaccepted, newsread) + + session = self.__prepare_session() + url = urljoin(self.root_url, 'admin/users/edit/{}'.format(user_id)) + response = session.post(url, data=json.dumps(edit_user)) + return self._check_response(response) + + def edit_user_json(self, json_file, user_id): + session = self.__prepare_session() + jdata = json.load(open(json_file)) + url = urljoin(self.root_url, 'admin/users/edit/{}'.format(user_id)) + response = session.post(url, data=json.dumps(jdata)) + return self._check_response(response) + + def get_edit_user_fields_list(self, user_id): + session = self.__prepare_session() + url = urljoin(self.root_url, 'admin/users/edit/{}'.format(user_id)) + response = session.get(url) + return self._check_response(response) + + def delete_user(self, user_id): + session = self.__prepare_session() + url = urljoin(self.root_url, 'admin/users/delete/{}'.format(user_id)) + response = session.post(url) + return self._check_response(response) diff --git a/pymisp/data/describeTypes.json b/pymisp/data/describeTypes.json new file mode 100644 index 0000000..820341e --- /dev/null +++ b/pymisp/data/describeTypes.json @@ -0,0 +1,706 @@ +{ + "result": { + "sane_defaults": { + "md5": { + "default_category": "Payload delivery", + "to_ids": 1 + }, + "sha1": { + "default_category": "Payload delivery", + "to_ids": 1 + }, + "sha256": { + "default_category": "Payload delivery", + "to_ids": 1 + }, + "filename": { + "default_category": "Payload delivery", + "to_ids": 1 + }, + "pdb": { + "default_category": "Artifacts dropped", + "to_ids": 0 + }, + "filename|md5": { + "default_category": "Payload delivery", + "to_ids": 1 + }, + "filename|sha1": { + "default_category": "Payload 
delivery", + "to_ids": 1 + }, + "filename|sha256": { + "default_category": "Payload delivery", + "to_ids": 1 + }, + "ip-src": { + "default_category": "Network activity", + "to_ids": 1 + }, + "ip-dst": { + "default_category": "Network activity", + "to_ids": 1 + }, + "hostname": { + "default_category": "Network activity", + "to_ids": 1 + }, + "domain": { + "default_category": "Network activity", + "to_ids": 1 + }, + "domain|ip": { + "default_category": "Network activity", + "to_ids": 1 + }, + "email-src": { + "default_category": "Payload delivery", + "to_ids": 1 + }, + "email-dst": { + "default_category": "Network activity", + "to_ids": 1 + }, + "email-subject": { + "default_category": "Payload delivery", + "to_ids": 0 + }, + "email-attachment": { + "default_category": "Payload delivery", + "to_ids": 1 + }, + "url": { + "default_category": "External analysis", + "to_ids": 1 + }, + "http-method": { + "default_category": "Network activity", + "to_ids": 0 + }, + "user-agent": { + "default_category": "Network activity", + "to_ids": 0 + }, + "regkey": { + "default_category": "Persistence mechanism", + "to_ids": 1 + }, + "regkey|value": { + "default_category": "Persistence mechanism", + "to_ids": 1 + }, + "AS": { + "default_category": "Network activity", + "to_ids": 0 + }, + "snort": { + "default_category": "Network activity", + "to_ids": 1 + }, + "pattern-in-file": { + "default_category": "Payload installation", + "to_ids": 1 + }, + "pattern-in-traffic": { + "default_category": "Network activity", + "to_ids": 1 + }, + "pattern-in-memory": { + "default_category": "Payload installation", + "to_ids": 1 + }, + "yara": { + "default_category": "Payload installation", + "to_ids": 1 + }, + "vulnerability": { + "default_category": "External analysis", + "to_ids": 0 + }, + "attachment": { + "default_category": "External analysis", + "to_ids": 0 + }, + "malware-sample": { + "default_category": "Payload delivery", + "to_ids": 1 + }, + "link": { + "default_category": "External 
analysis", + "to_ids": 0 + }, + "comment": { + "default_category": "Other", + "to_ids": 0 + }, + "text": { + "default_category": "Other", + "to_ids": 0 + }, + "other": { + "default_category": "Other", + "to_ids": 0 + }, + "named pipe": { + "default_category": "Artifacts dropped", + "to_ids": 0 + }, + "mutex": { + "default_category": "Artifacts dropped", + "to_ids": 1 + }, + "target-user": { + "default_category": "Targeting data", + "to_ids": 0 + }, + "target-email": { + "default_category": "Targeting data", + "to_ids": 0 + }, + "target-machine": { + "default_category": "Targeting data", + "to_ids": 0 + }, + "target-org": { + "default_category": "Targeting data", + "to_ids": 0 + }, + "target-location": { + "default_category": "Targeting data", + "to_ids": 0 + }, + "target-external": { + "default_category": "Targeting data", + "to_ids": 0 + }, + "btc": { + "default_category": "Financial fraud", + "to_ids": 1 + }, + "iban": { + "default_category": "Financial fraud", + "to_ids": 1 + }, + "bic": { + "default_category": "Financial fraud", + "to_ids": 1 + }, + "bank-account-nr": { + "default_category": "Financial fraud", + "to_ids": 1 + }, + "aba-rtn": { + "default_category": "Financial fraud", + "to_ids": 1 + }, + "bin": { + "default_category": "Financial fraud", + "to_ids": 1 + }, + "cc-number": { + "default_category": "Financial fraud", + "to_ids": 1 + }, + "prtn": { + "default_category": "Financial fraud", + "to_ids": 1 + }, + "threat-actor": { + "default_category": "Attribution", + "to_ids": 0 + }, + "campaign-name": { + "default_category": "Attribution", + "to_ids": 0 + }, + "campaign-id": { + "default_category": "Attribution", + "to_ids": 0 + }, + "malware-type": { + "default_category": "Payload delivery", + "to_ids": 0 + }, + "uri": { + "default_category": "Network activity", + "to_ids": 1 + }, + "authentihash": { + "default_category": "Payload delivery", + "to_ids": 1 + }, + "ssdeep": { + "default_category": "Payload delivery", + "to_ids": 1 + }, + "imphash": { + 
"default_category": "Payload delivery", + "to_ids": 1 + }, + "pehash": { + "default_category": "Payload delivery", + "to_ids": 1 + }, + "sha224": { + "default_category": "Payload delivery", + "to_ids": 1 + }, + "sha384": { + "default_category": "Payload delivery", + "to_ids": 1 + }, + "sha512": { + "default_category": "Payload delivery", + "to_ids": 1 + }, + "sha512/224": { + "default_category": "Payload delivery", + "to_ids": 1 + }, + "sha512/256": { + "default_category": "Payload delivery", + "to_ids": 1 + }, + "tlsh": { + "default_category": "Payload delivery", + "to_ids": 1 + }, + "filename|authentihash": { + "default_category": "Payload delivery", + "to_ids": 1 + }, + "filename|ssdeep": { + "default_category": "Payload delivery", + "to_ids": 1 + }, + "filename|imphash": { + "default_category": "Payload delivery", + "to_ids": 1 + }, + "filename|pehash": { + "default_category": "Payload delivery", + "to_ids": 1 + }, + "filename|sha224": { + "default_category": "Payload delivery", + "to_ids": 1 + }, + "filename|sha384": { + "default_category": "Payload delivery", + "to_ids": 1 + }, + "filename|sha512": { + "default_category": "Payload delivery", + "to_ids": 1 + }, + "filename|sha512/224": { + "default_category": "Payload delivery", + "to_ids": 1 + }, + "filename|sha512/256": { + "default_category": "Payload delivery", + "to_ids": 1 + }, + "filename|tlsh": { + "default_category": "Payload delivery", + "to_ids": 1 + }, + "windows-scheduled-task": { + "default_category": "Artifacts dropped", + "to_ids": 0 + }, + "windows-service-name": { + "default_category": "Artifacts dropped", + "to_ids": 0 + }, + "windows-service-displayname": { + "default_category": "Artifacts dropped", + "to_ids": 0 + }, + "whois-registrant-email": { + "default_category": "Attribution", + "to_ids": 0 + }, + "whois-registrant-phone": { + "default_category": "Attribution", + "to_ids": 0 + }, + "whois-registrant-name": { + "default_category": "Attribution", + "to_ids": 0 + }, + "whois-registrar": 
{ + "default_category": "Attribution", + "to_ids": 0 + }, + "whois-creation-date": { + "default_category": "Attribution", + "to_ids": 0 + }, + "x509-fingerprint-sha1": { + "default_category": "Network activity", + "to_ids": 1 + } + }, + "types": [ + "md5", + "sha1", + "sha256", + "filename", + "pdb", + "filename|md5", + "filename|sha1", + "filename|sha256", + "ip-src", + "ip-dst", + "hostname", + "domain", + "domain|ip", + "email-src", + "email-dst", + "email-subject", + "email-attachment", + "url", + "http-method", + "user-agent", + "regkey", + "regkey|value", + "AS", + "snort", + "pattern-in-file", + "pattern-in-traffic", + "pattern-in-memory", + "yara", + "vulnerability", + "attachment", + "malware-sample", + "link", + "comment", + "text", + "other", + "named pipe", + "mutex", + "target-user", + "target-email", + "target-machine", + "target-org", + "target-location", + "target-external", + "btc", + "iban", + "bic", + "bank-account-nr", + "aba-rtn", + "bin", + "cc-number", + "prtn", + "threat-actor", + "campaign-name", + "campaign-id", + "malware-type", + "uri", + "authentihash", + "ssdeep", + "imphash", + "pehash", + "sha224", + "sha384", + "sha512", + "sha512/224", + "sha512/256", + "tlsh", + "filename|authentihash", + "filename|ssdeep", + "filename|imphash", + "filename|pehash", + "filename|sha224", + "filename|sha384", + "filename|sha512", + "filename|sha512/224", + "filename|sha512/256", + "filename|tlsh", + "windows-scheduled-task", + "windows-service-name", + "windows-service-displayname", + "whois-registrant-email", + "whois-registrant-phone", + "whois-registrant-name", + "whois-registrar", + "whois-creation-date", + "x509-fingerprint-sha1" + ], + "categories": [ + "Internal reference", + "Targeting data", + "Antivirus detection", + "Payload delivery", + "Artifacts dropped", + "Payload installation", + "Persistence mechanism", + "Network activity", + "Payload type", + "Attribution", + "External analysis", + "Financial fraud", + "Other" + ], + 
"category_type_mappings": { + "Internal reference": [ + "text", + "link", + "comment", + "other" + ], + "Targeting data": [ + "target-user", + "target-email", + "target-machine", + "target-org", + "target-location", + "target-external", + "comment" + ], + "Antivirus detection": [ + "link", + "comment", + "text", + "attachment", + "other" + ], + "Payload delivery": [ + "md5", + "sha1", + "sha224", + "sha256", + "sha384", + "sha512", + "sha512/224", + "sha512/256", + "ssdeep", + "imphash", + "authentihash", + "pehash", + "tlsh", + "filename", + "filename|md5", + "filename|sha1", + "filename|sha224", + "filename|sha256", + "filename|sha384", + "filename|sha512", + "filename|sha512/224", + "filename|sha512/256", + "filename|authentihash", + "filename|ssdeep", + "filename|tlsh", + "filename|imphash", + "filename|pehash", + "ip-src", + "ip-dst", + "hostname", + "domain", + "email-src", + "email-dst", + "email-subject", + "email-attachment", + "url", + "user-agent", + "AS", + "pattern-in-file", + "pattern-in-traffic", + "yara", + "attachment", + "malware-sample", + "link", + "malware-type", + "comment", + "text", + "vulnerability", + "x509-fingerprint-sha1", + "other" + ], + "Artifacts dropped": [ + "md5", + "sha1", + "sha224", + "sha256", + "sha384", + "sha512", + "sha512/224", + "sha512/256", + "ssdeep", + "imphash", + "authentihash", + "filename", + "filename|md5", + "filename|sha1", + "filename|sha224", + "filename|sha256", + "filename|sha384", + "filename|sha512", + "filename|sha512/224", + "filename|sha512/256", + "filename|authentihash", + "filename|ssdeep", + "filename|tlsh", + "filename|imphash", + "filename|pehash", + "regkey", + "regkey|value", + "pattern-in-file", + "pattern-in-memory", + "pdb", + "yara", + "attachment", + "malware-sample", + "named pipe", + "mutex", + "windows-scheduled-task", + "windows-service-name", + "windows-service-displayname", + "comment", + "text", + "x509-fingerprint-sha1", + "other" + ], + "Payload installation": [ + "md5", + 
"sha1", + "sha224", + "sha256", + "sha384", + "sha512", + "sha512/224", + "sha512/256", + "ssdeep", + "imphash", + "authentihash", + "pehash", + "tlsh", + "filename", + "filename|md5", + "filename|sha1", + "filename|sha224", + "filename|sha256", + "filename|sha384", + "filename|sha512", + "filename|sha512/224", + "filename|sha512/256", + "filename|authentihash", + "filename|ssdeep", + "filename|tlsh", + "filename|imphash", + "filename|pehash", + "pattern-in-file", + "pattern-in-traffic", + "pattern-in-memory", + "yara", + "vulnerability", + "attachment", + "malware-sample", + "malware-type", + "comment", + "text", + "x509-fingerprint-sha1", + "other" + ], + "Persistence mechanism": [ + "filename", + "regkey", + "regkey|value", + "comment", + "text", + "other" + ], + "Network activity": [ + "ip-src", + "ip-dst", + "hostname", + "domain", + "domain|ip", + "email-dst", + "url", + "uri", + "user-agent", + "http-method", + "AS", + "snort", + "pattern-in-file", + "pattern-in-traffic", + "attachment", + "comment", + "text", + "x509-fingerprint-sha1", + "other" + ], + "Payload type": [ + "comment", + "text", + "other" + ], + "Attribution": [ + "threat-actor", + "campaign-name", + "campaign-id", + "whois-registrant-phone", + "whois-registrant-email", + "whois-registrant-name", + "whois-registrar", + "whois-creation-date", + "comment", + "text", + "x509-fingerprint-sha1", + "other" + ], + "External analysis": [ + "md5", + "sha1", + "sha256", + "filename", + "filename|md5", + "filename|sha1", + "filename|sha256", + "ip-src", + "ip-dst", + "hostname", + "domain", + "domain|ip", + "url", + "user-agent", + "regkey", + "regkey|value", + "AS", + "snort", + "pattern-in-file", + "pattern-in-traffic", + "pattern-in-memory", + "vulnerability", + "attachment", + "malware-sample", + "link", + "comment", + "text", + "x509-fingerprint-sha1", + "other" + ], + "Financial fraud": [ + "btc", + "iban", + "bic", + "bank-account-nr", + "aba-rtn", + "bin", + "cc-number", + "prtn", + "comment", + 
"text", + "other" + ], + "Other": [ + "comment", + "text", + "other" + ] + } + } +} diff --git a/pymisp/data/schema-lax.json b/pymisp/data/schema-lax.json new file mode 100644 index 0000000..dd4af2f --- /dev/null +++ b/pymisp/data/schema-lax.json @@ -0,0 +1,321 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json", + "type": "object", + "properties": { + "Event": { + "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event", + "type": "object", + "properties": { + "id": { + "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/id", + "type": "string" + }, + "orgc_id": { + "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/orgc_id", + "type": "string" + }, + "org_id": { + "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/org_id", + "type": "string" + }, + "date": { + "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/date", + "type": "string" + }, + "threat_level_id": { + "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/threat_level_id", + "type": "string" + }, + "info": { + "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/info", + "type": "string" + }, + "published": { + "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/published", + "type": "boolean" + }, + "uuid": { + "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/uuid", + "type": "string" + }, + "attribute_count": { + "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/attribute_count", + "type": "string" + }, + "analysis": { + "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/analysis", + "type": "string" + }, + "timestamp": { + "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/timestamp", + "type": "string" + }, + "distribution": { + "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/distribution", + 
"type": "string" + }, + "proposal_email_lock": { + "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/proposal_email_lock", + "type": "boolean" + }, + "locked": { + "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/locked", + "type": "boolean" + }, + "publish_timestamp": { + "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/publish_timestamp", + "type": "string" + }, + "sharing_group_id": { + "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/sharing_group_id", + "type": "string" + }, + "Org": { + "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/Org", + "type": "object", + "properties": { + "id": { + "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/Org/id", + "type": "string" + }, + "name": { + "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/Org/name", + "type": "string" + }, + "uuid": { + "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/Org/uuid", + "type": "string" + } + } + }, + "Orgc": { + "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/Orgc", + "type": "object", + "properties": { + "id": { + "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/Orgc/id", + "type": "string" + }, + "name": { + "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/Orgc/name", + "type": "string" + }, + "uuid": { + "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/Orgc/uuid", + "type": "string" + } + } + }, + "Attribute": { + "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/Attribute", + "type": "array", + "items": { + "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/Attribute/17", + "type": "object", + "properties": { + "id": { + "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/Attribute/17/id", + "type": "string" + }, + "type": { + "id": 
"https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/Attribute/17/type", + "type": "string" + }, + "category": { + "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/Attribute/17/category", + "type": "string" + }, + "to_ids": { + "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/Attribute/17/to_ids", + "type": "boolean" + }, + "uuid": { + "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/Attribute/17/uuid", + "type": "string" + }, + "event_id": { + "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/Attribute/17/event_id", + "type": "string" + }, + "distribution": { + "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/Attribute/17/distribution", + "type": "string" + }, + "timestamp": { + "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/Attribute/17/timestamp", + "type": "string" + }, + "comment": { + "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/Attribute/17/comment", + "type": "string" + }, + "sharing_group_id": { + "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/Attribute/17/sharing_group_id", + "type": "string" + }, + "value": { + "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/Attribute/17/value", + "type": "string" + }, + "SharingGroup": { + "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/Attribute/17/SharingGroup", + "type": "array", + "items": {}, + "additionalItems": false + }, + "ShadowAttribute": { + "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/Attribute/17/ShadowAttribute", + "type": "array", + "items": {}, + "additionalItems": false + } + } + }, + "additionalItems": false + }, + "ShadowAttribute": { + "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/ShadowAttribute", + "type": "array", + "items": {}, + "additionalItems": false + }, + "RelatedEvent": { + "id": 
"https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/RelatedEvent", + "type": "array", + "items": { + "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/RelatedEvent/0", + "type": "object", + "properties": { + "Org": { + "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/RelatedEvent/0/Org", + "type": "object", + "properties": { + "id": { + "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/RelatedEvent/0/Org/id", + "type": "string" + }, + "name": { + "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/RelatedEvent/0/Org/name", + "type": "string" + }, + "uuid": { + "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/RelatedEvent/0/Org/uuid", + "type": "string" + } + } + }, + "Orgc": { + "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/RelatedEvent/0/Orgc", + "type": "object", + "properties": { + "id": { + "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/RelatedEvent/0/Orgc/id", + "type": "string" + }, + "name": { + "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/RelatedEvent/0/Orgc/name", + "type": "string" + }, + "uuid": { + "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/RelatedEvent/0/Orgc/uuid", + "type": "string" + } + } + }, + "Event": { + "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/RelatedEvent/0/Event", + "type": "object", + "items": { + "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/RelatedEvent/0/Event/0", + "type": "object", + "properties": { + "id": { + "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/RelatedEvent/0/Event/0/id", + "type": "string" + }, + "date": { + "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/RelatedEvent/0/Event/0/date", + "type": "string" + }, + "threat_level_id": { + "id": 
"https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/RelatedEvent/0/Event/0/threat_level_id", + "type": "string" + }, + "info": { + "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/RelatedEvent/0/Event/0/info", + "type": "string" + }, + "published": { + "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/RelatedEvent/0/Event/0/published", + "type": "boolean" + }, + "uuid": { + "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/RelatedEvent/0/Event/0/uuid", + "type": "string" + }, + "analysis": { + "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/RelatedEvent/0/Event/0/analysis", + "type": "string" + }, + "timestamp": { + "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/RelatedEvent/0/Event/0/timestamp", + "type": "string" + }, + "distribution": { + "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/RelatedEvent/0/Event/0/distribution", + "type": "string" + }, + "org_id": { + "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/RelatedEvent/0/Event/0/org_id", + "type": "string" + }, + "orgc_id": { + "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/RelatedEvent/0/Event/0/orgc_id", + "type": "string" + } + } + }, + "additionalItems": false + } + } + }, + "additionalItems": false + }, + "Tag": { + "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/Tag", + "type": "array", + "items": { + "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/Tag/2", + "type": "object", + "properties": { + "id": { + "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/Tag/2/id", + "type": "string" + }, + "name": { + "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/Tag/2/name", + "type": "string" + }, + "colour": { + "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/Tag/2/colour", + "type": "string" + }, + "exportable": { + "id": 
"https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/Tag/2/exportable", + "type": "boolean" + } + } + }, + "additionalItems": false + } + }, + "required": [ + "info" + ] + } + }, + "required": [ + "Event" + ] +} diff --git a/pymisp/data/schema.json b/pymisp/data/schema.json new file mode 100644 index 0000000..85f3e3d --- /dev/null +++ b/pymisp/data/schema.json @@ -0,0 +1,327 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json", + "type": "object", + "properties": { + "Event": { + "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event", + "type": "object", + "properties": { + "id": { + "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/id", + "type": "string" + }, + "orgc_id": { + "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/orgc_id", + "type": "string" + }, + "org_id": { + "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/org_id", + "type": "string" + }, + "date": { + "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/date", + "type": "string" + }, + "threat_level_id": { + "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/threat_level_id", + "type": "string" + }, + "info": { + "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/info", + "type": "string" + }, + "published": { + "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/published", + "type": "boolean" + }, + "uuid": { + "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/uuid", + "type": "string" + }, + "attribute_count": { + "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/attribute_count", + "type": "string" + }, + "analysis": { + "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/analysis", + "type": "string" + }, + "timestamp": { + "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/timestamp", + "type": 
"string" + }, + "distribution": { + "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/distribution", + "type": "string" + }, + "proposal_email_lock": { + "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/proposal_email_lock", + "type": "boolean" + }, + "locked": { + "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/locked", + "type": "boolean" + }, + "publish_timestamp": { + "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/publish_timestamp", + "type": "string" + }, + "sharing_group_id": { + "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/sharing_group_id", + "type": "string" + }, + "Org": { + "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/Org", + "type": "object", + "properties": { + "id": { + "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/Org/id", + "type": "string" + }, + "name": { + "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/Org/name", + "type": "string" + }, + "uuid": { + "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/Org/uuid", + "type": "string" + } + } + }, + "Orgc": { + "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/Orgc", + "type": "object", + "properties": { + "id": { + "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/Orgc/id", + "type": "string" + }, + "name": { + "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/Orgc/name", + "type": "string" + }, + "uuid": { + "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/Orgc/uuid", + "type": "string" + } + } + }, + "Attribute": { + "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/Attribute", + "type": "array", + "items": { + "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/Attribute/17", + "type": "object", + "properties": { + "id": { + "id": 
"https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/Attribute/17/id", + "type": "string" + }, + "type": { + "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/Attribute/17/type", + "type": "string" + }, + "category": { + "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/Attribute/17/category", + "type": "string" + }, + "to_ids": { + "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/Attribute/17/to_ids", + "type": "boolean" + }, + "uuid": { + "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/Attribute/17/uuid", + "type": "string" + }, + "event_id": { + "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/Attribute/17/event_id", + "type": "string" + }, + "distribution": { + "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/Attribute/17/distribution", + "type": "string" + }, + "timestamp": { + "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/Attribute/17/timestamp", + "type": "string" + }, + "comment": { + "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/Attribute/17/comment", + "type": "string" + }, + "sharing_group_id": { + "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/Attribute/17/sharing_group_id", + "type": "string" + }, + "value": { + "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/Attribute/17/value", + "type": "string" + }, + "SharingGroup": { + "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/Attribute/17/SharingGroup", + "type": "array", + "items": {}, + "additionalItems": false + }, + "ShadowAttribute": { + "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/Attribute/17/ShadowAttribute", + "type": "array", + "items": {}, + "additionalItems": false + } + } + }, + "additionalItems": false + }, + "ShadowAttribute": { + "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/ShadowAttribute", + "type": 
"array", + "items": {}, + "additionalItems": false + }, + "RelatedEvent": { + "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/RelatedEvent", + "type": "array", + "items": { + "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/RelatedEvent/0", + "type": "object", + "properties": { + "Org": { + "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/RelatedEvent/0/Org", + "type": "object", + "properties": { + "id": { + "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/RelatedEvent/0/Org/id", + "type": "string" + }, + "name": { + "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/RelatedEvent/0/Org/name", + "type": "string" + }, + "uuid": { + "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/RelatedEvent/0/Org/uuid", + "type": "string" + } + } + }, + "Orgc": { + "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/RelatedEvent/0/Orgc", + "type": "object", + "properties": { + "id": { + "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/RelatedEvent/0/Orgc/id", + "type": "string" + }, + "name": { + "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/RelatedEvent/0/Orgc/name", + "type": "string" + }, + "uuid": { + "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/RelatedEvent/0/Orgc/uuid", + "type": "string" + } + } + }, + "Event": { + "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/RelatedEvent/0/Event", + "type": "object", + "items": { + "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/RelatedEvent/0/Event/0", + "type": "object", + "properties": { + "id": { + "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/RelatedEvent/0/Event/0/id", + "type": "string" + }, + "date": { + "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/RelatedEvent/0/Event/0/date", + "type": "string" + }, + "threat_level_id": { + "id": 
"https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/RelatedEvent/0/Event/0/threat_level_id", + "type": "string" + }, + "info": { + "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/RelatedEvent/0/Event/0/info", + "type": "string" + }, + "published": { + "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/RelatedEvent/0/Event/0/published", + "type": "boolean" + }, + "uuid": { + "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/RelatedEvent/0/Event/0/uuid", + "type": "string" + }, + "analysis": { + "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/RelatedEvent/0/Event/0/analysis", + "type": "string" + }, + "timestamp": { + "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/RelatedEvent/0/Event/0/timestamp", + "type": "string" + }, + "distribution": { + "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/RelatedEvent/0/Event/0/distribution", + "type": "string" + }, + "org_id": { + "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/RelatedEvent/0/Event/0/org_id", + "type": "string" + }, + "orgc_id": { + "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/RelatedEvent/0/Event/0/orgc_id", + "type": "string" + } + } + }, + "additionalItems": false + } + } + }, + "additionalItems": false + }, + "Tag": { + "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/Tag", + "type": "array", + "items": { + "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/Tag/2", + "type": "object", + "properties": { + "id": { + "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/Tag/2/id", + "type": "string" + }, + "name": { + "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/Tag/2/name", + "type": "string" + }, + "colour": { + "id": "https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/Tag/2/colour", + "type": "string" + }, + "exportable": { + "id": 
"https://www.github.com/MISP/MISP/format/2.4/schema.json/Event/Tag/2/exportable", + "type": "boolean" + } + } + }, + "additionalItems": false + } + }, + "required": [ + "date", + "threat_level_id", + "info", + "published", + "analysis", + "distribution", + "Attribute" + ] + } + }, + "required": [ + "Event" + ] +} diff --git a/pymisp/exceptions.py b/pymisp/exceptions.py new file mode 100644 index 0000000..f4db340 --- /dev/null +++ b/pymisp/exceptions.py @@ -0,0 +1,31 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +class PyMISPError(Exception): + def __init__(self, message): + super(PyMISPError, self).__init__(message) + self.message = message + + +class NewEventError(PyMISPError): + pass + + +class NewAttributeError(PyMISPError): + pass + + +class SearchError(PyMISPError): + pass + + +class MissingDependency(PyMISPError): + pass + + +class NoURL(PyMISPError): + pass + + +class NoKey(PyMISPError): + pass diff --git a/pymisp/mispevent.py b/pymisp/mispevent.py new file mode 100644 index 0000000..db52ced --- /dev/null +++ b/pymisp/mispevent.py @@ -0,0 +1,571 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +import datetime +import time +import json +from json import JSONEncoder +import os +import warnings +import base64 +from io import BytesIO +from zipfile import ZipFile +import hashlib + +try: + from dateutil.parser import parse +except ImportError: + pass + +try: + import jsonschema +except ImportError: + pass + +try: + # pyme renamed to gpg the 2016-10-28 + import gpg + from gpg.constants.sig import mode + has_pyme = True +except ImportError: + try: + # pyme renamed to gpg the 2016-10-28 + import pyme as gpg + from pyme.constants.sig import mode + has_pyme = True + except ImportError: + has_pyme = False + +from .exceptions import PyMISPError, NewEventError, NewAttributeError + +# Least dirty way to support python 2 and 3 +try: + basestring + unicode + warnings.warn("You're using python 2, it is strongly recommended to use python >=3.4") +except NameError: 
+ basestring = str + unicode = str + + +class MISPAttribute(object): + + def __init__(self, describe_types): + self.categories = describe_types['categories'] + self.types = describe_types['types'] + self.category_type_mapping = describe_types['category_type_mappings'] + self.sane_default = describe_types['sane_defaults'] + self._reinitialize_attribute() + + def _reinitialize_attribute(self): + # Default values + self.category = None + self.type = None + self.value = None + self.to_ids = False + self.comment = '' + self.distribution = 5 + + # other possible values + self.data = None + self.encrypt = False + self.id = None + self.uuid = None + self.timestamp = None + self.sharing_group_id = None + self.deleted = None + self.sig = None + self.SharingGroup = [] + self.ShadowAttribute = [] + self.disable_correlation = False + self.RelatedAttribute = [] + self.Tag = [] + + def _serialize(self): + return '{type}{category}{to_ids}{uuid}{timestamp}{comment}{deleted}{value}'.format( + type=self.type, category=self.category, to_ids=self.to_ids, uuid=self.uuid, timestamp=self.timestamp, + comment=self.comment, deleted=self.deleted, value=self.value).encode() + + def sign(self, gpg_uid, passphrase=None): + if not has_pyme: + raise Exception('pyme is required, please install: pip install --pre pyme3. You will also need libgpg-error-dev and libgpgme11-dev.') + to_sign = self._serialize() + with gpg.Context() as c: + keys = list(c.keylist(gpg_uid)) + c.signers = keys[:1] + if passphrase: + c.set_passphrase_cb(lambda *args: passphrase) + signed, _ = c.sign(to_sign, mode=mode.DETACH) + self.sig = base64.b64encode(signed).decode() + + def delete(self): + self.deleted = True + + def verify(self, gpg_uid): + if not has_pyme: + raise Exception('pyme is required, please install: pip install --pre pyme3. 
You will also need libgpg-error-dev and libgpgme11-dev.') + signed_data = self._serialize() + with gpg.Context() as c: + keys = list(c.keylist(gpg_uid)) + try: + c.verify(signed_data, signature=base64.b64decode(self.sig), verify=keys[:1]) + return {self.uuid: True} + except: + return {self.uuid: False} + + def set_all_values(self, **kwargs): + if kwargs.get('type') and kwargs.get('category'): + if kwargs['type'] not in self.category_type_mapping[kwargs['category']]: + raise NewAttributeError('{} and {} is an invalid combinaison, type for this category has to be in {}'.format(self.type, self.category, (', '.join(self.category_type_mapping[kwargs['category']])))) + # Required + if kwargs.get('type'): + self.type = kwargs['type'] + if self.type not in self.types: + raise NewAttributeError('{} is invalid, type has to be in {}'.format(self.type, (', '.join(self.types)))) + elif not self.type: + raise NewAttributeError('The type of the attribute is required.') + + type_defaults = self.sane_default[self.type] + + if kwargs.get('value'): + self.value = kwargs['value'] + elif not self.value: + raise NewAttributeError('The value of the attribute is required.') + + # Default values + if kwargs.get('category'): + self.category = kwargs['category'] + if self.category not in self.categories: + raise NewAttributeError('{} is invalid, category has to be in {}'.format(self.category, (', '.join(self.categories)))) + else: + self.category = type_defaults['default_category'] + + if kwargs.get('to_ids'): + self.to_ids = kwargs['to_ids'] + if not isinstance(self.to_ids, bool): + raise NewAttributeError('{} is invalid, to_ids has to be True or False'.format(self.to_ids)) + else: + self.to_ids = bool(int(type_defaults['to_ids'])) + if kwargs.get('comment'): + self.comment = kwargs['comment'] + if kwargs.get('distribution'): + self.distribution = int(kwargs['distribution']) + if self.distribution not in [0, 1, 2, 3, 4, 5]: + raise NewAttributeError('{} is invalid, the distribution has to 
be in 0, 1, 2, 3, 4, 5'.format(self.distribution)) + + # other possible values + if kwargs.get('data'): + self.data = kwargs['data'] + self._load_data() + if kwargs.get('id'): + self.id = int(kwargs['id']) + if kwargs.get('uuid'): + self.uuid = kwargs['uuid'] + if kwargs.get('timestamp'): + self.timestamp = datetime.datetime.fromtimestamp(int(kwargs['timestamp'])) + if kwargs.get('sharing_group_id'): + self.sharing_group_id = int(kwargs['sharing_group_id']) + if kwargs.get('deleted'): + self.deleted = kwargs['deleted'] + if kwargs.get('SharingGroup'): + self.SharingGroup = kwargs['SharingGroup'] + if kwargs.get('ShadowAttribute'): + self.ShadowAttribute = kwargs['ShadowAttribute'] + if kwargs.get('sig'): + self.sig = kwargs['sig'] + if kwargs.get('Tag'): + self.Tag = kwargs['Tag'] + + # If the user wants to disable correlation, let them. Defaults to False. + self.disable_correlation = kwargs.get("disable_correlation", False) + + def _prepare_new_malware_sample(self): + if '|' in self.value: + # Get the filename, ignore the md5, because humans. 
+ self.malware_filename, md5 = self.value.split('|') + else: + # Assuming the user only passed the filename + self.malware_filename = self.value + m = hashlib.md5() + m.update(self.data.getvalue()) + md5 = m.hexdigest() + self.value = '{}|{}'.format(self.malware_filename, md5) + self.malware_binary = self.data + self.encrypt = True + + def _load_data(self): + if not isinstance(self.data, BytesIO): + self.data = BytesIO(base64.b64decode(self.data)) + if self.type == 'malware-sample': + try: + with ZipFile(self.data) as f: + for name in f.namelist(): + if name.endswith('.txt'): + with f.open(name, pwd=b'infected') as unpacked: + self.malware_filename = unpacked.read().decode() + else: + with f.open(name, pwd=b'infected') as unpacked: + self.malware_binary = BytesIO(unpacked.read()) + except: + # not a encrypted zip file, assuming it is a new malware sample + self._prepare_new_malware_sample() + + def _json(self): + to_return = {'type': self.type, 'category': self.category, 'to_ids': self.to_ids, + 'distribution': self.distribution, 'value': self.value, + 'comment': self.comment, 'disable_correlation': self.disable_correlation} + if self.sig: + to_return['sig'] = self.sig + if self.sharing_group_id: + to_return['sharing_group_id'] = self.sharing_group_id + if self.Tag: + to_return['Tag'] = self.Tag + if self.data: + to_return['data'] = base64.b64encode(self.data.getvalue()).decode() + if self.encrypt: + to_return['entrypt'] = self.encrypt + to_return = _int_to_str(to_return) + return to_return + + def _json_full(self): + to_return = self._json() + if self.id: + to_return['id'] = self.id + if self.uuid: + to_return['uuid'] = self.uuid + if self.timestamp: + to_return['timestamp'] = int(time.mktime(self.timestamp.timetuple())) + if self.deleted is not None: + to_return['deleted'] = self.deleted + if self.ShadowAttribute: + to_return['ShadowAttribute'] = self.ShadowAttribute + if self.SharingGroup: + to_return['SharingGroup'] = self.SharingGroup + to_return = 
_int_to_str(to_return) + return to_return + + +class EncodeUpdate(JSONEncoder): + def default(self, obj): + try: + return obj._json() + except AttributeError: + return JSONEncoder.default(self, obj) + + +class EncodeFull(JSONEncoder): + def default(self, obj): + try: + return obj._json_full() + except AttributeError: + return JSONEncoder.default(self, obj) + + +def _int_to_str(d): + # transform all integer back to string + for k, v in d.items(): + if isinstance(v, int) and not isinstance(v, bool): + d[k] = str(v) + return d + + +class MISPEvent(object): + + def __init__(self, describe_types=None): + self.ressources_path = os.path.join(os.path.abspath(os.path.dirname(__file__)), 'data') + with open(os.path.join(self.ressources_path, 'schema.json'), 'r') as f: + self.json_schema = json.load(f) + with open(os.path.join(self.ressources_path, 'schema-lax.json'), 'r') as f: + self.json_schema_lax = json.load(f) + if not describe_types: + t = json.load(open(os.path.join(self.ressources_path, 'describeTypes.json'), 'r')) + describe_types = t['result'] + self.describe_types = describe_types + self.categories = describe_types['categories'] + self.types = describe_types['types'] + self.category_type_mapping = describe_types['category_type_mappings'] + self.sane_default = describe_types['sane_defaults'] + self.new = True + self.dump_full = False + + self._reinitialize_event() + + def _reinitialize_event(self): + # Default values for a valid event to send to a MISP instance + self.distribution = 3 + self.threat_level_id = 2 + self.analysis = 0 + self.info = None + self.published = False + self.date = datetime.date.today() + self.attributes = [] + + # All other keys + self.sig = None + self.global_sig = None + self.id = None + self.orgc_id = None + self.org_id = None + self.uuid = None + self.attribute_count = None + self.timestamp = None + self.proposal_email_lock = None + self.locked = None + self.publish_timestamp = None + self.sharing_group_id = None + self.Org = None + 
self.Orgc = None + self.ShadowAttribute = [] + self.RelatedEvent = [] + self.Tag = [] + self.Galaxy = None + + def _serialize(self): + return '{date}{threat_level_id}{info}{uuid}{analysis}{timestamp}'.format( + date=self.date, threat_level_id=self.threat_level_id, info=self.info, + uuid=self.uuid, analysis=self.analysis, timestamp=self.timestamp).encode() + + def _serialize_sigs(self): + all_sigs = self.sig + for a in self.attributes: + all_sigs += a.sig + return all_sigs.encode() + + def sign(self, gpg_uid, passphrase=None): + if not has_pyme: + raise Exception('pyme is required, please install: pip install --pre pyme3. You will also need libgpg-error-dev and libgpgme11-dev.') + to_sign = self._serialize() + with gpg.Context() as c: + keys = list(c.keylist(gpg_uid)) + c.signers = keys[:1] + if passphrase: + c.set_passphrase_cb(lambda *args: passphrase) + signed, _ = c.sign(to_sign, mode=mode.DETACH) + self.sig = base64.b64encode(signed).decode() + for a in self.attributes: + a.sign(gpg_uid, passphrase) + to_sign_global = self._serialize_sigs() + with gpg.Context() as c: + keys = list(c.keylist(gpg_uid)) + c.signers = keys[:1] + if passphrase: + c.set_passphrase_cb(lambda *args: passphrase) + signed, _ = c.sign(to_sign_global, mode=mode.DETACH) + self.global_sig = base64.b64encode(signed).decode() + + def verify(self, gpg_uid): + if not has_pyme: + raise Exception('pyme is required, please install: pip install --pre pyme3. 
You will also need libgpg-error-dev and libgpgme11-dev.') + to_return = {} + signed_data = self._serialize() + with gpg.Context() as c: + keys = list(c.keylist(gpg_uid)) + try: + c.verify(signed_data, signature=base64.b64decode(self.sig), verify=keys[:1]) + to_return[self.uuid] = True + except: + to_return[self.uuid] = False + for a in self.attributes: + to_return.update(a.verify(gpg_uid)) + to_verify_global = self._serialize_sigs() + with gpg.Context() as c: + keys = list(c.keylist(gpg_uid)) + try: + c.verify(to_verify_global, signature=base64.b64decode(self.global_sig), verify=keys[:1]) + to_return['global'] = True + except: + to_return['global'] = False + return to_return + + def load(self, json_event): + self.new = False + self.dump_full = True + if isinstance(json_event, basestring) and os.path.exists(json_event): + # NOTE: is it a good idea? (possible security issue if an untrusted user call this method) + json_event = open(json_event, 'r') + if hasattr(json_event, 'read'): + # python2 and python3 compatible to find if we have a file + json_event = json_event.read() + if isinstance(json_event, basestring): + json_event = json.loads(json_event) + if json_event.get('response'): + event = json_event.get('response')[0] + else: + event = json_event + if not event: + raise PyMISPError('Invalid event') + # Invalid event created by MISP up to 2.4.52 (attribute_count is none instead of '0') + if event.get('Event') and event.get('Event').get('attribute_count') is None: + event['Event']['attribute_count'] = '0' + jsonschema.validate(event, self.json_schema_lax) + e = event.get('Event') + self._reinitialize_event() + self.set_all_values(**e) + + def set_date(self, date, ignore_invalid=False): + if isinstance(date, basestring) or isinstance(date, unicode): + self.date = parse(date).date() + elif isinstance(date, datetime.datetime): + self.date = date.date() + elif isinstance(date, datetime.date): + self.date = date + else: + if ignore_invalid: + self.date = 
datetime.date.today() + else: + raise NewEventError('Invalid format for the date: {} - {}'.format(date, type(date))) + + def set_all_values(self, **kwargs): + # Required value + if kwargs.get('info'): + self.info = kwargs['info'] + elif not self.info: + raise NewAttributeError('The info field of the new event is required.') + + # Default values for a valid event to send to a MISP instance + if kwargs.get('distribution') is not None: + self.distribution = int(kwargs['distribution']) + if self.distribution not in [0, 1, 2, 3, 4]: + raise NewEventError('{} is invalid, the distribution has to be in 0, 1, 2, 3, 4'.format(self.distribution)) + if kwargs.get('threat_level_id') is not None: + self.threat_level_id = int(kwargs['threat_level_id']) + if self.threat_level_id not in [1, 2, 3, 4]: + raise NewEventError('{} is invalid, the threat_level has to be in 1, 2, 3, 4'.format(self.threat_level_id)) + if kwargs.get('analysis') is not None: + self.analysis = int(kwargs['analysis']) + if self.analysis not in [0, 1, 2]: + raise NewEventError('{} is invalid, the analysis has to be in 0, 1, 2'.format(self.analysis)) + if kwargs.get('published') is not None: + self.publish() + if kwargs.get('date'): + self.set_date(kwargs['date']) + if kwargs.get('Attribute'): + for a in kwargs['Attribute']: + attribute = MISPAttribute(self.describe_types) + attribute.set_all_values(**a) + self.attributes.append(attribute) + + # All other keys + if kwargs.get('id'): + self.id = int(kwargs['id']) + if kwargs.get('orgc_id'): + self.orgc_id = int(kwargs['orgc_id']) + if kwargs.get('org_id'): + self.org_id = int(kwargs['org_id']) + if kwargs.get('uuid'): + self.uuid = kwargs['uuid'] + if kwargs.get('attribute_count'): + self.attribute_count = int(kwargs['attribute_count']) + if kwargs.get('timestamp'): + self.timestamp = datetime.datetime.fromtimestamp(int(kwargs['timestamp'])) + if kwargs.get('proposal_email_lock'): + self.proposal_email_lock = kwargs['proposal_email_lock'] + if 
kwargs.get('locked'): + self.locked = kwargs['locked'] + if kwargs.get('publish_timestamp'): + self.publish_timestamp = datetime.datetime.fromtimestamp(int(kwargs['publish_timestamp'])) + if kwargs.get('sharing_group_id'): + self.sharing_group_id = int(kwargs['sharing_group_id']) + if kwargs.get('Org'): + self.Org = kwargs['Org'] + if kwargs.get('Orgc'): + self.Orgc = kwargs['Orgc'] + if kwargs.get('ShadowAttribute'): + self.ShadowAttribute = kwargs['ShadowAttribute'] + if kwargs.get('RelatedEvent'): + self.RelatedEvent = [] + for rel_event in kwargs['RelatedEvent']: + sub_event = MISPEvent() + sub_event.load(rel_event) + self.RelatedEvent.append(sub_event) + if kwargs.get('Galaxy'): + self.Galaxy = kwargs['Galaxy'] + if kwargs.get('Tag'): + self.Tag = kwargs['Tag'] + if kwargs.get('sig'): + self.sig = kwargs['sig'] + if kwargs.get('global_sig'): + self.global_sig = kwargs['global_sig'] + + def _json(self): + to_return = {'Event': {}} + to_return['Event'] = {'distribution': self.distribution, 'info': self.info, + 'date': self.date.isoformat(), 'published': self.published, + 'threat_level_id': self.threat_level_id, + 'analysis': self.analysis, 'Attribute': []} + if self.sig: + to_return['Event']['sig'] = self.sig + if self.global_sig: + to_return['Event']['global_sig'] = self.global_sig + if self.uuid: + to_return['Event']['uuid'] = self.uuid + if self.Tag: + to_return['Event']['Tag'] = self.Tag + if self.Orgc: + to_return['Event']['Orgc'] = self.Orgc + if self.Galaxy: + to_return['Event']['Galaxy'] = self.Galaxy + if self.sharing_group_id: + to_return['Event']['sharing_group_id'] = self.sharing_group_id + to_return['Event'] = _int_to_str(to_return['Event']) + if self.attributes: + to_return['Event']['Attribute'] = [a._json() for a in self.attributes] + jsonschema.validate(to_return, self.json_schema) + return to_return + + def _json_full(self): + to_return = self._json() + if self.id: + to_return['Event']['id'] = self.id + if self.orgc_id: + 
to_return['Event']['orgc_id'] = self.orgc_id + if self.org_id: + to_return['Event']['org_id'] = self.org_id + if self.locked is not None: + to_return['Event']['locked'] = self.locked + if self.attribute_count is not None: + to_return['Event']['attribute_count'] = self.attribute_count + if self.RelatedEvent: + to_return['Event']['RelatedEvent'] = [] + for rel_event in self.RelatedEvent: + to_return['Event']['RelatedEvent'].append(rel_event._json_full()) + if self.Org: + to_return['Event']['Org'] = self.Org + if self.sharing_group_id: + to_return['Event']['sharing_group_id'] = self.sharing_group_id + if self.ShadowAttribute: + to_return['Event']['ShadowAttribute'] = self.ShadowAttribute + if self.proposal_email_lock is not None: + to_return['Event']['proposal_email_lock'] = self.proposal_email_lock + if self.locked is not None: + to_return['Event']['locked'] = self.locked + if self.publish_timestamp: + to_return['Event']['publish_timestamp'] = int(time.mktime(self.publish_timestamp.timetuple())) + if self.timestamp: + to_return['Event']['timestamp'] = int(time.mktime(self.timestamp.timetuple())) + to_return['Event'] = _int_to_str(to_return['Event']) + if self.attributes: + to_return['Event']['Attribute'] = [a._json_full() for a in self.attributes] + jsonschema.validate(to_return, self.json_schema) + return to_return + + def publish(self): + self.published = True + + def unpublish(self): + self.published = False + + def delete_attribute(self, attribute_id): + found = False + for a in self.attributes: + if a.id == attribute_id or a.uuid == attribute_id: + a.delete() + found = True + break + if not found: + raise Exception('No attribute with UUID/ID {} found.'.format(attribute_id)) + + def add_attribute(self, type, value, **kwargs): + attribute = MISPAttribute(self.describe_types) + attribute.set_all_values(type=type, value=value, **kwargs) + self.attributes.append(attribute) diff --git a/pymisp/tools/__init__.py b/pymisp/tools/__init__.py new file mode 100644 index 
0000000..e69de29 diff --git a/pymisp/tools/neo4j.py b/pymisp/tools/neo4j.py new file mode 100644 index 0000000..42f5214 --- /dev/null +++ b/pymisp/tools/neo4j.py @@ -0,0 +1,58 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +import glob +import os +from pymisp import MISPEvent + +try: + from py2neo import authenticate, Graph, Node, Relationship + has_py2neo = True +except ImportError: + has_py2neo = False + + +class Neo4j(): + + def __init__(self, host='localhost:7474', username='neo4j', password='neo4j'): + if not has_py2neo: + raise Exception('py2neo is required, please install: pip install py2neo') + authenticate(host, username, password) + self.graph = Graph("http://{}/db/data/".format(host)) + + def load_events_directory(self, directory): + self.events = [] + for path in glob.glob(os.path.join(directory, '*.json')): + e = MISPEvent() + e.load(path) + self.import_event(e) + + def del_all(self): + self.graph.delete_all() + + def import_event(self, event): + tx = self.graph.begin() + event_node = Node('Event', uuid=event.uuid, name=event.info) + # event_node['distribution'] = event.distribution + # event_node['threat_level_id'] = event.threat_level_id + # event_node['analysis'] = event.analysis + # event_node['published'] = event.published + # event_node['date'] = event.date.isoformat() + tx.create(event_node) + for a in event.attributes: + attr_node = Node('Attribute', a.type, uuid=a.uuid) + attr_node['category'] = a.category + attr_node['name'] = a.value + # attr_node['to_ids'] = a.to_ids + # attr_node['comment'] = a.comment + # attr_node['distribution'] = a.distribution + tx.create(attr_node) + member_rel = Relationship(event_node, "is member", attr_node) + tx.create(member_rel) + val = Node('Value', name=a.value) + ev = Relationship(event_node, "has", val) + av = Relationship(attr_node, "is", val) + s = val | ev | av + tx.merge(s) + #tx.graph.push(s) + tx.commit() diff --git a/pymisp/tools/openioc.py b/pymisp/tools/openioc.py new file mode 100644 index 
0000000..808f260 --- /dev/null +++ b/pymisp/tools/openioc.py @@ -0,0 +1,137 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +from pymisp import MISPEvent +try: + from bs4 import BeautifulSoup + has_bs4 = True +except ImportError: + has_bs4 = False + + +iocMispMapping = { + 'DriverItem/DriverName': {'category': 'Artifacts dropped', 'type': 'other', 'comment': 'DriverName.'}, + + 'DnsEntryItem/Host': {'type': 'domain'}, + + 'Email/To': {'type': 'target-email'}, + 'Email/Date': {'type': 'comment', 'comment': 'EmailDate.'}, + # 'Email/Body': {'type': 'email-subject'}, + 'Email/From': {'type': 'email-dst'}, + 'Email/Subject': {'type': 'email-subject'}, + 'Email/Attachment/Name': {'type': 'email-attachment'}, + + 'FileItem/Md5sum': {'type': 'md5'}, + 'FileItem/Sha1sum': {'type': 'sha1'}, + 'FileItem/Sha256sum': {'type': 'sha256'}, + + 'ServiceItem/serviceDLLmd5sum': {'type': 'md5', 'category': 'Payload installation'}, + 'ServiceItem/serviceDLLsha1sum': {'type': 'sha1', 'category': 'Payload installation'}, + 'ServiceItem/serviceDLLsha256sum': {'type': 'sha256', 'category': 'Payload installation'}, + + 'TaskItem/md5sum': {'type': 'md5'}, + 'TaskItem/sha1sum': {'type': 'sha1'}, + 'TaskItem/Sha256sum': {'type': 'sha256'}, + + 'FileItem/FileName': {'type': 'filename'}, + 'FileItem/FullPath': {'type': 'filename'}, + 'FileItem/FilePath': {'type': 'filename'}, + 'DriverItem/DriverName': {'type': 'filename'}, + + 'Network/URI': {'type': 'uri'}, + 'Network/DNS': {'type': 'domain'}, + 'Network/String': {'type': 'ip-dst'}, + 'RouteEntryItem/Destination': {'type': 'ip-dst'}, + 'Network/UserAgent': {'type': 'user-agent'}, + + 'PortItem/localIP': {'type': 'ip-src'}, + 'PortItem/remoteIP': {'type': 'ip-dst'}, + + 'ProcessItem/name': {'type': 'pattern-in-memory', 'comment': 'ProcessName.'}, + 'ProcessItem/path': {'type': 'pattern-in-memory', 'comment': 'ProcessPath.'}, + 'ProcessItem/Mutex': {'type': 'mutex'}, + 'ProcessItem/Pipe/Name': {'type': 'named pipe'}, + 
'ProcessItem/Mutex/Name': {'type': 'mutex', 'comment': 'MutexName.'}, + + 'CookieHistoryItem/HostName': {'type': 'hostname'}, + 'FormHistoryItem/HostName': {'type': 'hostname'}, + 'SystemInfoItem/HostName': {'type': 'hostname'}, + 'UrlHistoryItem/HostName': {'type': 'hostname'}, + 'DnsEntryItem/RecordName': {'type': 'hostname'}, + 'DnsEntryItem/Host': {'type': 'hostname'}, + + # Is it the regkey value? + # 'RegistryItem/Text': {'type': 'regkey', 'RegistryText. '}, + 'RegistryItem/KeyPath': {'type': 'regkey'}, + 'RegistryItem/Path': {'type': 'regkey'}, + + 'ServiceItem/name': {'type': 'windows-service-name'}, + 'ServiceItem/type': {'type': 'pattern-in-memory', 'comment': 'ServiceType. '}, + + 'Snort/Snort': {'type': 'snort'}, +} + + +def extract_field(report, field_name): + data = report.find(field_name.lower()) + if data and hasattr(data, 'text'): + return data.text + return None + + +def load_openioc(openioc): + if not has_bs4: + raise Exception('You need to install BeautifulSoup: pip install bs4') + misp_event = MISPEvent() + with open(openioc, "r") as ioc_file: + iocreport = BeautifulSoup(ioc_file, "lxml") + # Set event fields + info = extract_field(iocreport, 'short_description') + if info: + misp_event.info = info + date = extract_field(iocreport, 'authored_date') + if date: + misp_event.set_date(date) + # Set special attributes + description = extract_field(iocreport, 'description') + if description: + misp_event.add_attribute('comment', description) + author = extract_field(iocreport, 'authored_by') + if author: + misp_event.add_attribute('comment', author) + misp_event = set_all_attributes(iocreport, misp_event) + return misp_event + + +def get_mapping(openioc_type): + t = openioc_type.lower() + for k, v in iocMispMapping.items(): + if k.lower() == t: + return v + return False + + +def set_all_attributes(openioc, misp_event): + for item in openioc.find_all("indicatoritem"): + attribute_values = {'comment': ''} + if item.find('context'): + mapping = 
get_mapping(item.find('context')['search']) + if mapping: + attribute_values.update(mapping) + else: + # Unknown mapping, ignoring + # print(item.find('context')) + continue + else: + continue + value = extract_field(item, 'Content') + if value: + attribute_values['value'] = value + else: + # No value, ignoring + continue + comment = extract_field(item, 'Comment') + if comment: + attribute_values["comment"] = '{} {}'.format(attribute_values["comment"], comment) + misp_event.add_attribute(**attribute_values) + return misp_event diff --git a/pymisp/tools/stix.py b/pymisp/tools/stix.py new file mode 100644 index 0000000..c3a81fb --- /dev/null +++ b/pymisp/tools/stix.py @@ -0,0 +1,36 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +try: + from misp_stix_converter.converters.buildMISPAttribute import buildEvent + from misp_stix_converter.converters import convert + from misp_stix_converter.converters.convert import MISPtoSTIX + has_misp_stix_converter = True +except ImportError: + has_misp_stix_converter = False + + +def load_stix(stix, distribution=3, threat_level_id=2, analysis=0): + '''Returns a MISPEvent object from a STIX package''' + if not has_misp_stix_converter: + raise Exception('You need to install misp_stix_converter: pip install git+https://github.com/MISP/MISP-STIX-Converter.git') + stix = convert.load_stix(stix) + return buildEvent(stix, distribution=distribution, + threat_level_id=threat_level_id, analysis=analysis) + + +def make_stix_package(misp_event, to_json=False, to_xml=False): + '''Returns a STIXPackage from a MISPEvent. + + Optionally can return the package in json or xml. 
+ + ''' + if not has_misp_stix_converter: + raise Exception('You need to install misp_stix_converter: pip install git+https://github.com/MISP/MISP-STIX-Converter.git') + package = MISPtoSTIX(misp_event) + if to_json: + return package.to_json() + elif to_xml: + return package.to_xml() + else: + return package diff --git a/setup.py b/setup.py index 0e8f3e9..7ddac16 100644 --- a/setup.py +++ b/setup.py @@ -12,17 +12,22 @@ setup( maintainer='Raphaël Vinot', url='https://github.com/MISP/PyMISP', description='Python API for MISP.', - packages=['pymisp'], + packages=['pymisp', 'pymisp.tools'], classifiers=[ 'License :: OSI Approved :: BSD License', 'Development Status :: 5 - Production/Stable', 'Environment :: Console', + 'Operating System :: POSIX :: Linux', 'Intended Audience :: Science/Research', 'Intended Audience :: Telecommunications Industry', - 'Programming Language :: Python', + 'Intended Audience :: Information Technology', + 'Programming Language :: Python :: 2.7', + 'Programming Language :: Python :: 3', 'Topic :: Security', 'Topic :: Internet', ], test_suite="tests", - install_requires=['requests'], + install_requires=['requests', 'python-dateutil', 'jsonschema'], + include_package_data=True, + package_data={'data': ['schema.json', 'schema-lax.json', 'describeTypes.json']}, ) diff --git a/tests/57c4445b-c548-4654-af0b-4be3950d210f.json b/tests/57c4445b-c548-4654-af0b-4be3950d210f.json new file mode 100644 index 0000000..29bb02c --- /dev/null +++ b/tests/57c4445b-c548-4654-af0b-4be3950d210f.json @@ -0,0 +1 @@ +{"Event": {"info": "Ransomware - Xorist", "publish_timestamp": "1472548231", "timestamp": "1472541011", "analysis": "2", "Attribute": [{"category": "External analysis", "comment": "Imported via the Freetext Import Tool - Xchecked via VT: b3c4ae251f8094fa15b510051835c657eaef2a6cea46075d3aec964b14a99f68", "uuid": "57c5300c-0560-4146-bfaa-40e802de0b81", "timestamp": "1472540684", "to_ids": false, "value": 
"https://www.virustotal.com/file/b3c4ae251f8094fa15b510051835c657eaef2a6cea46075d3aec964b14a99f68/analysis/1469554268/", "type": "link"}, {"category": "External analysis", "comment": "", "uuid": "57c5310b-dc34-43cb-8b8e-4846950d210f", "timestamp": "1472541011", "to_ids": false, "value": "http://www.xylibox.com/2011/06/have-fun-with-trojan-ransomwin32xorist.html", "type": "link"}, {"category": "Other", "comment": "", "uuid": "57c444c0-8004-48fa-9c33-8aca950d210f", "timestamp": "1472480448", "to_ids": false, "value": "UPX packed", "type": "comment"}, {"category": "Other", "comment": "", "uuid": "57c44648-96f4-45d4-a8eb-453e950d210f", "timestamp": "1472480840", "to_ids": false, "value": "Key: 85350044dF4AC3518D185678A9414A7F,\r\nEncryption rounds:8,\r\nStart offset: 64,\r\nAlgorithm: TEA", "type": "text"}, {"category": "Payload delivery", "comment": "Imported via the Freetext Import Tool", "uuid": "57c4448a-fb04-457d-87e7-4127950d210f", "timestamp": "1472480394", "to_ids": true, "value": "3Z4wnG9603it23y.exe", "type": "filename"}, {"category": "Payload delivery", "comment": "Imported via the Freetext Import Tool", "uuid": "57c4448b-454c-4d17-90d1-4d2f950d210f", "timestamp": "1472480395", "to_ids": true, "value": "0749bae92ca336a02c83d126e04ec628", "type": "md5"}, {"category": "Payload delivery", "comment": "Imported via the Freetext Import Tool", "uuid": "57c4448a-bef0-4ba7-a071-444e950d210f", "timestamp": "1472480394", "to_ids": true, "value": "77b0c41b7d340b8a3d903f21347bbf06aa766b5b", "type": "sha1"}, {"category": "Payload delivery", "comment": "Imported via the Freetext Import Tool", "uuid": "57c4448b-3fa4-4d65-9ccc-4afa950d210f", "timestamp": "1472480395", "to_ids": true, "value": "b3c4ae251f8094fa15b510051835c657eaef2a6cea46075d3aec964b14a99f68", "type": "sha256"}, {"category": "Persistence mechanism", "comment": "", "uuid": "57c54b0f-27a4-458b-8e63-4455950d210f", "timestamp": "1472547599", "to_ids": true, "value": 
"Software\\Wow6432Node\\Microsoft\\Windows\\CurrentVersion\\Run|%TEMP%\\3Z4wnG9603it23y.exe", "type": "regkey|value"}], "Tag": [{"colour": "#ffffff", "exportable": true, "name": "tlp:white"}, {"colour": "#3d7a00", "exportable": true, "name": "circl:incident-classification=\"malware\""}, {"colour": "#420053", "exportable": true, "name": "ms-caro-malware:malware-type=\"Ransom\""}, {"colour": "#2c4f00", "exportable": true, "name": "malware_classification:malware-category=\"Ransomware\""}], "published": true, "date": "2016-08-29", "Orgc": {"name": "CIRCL", "uuid": "55f6ea5e-2c60-40e5-964f-47a8950d210f"}, "threat_level_id": "3", "uuid": "57c4445b-c548-4654-af0b-4be3950d210f"}} \ No newline at end of file diff --git a/tests/misp_event.json b/tests/misp_event.json new file mode 100644 index 0000000..1db311a --- /dev/null +++ b/tests/misp_event.json @@ -0,0 +1,78 @@ +{ + "Attribute": [ + { + "ShadowAttribute": [], + "SharingGroup": [], + "category": "Payload delivery", + "comment": "", + "deleted": false, + "distribution": "5", + "event_id": "2", + "id": "7", + "sharing_group_id": "0", + "timestamp": "1465681304", + "to_ids": false, + "type": "url", + "uuid": "575c8598-f1f0-4c16-a94a-0612c0a83866", + "value": "http://fake.website.com/malware/is/here" + }, + { + "ShadowAttribute": [], + "SharingGroup": [], + "category": "Payload type", + "comment": "", + "deleted": false, + "distribution": "5", + "event_id": "2", + "id": "6", + "sharing_group_id": "0", + "timestamp": "1465681801", + "to_ids": false, + "type": "text", + "uuid": "575c8549-9010-4555-8b37-057ac0a83866", + "value": "Locky" + } + ], + "Org": { + "id": "1", + "name": "ORGNAME", + "uuid": "57586e9a-4a64-4f79-9009-4dc1c0a83866" + }, + "Orgc": { + "id": "1", + "name": "ORGNAME", + "uuid": "57586e9a-4a64-4f79-9009-4dc1c0a83866" + }, + "RelatedEvent": [], + "ShadowAttribute": [], + "Tag": [ + { + "colour": "#005a5a", + "exportable": true, + "id": "6", + "name": "ecsirt:malicious-code=\"ransomware\"" + }, + { + 
"colour": "#142bf7", + "exportable": true, + "id": "1", + "name": "for_intelmq_processing" + } + ], + "analysis": "0", + "attribute_count": "2", + "date": "2016-06-09", + "distribution": "0", + "id": "2", + "info": "A Random Event", + "locked": false, + "org_id": "1", + "orgc_id": "1", + "proposal_email_lock": false, + "publish_timestamp": "0", + "published": false, + "sharing_group_id": "0", + "threat_level_id": "1", + "timestamp": "1465681801", + "uuid": "5758ebf5-c898-48e6-9fe9-5665c0a83866" +} diff --git a/tests/new_misp_event.json b/tests/new_misp_event.json new file mode 100644 index 0000000..88ac8f9 --- /dev/null +++ b/tests/new_misp_event.json @@ -0,0 +1,34 @@ +{ + "Event": { + "uuid": "57c06bb1-625c-4d34-9b9f-4066950d210f", + "orgc_id": "1", + "publish_timestamp": "0", + "RelatedEvent": [], + "org_id": "1", + "Org": { + "uuid": "55f6ea5e-2c60-40e5-964f-47a8950d210f", + "name": "CIRCL", + "id": "1" + }, + "attribute_count": null, + "distribution": "0", + "sharing_group_id": "0", + "threat_level_id": "1", + "locked": false, + "Attribute": [], + "published": false, + "ShadowAttribute": [], + "date": "2016-08-26", + "info": "This is a test", + "timestamp": "1472228273", + "Orgc": { + "uuid": "55f6ea5e-2c60-40e5-964f-47a8950d210f", + "name": "CIRCL", + "id": "1" + }, + "id": "594", + "proposal_email_lock": false, + "analysis": "0" + } +} + diff --git a/tests/search_index_result.json b/tests/search_index_result.json new file mode 100644 index 0000000..bef46d0 --- /dev/null +++ b/tests/search_index_result.json @@ -0,0 +1,69 @@ +[ + { + "id": "3", + "org": "", + "date": "2016-12-01", + "info": "Another random Event", + "published": false, + "uuid": "5758ebf5-c898-48e6-9fe9-5665c0a83866", + "attribute_count": "2", + "analysis": "0", + "orgc": "", + "timestamp": "1465681801", + "distribution": "3", + "proposal_email_lock": false, + "locked": false, + "threat_level_id": "1", + "publish_timestamp": "0", + "sharing_group_id": "0", + "org_id": "1", + "orgc_id": "1", + 
"Org": { + "id": "1", + "name": "ORGNAME" + }, + "Orgc": { + "id": "1", + "name": "ORGNAME" + }, + "EventTag": [ + { + "id": "9760", + "event_id": "6028", + "tag_id": "4", + "Tag": { + "id": "4", + "name": "TLP:GREEN", + "colour": "#33822d", + "exportable": true + } + }, + { + "id": "9801", + "event_id": "3", + "tag_id": "1", + "Tag": { + "id": "1", + "name": "for_intelmq_processing", + "colour": "#00ad1c", + "exportable": true + } + }, + { + "id": "9803", + "event_id": "3", + "tag_id": "6", + "Tag": { + "id": "6", + "name": "ecsirt:malicious-code=\"ransomware\"", + "colour": "#005a5a", + "exportable": true + } + } + ], + "SharingGroup": { + "id": null, + "name": null + } + } +] diff --git a/tests/sharing_groups.json b/tests/sharing_groups.json new file mode 100644 index 0000000..96a3e5f --- /dev/null +++ b/tests/sharing_groups.json @@ -0,0 +1,100 @@ +{ + "response": [ + { + "SharingGroup": { + "id": "1", + "name": "PrivateTrustedGroup", + "description": "", + "releasability": "", + "local": true, + "active": true + }, + "Organisation": { + "id": "1", + "name": "CIRCL", + "uuid": "55f6ea5e-2c60-40e5-964f-47a8950d210f" + }, + "SharingGroupOrg": [ + { + "id": "1", + "sharing_group_id": "1", + "org_id": "1", + "extend": true, + "Organisation": { + "name": "CIRCL", + "id": "1", + "uuid": "55f6ea5e-2c60-40e5-964f-47a8950d210f" + } + }, + { + "id": "2", + "sharing_group_id": "1", + "org_id": "2", + "extend": false, + "Organisation": { + "name": "PifPafPoum", + "id": "2", + "uuid": "56bf12a7-c19c-4b98-83e7-d9bb02de0b81" + } + } + ], + "SharingGroupServer": [ + { + "all_orgs": false, + "server_id": "0", + "sharing_group_id": "1", + "Server": [] + } + ], + "editable": true + }, + { + "SharingGroup": { + "id": "2", + "name": "test", + "description": "", + "releasability": "", + "local": true, + "active": true + }, + "Organisation": { + "id": "1", + "name": "CIRCL", + "uuid": "55f6ea5e-2c60-40e5-964f-47a8950d210f" + }, + "SharingGroupOrg": [ + { + "id": "3", + 
"sharing_group_id": "2", + "org_id": "1", + "extend": true, + "Organisation": { + "name": "CIRCL", + "id": "1", + "uuid": "55f6ea5e-2c60-40e5-964f-47a8950d210f" + } + }, + { + "id": "4", + "sharing_group_id": "2", + "org_id": "2", + "extend": false, + "Organisation": { + "name": "PifPafPoum", + "id": "2", + "uuid": "56bf12a7-c19c-4b98-83e7-d9bb02de0b81" + } + } + ], + "SharingGroupServer": [ + { + "all_orgs": false, + "server_id": "0", + "sharing_group_id": "2", + "Server": [] + } + ], + "editable": true + } + ] +} diff --git a/tests/test.py b/tests/test.py index 4bd0b2f..dfd93aa 100755 --- a/tests/test.py +++ b/tests/test.py @@ -1,5 +1,6 @@ #!/usr/bin/env python # -*- coding: utf-8 -*- +from __future__ import print_function from pymisp import PyMISP from keys import url, key @@ -41,13 +42,14 @@ class TestBasic(unittest.TestCase): event = self.misp.new_event(0, 1, 0, "This is a test") event_id = self._clean_event(event) to_check = {u'Event': {u'info': u'This is a test', u'locked': False, - u'attribute_count': None, u'analysis': u'0', + u'attribute_count': None, 'disable_correlation': False, u'analysis': u'0', u'ShadowAttribute': [], u'published': False, u'distribution': u'0', u'Attribute': [], u'proposal_email_lock': False, u'Org': {u'name': u'ORGNAME'}, u'Orgc': {u'name': u'ORGNAME'}, + u'Galaxy': [], u'threat_level_id': u'1'}} - print event + print(event) self.assertEqual(event, to_check, 'Failed at creating a new Event') return int(event_id) @@ -61,6 +63,7 @@ class TestBasic(unittest.TestCase): u'ShadowAttribute': [], u'published': False, u'distribution': u'0', u'Org': {u'name': u'ORGNAME'}, u'Orgc': {u'name': u'ORGNAME'}, + u'Galaxy': [], u'Attribute': [ {u'category': u'Payload installation', u'comment': u'Fanny modules', u'to_ids': False, u'value': u'dll_installer.dll|0a209ac0de4ac033f31d6ba9191a8f7a', @@ -84,6 +87,7 @@ class TestBasic(unittest.TestCase): u'ShadowAttribute': [], u'published': True, u'distribution': u'0', u'Org': {u'name': u'ORGNAME'}, u'Orgc': 
{u'name': u'ORGNAME'}, + u'Galaxy': [], u'Attribute': [ {u'category': u'Payload installation', u'comment': u'Fanny modules', u'to_ids': False, u'value': u'dll_installer.dll|0a209ac0de4ac033f31d6ba9191a8f7a', @@ -99,15 +103,19 @@ class TestBasic(unittest.TestCase): def delete(self, eventid): event = self.misp.delete_event(eventid) - print event.json() + print(event) def delete_attr(self, attrid): event = self.misp.delete_attribute(attrid) - print event.json() + print(event) def get(self, eventid): event = self.misp.get_event(eventid) - print event.json() + print(event) + + def get_stix(self, **kwargs): + event = self.misp.get_stix(kwargs) + print(event) def add(self): event = {u'Event': {u'info': u'This is a test', u'locked': False, @@ -125,7 +133,7 @@ class TestBasic(unittest.TestCase): u'ShadowAttribute': [], u'distribution': u'2', u'type': u'filename|sha256'}], u'proposal_email_lock': False, u'threat_level_id': u'1'}} event = self.misp.add_event(event) - print event.json() + print(event) def test_create_event(self): eventid = self.new_event() @@ -151,6 +159,9 @@ class TestBasic(unittest.TestCase): time.sleep(1) self.delete(eventid) + def test_one_or_more(self): + self.assertEqual(self.misp._one_or_more(1), (1,)) + self.assertEqual(self.misp._one_or_more([1]), [1]) if __name__ == '__main__': unittest.main() diff --git a/tests/test_offline.py b/tests/test_offline.py new file mode 100644 index 0000000..298485c --- /dev/null +++ b/tests/test_offline.py @@ -0,0 +1,223 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +import unittest +import requests_mock +import json +import os + +import pymisp as pm +from pymisp import PyMISP +# from pymisp import NewEventError +from pymisp import MISPEvent +from pymisp import EncodeUpdate +from pymisp import EncodeFull + + +@requests_mock.Mocker() +class TestOffline(unittest.TestCase): + + def setUp(self): + self.maxDiff = None + self.domain = 'http://misp.local/' + self.key = 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' + with 
open('tests/misp_event.json', 'r') as f: + self.event = {'Event': json.load(f)} + with open('tests/new_misp_event.json', 'r') as f: + self.new_misp_event = {'Event': json.load(f)} + self.ressources_path = os.path.join(os.path.abspath(os.path.dirname(__file__)), '../pymisp/data') + with open(os.path.join(self.ressources_path, 'describeTypes.json'), 'r') as f: + self.types = json.load(f) + with open('tests/sharing_groups.json', 'r') as f: + self.sharing_groups = json.load(f) + self.auth_error_msg = {"name": "Authentication failed. Please make sure you pass the API key of an API enabled user along in the Authorization header.", + "message": "Authentication failed. Please make sure you pass the API key of an API enabled user along in the Authorization header.", + "url": "\/events\/1"} + with open('tests/search_index_result.json', 'r') as f: + self.search_index_result = json.load(f) + + def initURI(self, m): + m.register_uri('GET', self.domain + 'events/1', json=self.auth_error_msg, status_code=403) + m.register_uri('GET', self.domain + 'servers/getVersion.json', json={"version": "2.4.56"}) + m.register_uri('GET', self.domain + 'sharing_groups.json', json=self.sharing_groups) + m.register_uri('GET', self.domain + 'attributes/describeTypes.json', json=self.types) + m.register_uri('GET', self.domain + 'events/2', json=self.event) + m.register_uri('POST', self.domain + 'events/5758ebf5-c898-48e6-9fe9-5665c0a83866', json=self.event) + m.register_uri('DELETE', self.domain + 'events/2', json={'message': 'Event deleted.'}) + m.register_uri('DELETE', self.domain + 'events/3', json={'errors': ['Invalid event'], 'message': 'Invalid event', 'name': 'Invalid event', 'url': '/events/3'}) + m.register_uri('DELETE', self.domain + 'attributes/2', json={'message': 'Attribute deleted.'}) + m.register_uri('GET', self.domain + 'events/index/searchtag:1', json=self.search_index_result) + m.register_uri('GET', self.domain + 'events/index/searchtag:ecsirt:malicious-code=%22ransomware%22', 
json=self.search_index_result) + + def test_getEvent(self, m): + self.initURI(m) + pymisp = PyMISP(self.domain, self.key) + e1 = pymisp.get_event(2) + e2 = pymisp.get(2) + self.assertEqual(e1, e2) + self.assertEqual(self.event, e2) + + def test_updateEvent(self, m): + self.initURI(m) + pymisp = PyMISP(self.domain, self.key) + e0 = pymisp.update_event('5758ebf5-c898-48e6-9fe9-5665c0a83866', json.dumps(self.event)) + e1 = pymisp.update_event('5758ebf5-c898-48e6-9fe9-5665c0a83866', self.event) + self.assertEqual(e0, e1) + e2 = pymisp.update(e0) + self.assertEqual(e1, e2) + self.assertEqual(self.event, e2) + + def test_deleteEvent(self, m): + self.initURI(m) + pymisp = PyMISP(self.domain, self.key) + d = pymisp.delete_event(2) + self.assertEqual(d, {'message': 'Event deleted.'}) + d = pymisp.delete_event(3) + self.assertEqual(d, {'errors': ['Invalid event'], 'message': 'Invalid event', 'name': 'Invalid event', 'url': '/events/3'}) + + def test_deleteAttribute(self, m): + self.initURI(m) + pymisp = PyMISP(self.domain, self.key) + d = pymisp.delete_attribute(2) + self.assertEqual(d, {'message': 'Attribute deleted.'}) + + def test_publish(self, m): + self.initURI(m) + pymisp = PyMISP(self.domain, self.key) + e = pymisp.publish(self.event) # requests-mock always return the non-published event + pub = self.event + pub['Event']['published'] = True + # self.assertEqual(e, pub) FIXME: broken test, not-published event returned + e = pymisp.publish(self.event) + self.assertEqual(e, {'error': 'Already published'}) + + def test_getVersions(self, m): + self.initURI(m) + pymisp = PyMISP(self.domain, self.key) + api_version = pymisp.get_api_version() + self.assertEqual(api_version, {'version': pm.__version__}) + server_version = pymisp.get_version() + self.assertEqual(server_version, {"version": "2.4.56"}) + + def test_getSharingGroups(self, m): + self.initURI(m) + pymisp = PyMISP(self.domain, self.key) + sharing_groups = pymisp.get_sharing_groups() + 
self.assertEqual(sharing_groups[0], self.sharing_groups['response'][0]) + + def test_auth_error(self, m): + self.initURI(m) + pymisp = PyMISP(self.domain, self.key) + error = pymisp.get(1) + response = self.auth_error_msg + response['errors'] = [response['message']] + self.assertEqual(error, response) + + def test_newEvent(self, m): + error_empty_info = {'message': 'The event could not be saved.', 'name': 'Add event failed.', 'errors': {'Event': {'info': ['Info cannot be empty.']}}, 'url': '/events/add'} + error_empty_info_flatten = {u'message': u'The event could not be saved.', u'name': u'Add event failed.', u'errors': [u"Error in info: Info cannot be empty."], u'url': u'/events/add'} + self.initURI(m) + pymisp = PyMISP(self.domain, self.key) + m.register_uri('POST', self.domain + 'events', json=error_empty_info) + # TODO Add test exception if info field isn't set + response = pymisp.new_event(0, 1, 0, 'Foo') + self.assertEqual(response, error_empty_info_flatten) + m.register_uri('POST', self.domain + 'events', json=self.new_misp_event) + response = pymisp.new_event(0, 1, 0, "This is a test.", '2016-08-26', False) + self.assertEqual(response, self.new_misp_event) + + def test_eventObject(self, m): + self.initURI(m) + pymisp = PyMISP(self.domain, self.key) + misp_event = MISPEvent(pymisp.describe_types) + with open('tests/57c4445b-c548-4654-af0b-4be3950d210f.json', 'r') as f: + misp_event.load(f.read()) + json.dumps(misp_event, cls=EncodeUpdate) + json.dumps(misp_event, cls=EncodeFull) + + def test_searchIndexByTagId(self, m): + self.initURI(m) + pymisp = PyMISP(self.domain, self.key) + response = pymisp.search_index(tag="1") + self.assertEqual(response['response'], self.search_index_result) + + def test_searchIndexByTagName(self, m): + self.initURI(m) + pymisp = PyMISP(self.domain, self.key) + response = pymisp.search_index(tag='ecsirt:malicious-code="ransomware"') + self.assertEqual(response['response'], self.search_index_result) + + def test_addAttributes(self, 
m): + class MockPyMISP(PyMISP): + def _send_attributes(self, event, attributes, proposal=False): + return len(attributes) + self.initURI(m) + p = MockPyMISP(self.domain, self.key) + evt = p.get(1) + self.assertEquals(3, p.add_hashes(evt, md5='68b329da9893e34099c7d8ad5cb9c940', + sha1='adc83b19e793491b1c6ea0fd8b46cd9f32e592fc', + sha256='01ba4719c80b6fe911b091a7c05124b64eeece964e09c058ef8f9805daca546b', + filename='foobar.exe')) + self.assertEquals(3, p.add_hashes(evt, md5='68b329da9893e34099c7d8ad5cb9c940', + sha1='adc83b19e793491b1c6ea0fd8b46cd9f32e592fc', + sha256='01ba4719c80b6fe911b091a7c05124b64eeece964e09c058ef8f9805daca546b')) + p.av_detection_link(evt, 'https://foocorp.com') + p.add_detection_name(evt, 'WATERMELON') + p.add_filename(evt, 'foobar.exe') + p.add_regkey(evt, 'HKLM\\Software\\Microsoft\\Outlook\\Addins\\foobar') + p.add_regkey(evt, 'HKLM\\Software\\Microsoft\\Outlook\\Addins\\foobar', rvalue='foobar') + regkeys = { + 'HKLM\\Software\\Microsoft\\Outlook\\Addins\\foo': None, + 'HKLM\\Software\\Microsoft\\Outlook\\Addins\\bar': 'baz', + 'HKLM\\Software\\Microsoft\\Outlook\\Addins\\bae': 0, + } + self.assertEqual(3, p.add_regkeys(evt, regkeys)) + p.add_pattern(evt, '.*foobar.*', in_memory=True) + p.add_pattern(evt, '.*foobar.*', in_file=True) + self.assertRaises(pm.PyMISPError, p.add_pattern, evt, '.*foobar.*', in_memory=False, in_file=False) + p.add_pipe(evt, 'foo') + p.add_pipe(evt, '\\.\\pipe\\foo') + self.assertEquals(3, p.add_pipe(evt, ['foo', 'bar', 'baz'])) + self.assertEquals(3, p.add_pipe(evt, ['foo', 'bar', '\\.\\pipe\\baz'])) + p.add_mutex(evt, 'foo') + self.assertEquals(1, p.add_mutex(evt, '\\BaseNamedObjects\\foo')) + self.assertEquals(3, p.add_mutex(evt, ['foo', 'bar', 'baz'])) + self.assertEquals(3, p.add_mutex(evt, ['foo', 'bar', '\\BaseNamedObjects\\baz'])) + p.add_yara(evt, 'rule Foo {}') + self.assertEquals(2, p.add_yara(evt, ['rule Foo {}', 'rule Bar {}'])) + p.add_ipdst(evt, '1.2.3.4') + self.assertEquals(2, p.add_ipdst(evt, 
['1.2.3.4', '5.6.7.8'])) + p.add_ipsrc(evt, '1.2.3.4') + self.assertEquals(2, p.add_ipsrc(evt, ['1.2.3.4', '5.6.7.8'])) + p.add_hostname(evt, 'a.foobar.com') + self.assertEquals(2, p.add_hostname(evt, ['a.foobar.com', 'a.foobaz.com'])) + p.add_domain(evt, 'foobar.com') + self.assertEquals(2, p.add_domain(evt, ['foobar.com', 'foobaz.com'])) + p.add_domain_ip(evt, 'foo.com', '1.2.3.4') + self.assertEquals(2, p.add_domain_ip(evt, 'foo.com', ['1.2.3.4', '5.6.7.8'])) + self.assertEquals(2, p.add_domains_ips(evt, {'foo.com': '1.2.3.4', 'bar.com': '4.5.6.7'})) + p.add_url(evt, 'https://example.com') + self.assertEquals(2, p.add_url(evt, ['https://example.com', 'http://foo.com'])) + p.add_useragent(evt, 'Mozilla') + self.assertEquals(2, p.add_useragent(evt, ['Mozilla', 'Godzilla'])) + p.add_traffic_pattern(evt, 'blabla') + p.add_snort(evt, 'blaba') + p.add_net_other(evt, 'blabla') + p.add_email_src(evt, 'foo@bar.com') + p.add_email_dst(evt, 'foo@bar.com') + p.add_email_subject(evt, 'you won the lottery') + p.add_email_attachment(evt, 'foo.doc') + p.add_target_email(evt, 'foo@bar.com') + p.add_target_user(evt, 'foo') + p.add_target_machine(evt, 'foobar') + p.add_target_org(evt, 'foobar') + p.add_target_location(evt, 'foobar') + p.add_target_external(evt, 'foobar') + p.add_threat_actor(evt, 'WATERMELON') + p.add_internal_link(evt, 'foobar') + p.add_internal_comment(evt, 'foobar') + p.add_internal_text(evt, 'foobar') + p.add_internal_other(evt, 'foobar') + p.add_attachment(evt, "testFile", "Attacment added!") +if __name__ == '__main__': + unittest.main()