pull/1313/head
Delta-Sierra 2024-12-09 08:08:29 +01:00
commit 23637d60ca
208 changed files with 40593 additions and 18370 deletions

17
.github/dependabot.yml vendored Normal file

@ -0,0 +1,17 @@
# To get started with Dependabot version updates, you'll need to specify which
# package ecosystems to update and where the package manifests are located.
# Please see the documentation for all configuration options:
# https://help.github.com/github/administering-a-repository/configuration-options-for-dependency-updates
version: 2
updates:
- package-ecosystem: "pip"
directory: "/"
schedule:
interval: "daily"
- package-ecosystem: "github-actions"
directory: "/"
schedule:
# Check for updates to GitHub Actions every weekday
interval: "daily"

74
.github/workflows/codeql-analysis.yml vendored Normal file

@ -0,0 +1,74 @@
# For most projects, this workflow file will not need changing; you simply need
# to commit it to your repository.
#
# You may wish to alter this file to override the set of languages analyzed,
# or to provide custom queries or build logic.
#
# ******** NOTE ********
# We have attempted to detect the languages in your repository. Please check
# the `language` matrix defined below to confirm you have the correct set of
# supported CodeQL languages.
#
name: "CodeQL"
on:
push:
branches: [ "main" ]
pull_request:
# The branches below must be a subset of the branches above
branches: [ "main" ]
schedule:
- cron: '21 10 * * 1'
jobs:
analyze:
name: Analyze
runs-on: ubuntu-latest
permissions:
actions: read
contents: read
security-events: write
strategy:
fail-fast: false
matrix:
language: [ 'python' ]
# CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python', 'ruby' ]
# Learn more about CodeQL language support at https://aka.ms/codeql-docs/language-support
steps:
- name: Checkout repository
uses: actions/checkout@v4
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@v3
with:
languages: ${{ matrix.language }}
# If you wish to specify custom queries, you can do so here or in a config file.
# By default, queries listed here will override any specified in a config file.
# Prefix the list here with "+" to use these queries and those in the config file.
# For details on CodeQL's query packs, refer to: https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs
# queries: security-extended,security-and-quality
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
# If this step fails, then you should remove it and run the build manually (see below)
- name: Autobuild
uses: github/codeql-action/autobuild@v3
# Command-line programs to run using the OS shell.
# 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
# If the Autobuild fails above, remove it and uncomment the following three lines.
# modify them (or add more) to build your code; if your project needs custom build steps, refer to the example below for guidance.
# - run: |
# echo "Run, Build Application using script"
# ./location_of_script_within_repo/buildscript.sh
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v3
with:
category: "/language:${{matrix.language}}"

50
.github/workflows/pytest.yml vendored Normal file

@ -0,0 +1,50 @@
name: Python application
on:
push:
branches: [ main ]
pull_request:
branches: [ main ]
jobs:
build:
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
python-version: ['3.9', '3.10', '3.11', '3.12', '3.13']
steps:
- uses: actions/checkout@v4
with:
submodules: recursive
- name: Set up Python ${{matrix.python-version}}
uses: actions/setup-python@v5
with:
python-version: ${{matrix.python-version}}
- name: Install python 3.13 specific dependencies
if: ${{ matrix.python-version == '3.13' }}
run: |
sudo apt-get install -y build-essential python3-dev libfuzzy-dev
- name: Install Python dependencies
run: |
python -m pip install --upgrade pip poetry
poetry install -E fileobjects -E openioc -E virustotal -E docs -E pdfexport -E url -E email -E brotli -vvv
- name: Test with pytest
run: |
poetry run pytest --cov=pymisp tests/test_*.py
poetry run mypy .
- name: Test with pytest and orjson
run: |
pip3 install orjson
poetry run pytest --cov=pymisp tests/test_*.py
- name: Upload coverage to Codecov
uses: codecov/codecov-action@v5

27
.github/workflows/release.yml vendored Normal file

@ -0,0 +1,27 @@
on:
release:
types:
- published
name: release
jobs:
pypi-publish:
name: Upload release to PyPI
runs-on: ubuntu-latest
environment:
name: pypi
url: https://pypi.org/p/pymisp
permissions:
id-token: write # IMPORTANT: this permission is mandatory for trusted publishing
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 0
submodules: 'recursive'
- name: Install Poetry
run: python -m pip install --upgrade pip poetry
- name: Build artifacts
run: poetry build
- name: Publish package distributions to PyPI
uses: pypa/gh-action-pypi-publish@release/v1

8
.gitignore vendored

@ -1,13 +1,21 @@
*.swp
*.pem
*.pyc
docs/build/
examples/keys.py
examples/cudeso.py
examples/feed-generator/output/*\.json
examples/feed-generator/output/hashes\.csv
examples/feed-generator/settings\.py
examples/feed_generator/output/*\.json
examples/feed_generator/output/hashes\.csv
examples/feed_generator/settings\.py
tests/reportlab_testoutputs/*\.pdf
build/*
dist/*
pymisp.egg-info/*
.coverage
.idea
tests/keys.py

3
.gitmodules vendored

@ -1,6 +1,3 @@
[submodule "pymisp/data/misp-objects"]
path = pymisp/data/misp-objects
url = https://github.com/MISP/misp-objects
[submodule "pymisp/tools/pdf_fonts"]
path = pymisp/tools/pdf_fonts
url = https://github.com/MISP/pdf_fonts

16
.pre-commit-config.yaml Normal file

@ -0,0 +1,16 @@
# See https://pre-commit.com for more information
# See https://pre-commit.com/hooks.html for more hooks
exclude: "tests/data"
repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v4.6.0
hooks:
- id: trailing-whitespace
- id: end-of-file-fixer
- id: check-yaml
- id: check-added-large-files
- repo: https://github.com/asottile/pyupgrade
rev: v3.17.0
hooks:
- id: pyupgrade
args: [--py38-plus]


@ -1,14 +1,14 @@
version: 2
build:
os: "ubuntu-22.04"
tools:
python: "3"
python:
version: 3.6
install:
- method: pip
path: .
extra_requirements:
- docs
build:
image: latest
formats: all


@ -1,30 +0,0 @@
language: python
cache: pip
addons:
apt:
sources: [ 'ubuntu-toolchain-r-test' ]
packages:
- libstdc++6
- libfuzzy-dev
python:
- "2.7"
- "3.5-dev"
- "3.6"
- "3.6-dev"
install:
- pip install pipenv
- pipenv install --dev
- pushd tests
- git clone https://github.com/viper-framework/viper-test-files.git
- popd
script:
- pipenv run nosetests --with-coverage --cover-package=pymisp,tests --cover-tests tests/test_*.py
after_success:
- pipenv run codecov
- pipenv run coveralls

File diff suppressed because it is too large


@ -1,10 +0,0 @@
graft docs
graft examples
graft tests
include CHANGELOG.txt
include LICENSE
include pymisp/data/*.json
include pymisp/data/misp-objects/*.json
include pymisp/data/misp-objects/objects/*/definition.json
include pymisp/data/misp-objects/relationships/definition.json
include README.md

18
Pipfile

@ -1,18 +0,0 @@
[[source]]
name = "pypi"
url = "https://pypi.org/simple"
verify_ssl = true
[dev-packages]
nose = "*"
coveralls = "*"
codecov = "*"
requests-mock = "*"
[packages]
pymisp = {editable = true,extras = ["fileobjects", "neo", "openioc", "virustotal", "pdfexport"],path = "."}
pydeep = {editable = true,git = "https://github.com/kbandla/pydeep.git"}
pymispwarninglists = {editable = true,git = "https://github.com/MISP/PyMISPWarningLists.git"}
[requires]
python_version = "3.6"

489
Pipfile.lock generated

@ -1,489 +0,0 @@
{
"_meta": {
"hash": {
"sha256": "c95b6920af9d48d6e38e0456394f752479064c9f3091cf3e6b93e751de21cfad"
},
"pipfile-spec": 6,
"requires": {
"python_version": "3.6"
},
"sources": [
{
"name": "pypi",
"url": "https://pypi.org/simple",
"verify_ssl": true
}
]
},
"default": {
"asn1crypto": {
"hashes": [
"sha256:2f1adbb7546ed199e3c90ef23ec95c5cf3585bac7d11fb7eb562a3fe89c64e87",
"sha256:9d5c20441baf0cb60a4ac34cc447c6c189024b6b4c6cd7877034f4965c464e49"
],
"version": "==0.24.0"
},
"attrs": {
"hashes": [
"sha256:69c0dbf2ed392de1cb5ec704444b08a5ef81680a61cb899dc08127123af36a79",
"sha256:f0b870f674851ecbfbbbd364d6b5cbdff9dcedbc7f3f5e18a6891057f21fe399"
],
"version": "==19.1.0"
},
"beautifulsoup4": {
"hashes": [
"sha256:034740f6cb549b4e932ae1ab975581e6103ac8f942200a0e9759065984391858",
"sha256:945065979fb8529dd2f37dbb58f00b661bdbcbebf954f93b32fdf5263ef35348",
"sha256:ba6d5c59906a85ac23dadfe5c88deaf3e179ef565f4898671253e50a78680718"
],
"version": "==4.7.1"
},
"certifi": {
"hashes": [
"sha256:59b7658e26ca9c7339e00f8f4636cdfe59d34fa37b9b04f6f9e9926b3cece1a5",
"sha256:b26104d6835d1f5e49452a26eb2ff87fe7090b89dfcaee5ea2212697e1e1d7ae"
],
"version": "==2019.3.9"
},
"cffi": {
"hashes": [
"sha256:00b97afa72c233495560a0793cdc86c2571721b4271c0667addc83c417f3d90f",
"sha256:0ba1b0c90f2124459f6966a10c03794082a2f3985cd699d7d63c4a8dae113e11",
"sha256:0bffb69da295a4fc3349f2ec7cbe16b8ba057b0a593a92cbe8396e535244ee9d",
"sha256:21469a2b1082088d11ccd79dd84157ba42d940064abbfa59cf5f024c19cf4891",
"sha256:2e4812f7fa984bf1ab253a40f1f4391b604f7fc424a3e21f7de542a7f8f7aedf",
"sha256:2eac2cdd07b9049dd4e68449b90d3ef1adc7c759463af5beb53a84f1db62e36c",
"sha256:2f9089979d7456c74d21303c7851f158833d48fb265876923edcb2d0194104ed",
"sha256:3dd13feff00bddb0bd2d650cdb7338f815c1789a91a6f68fdc00e5c5ed40329b",
"sha256:4065c32b52f4b142f417af6f33a5024edc1336aa845b9d5a8d86071f6fcaac5a",
"sha256:51a4ba1256e9003a3acf508e3b4f4661bebd015b8180cc31849da222426ef585",
"sha256:59888faac06403767c0cf8cfb3f4a777b2939b1fbd9f729299b5384f097f05ea",
"sha256:59c87886640574d8b14910840327f5cd15954e26ed0bbd4e7cef95fa5aef218f",
"sha256:610fc7d6db6c56a244c2701575f6851461753c60f73f2de89c79bbf1cc807f33",
"sha256:70aeadeecb281ea901bf4230c6222af0248c41044d6f57401a614ea59d96d145",
"sha256:71e1296d5e66c59cd2c0f2d72dc476d42afe02aeddc833d8e05630a0551dad7a",
"sha256:8fc7a49b440ea752cfdf1d51a586fd08d395ff7a5d555dc69e84b1939f7ddee3",
"sha256:9b5c2afd2d6e3771d516045a6cfa11a8da9a60e3d128746a7fe9ab36dfe7221f",
"sha256:9c759051ebcb244d9d55ee791259ddd158188d15adee3c152502d3b69005e6bd",
"sha256:b4d1011fec5ec12aa7cc10c05a2f2f12dfa0adfe958e56ae38dc140614035804",
"sha256:b4f1d6332339ecc61275bebd1f7b674098a66fea11a00c84d1c58851e618dc0d",
"sha256:c030cda3dc8e62b814831faa4eb93dd9a46498af8cd1d5c178c2de856972fd92",
"sha256:c2e1f2012e56d61390c0e668c20c4fb0ae667c44d6f6a2eeea5d7148dcd3df9f",
"sha256:c37c77d6562074452120fc6c02ad86ec928f5710fbc435a181d69334b4de1d84",
"sha256:c8149780c60f8fd02752d0429246088c6c04e234b895c4a42e1ea9b4de8d27fb",
"sha256:cbeeef1dc3c4299bd746b774f019de9e4672f7cc666c777cd5b409f0b746dac7",
"sha256:e113878a446c6228669144ae8a56e268c91b7f1fafae927adc4879d9849e0ea7",
"sha256:e21162bf941b85c0cda08224dade5def9360f53b09f9f259adb85fc7dd0e7b35",
"sha256:fb6934ef4744becbda3143d30c6604718871495a5e36c408431bf33d9c146889"
],
"version": "==1.12.2"
},
"chardet": {
"hashes": [
"sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae",
"sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691"
],
"version": "==3.0.4"
},
"click": {
"hashes": [
"sha256:2335065e6395b9e67ca716de5f7526736bfa6ceead690adf616d925bdc622b13",
"sha256:5b94b49521f6456670fdb30cd82a4eca9412788a93fa6dd6df72c94d5a8ff2d7"
],
"version": "==7.0"
},
"colorama": {
"hashes": [
"sha256:05eed71e2e327246ad6b38c540c4a3117230b19679b875190486ddd2d721422d",
"sha256:f8ac84de7840f5b9c4e3347b3c1eaa50f7e49c2b07596221daec5edaabbd7c48"
],
"version": "==0.4.1"
},
"cryptography": {
"hashes": [
"sha256:066f815f1fe46020877c5983a7e747ae140f517f1b09030ec098503575265ce1",
"sha256:210210d9df0afba9e000636e97810117dc55b7157c903a55716bb73e3ae07705",
"sha256:26c821cbeb683facb966045e2064303029d572a87ee69ca5a1bf54bf55f93ca6",
"sha256:2afb83308dc5c5255149ff7d3fb9964f7c9ee3d59b603ec18ccf5b0a8852e2b1",
"sha256:2db34e5c45988f36f7a08a7ab2b69638994a8923853dec2d4af121f689c66dc8",
"sha256:409c4653e0f719fa78febcb71ac417076ae5e20160aec7270c91d009837b9151",
"sha256:45a4f4cf4f4e6a55c8128f8b76b4c057027b27d4c67e3fe157fa02f27e37830d",
"sha256:48eab46ef38faf1031e58dfcc9c3e71756a1108f4c9c966150b605d4a1a7f659",
"sha256:6b9e0ae298ab20d371fc26e2129fd683cfc0cfde4d157c6341722de645146537",
"sha256:6c4778afe50f413707f604828c1ad1ff81fadf6c110cb669579dea7e2e98a75e",
"sha256:8c33fb99025d353c9520141f8bc989c2134a1f76bac6369cea060812f5b5c2bb",
"sha256:9873a1760a274b620a135054b756f9f218fa61ca030e42df31b409f0fb738b6c",
"sha256:9b069768c627f3f5623b1cbd3248c5e7e92aec62f4c98827059eed7053138cc9",
"sha256:9e4ce27a507e4886efbd3c32d120db5089b906979a4debf1d5939ec01b9dd6c5",
"sha256:acb424eaca214cb08735f1a744eceb97d014de6530c1ea23beb86d9c6f13c2ad",
"sha256:c8181c7d77388fe26ab8418bb088b1a1ef5fde058c6926790c8a0a3d94075a4a",
"sha256:d4afbb0840f489b60f5a580a41a1b9c3622e08ecb5eec8614d4fb4cd914c4460",
"sha256:d9ed28030797c00f4bc43c86bf819266c76a5ea61d006cd4078a93ebf7da6bfd",
"sha256:e603aa7bb52e4e8ed4119a58a03b60323918467ef209e6ff9db3ac382e5cf2c6"
],
"version": "==2.6.1"
},
"decorator": {
"hashes": [
"sha256:86156361c50488b84a3f148056ea716ca587df2f0de1d34750d35c21312725de",
"sha256:f069f3a01830ca754ba5258fde2278454a0b5b79e0d7f5c13b3b97e57d4acff6"
],
"version": "==4.4.0"
},
"idna": {
"hashes": [
"sha256:c357b3f628cf53ae2c4c05627ecc484553142ca23264e593d327bcde5e9c3407",
"sha256:ea8b7f6188e6fa117537c3df7da9fc686d485087abf6ac197f9c46432f7e4a3c"
],
"version": "==2.8"
},
"ipaddress": {
"hashes": [
"sha256:64b28eec5e78e7510698f6d4da08800a5c575caa4a286c93d651c5d3ff7b6794",
"sha256:b146c751ea45cad6188dd6cf2d9b757f6f4f8d6ffb96a023e6f2e26eea02a72c"
],
"version": "==1.0.22"
},
"jsonschema": {
"hashes": [
"sha256:0c0a81564f181de3212efa2d17de1910f8732fa1b71c42266d983cd74304e20d",
"sha256:a5f6559964a3851f59040d3b961de5e68e70971afb88ba519d27e6a039efff1a"
],
"version": "==3.0.1"
},
"lief": {
"hashes": [
"sha256:c95974006a6b8a767eea8b35e6c63e2b20939730063ac472894b53ab9855a0b5"
],
"version": "==0.9.0"
},
"neobolt": {
"hashes": [
"sha256:3324f2b319e84acb82e37a81ef75f3f7ce71c149387daf900589377db48bed2a"
],
"version": "==1.7.4"
},
"neotime": {
"hashes": [
"sha256:4e0477ba0f24e004de2fa79a3236de2bd941f20de0b5db8d976c52a86d7363eb"
],
"version": "==1.7.4"
},
"pillow": {
"hashes": [
"sha256:051de330a06c99d6f84bcf582960487835bcae3fc99365185dc2d4f65a390c0e",
"sha256:0ae5289948c5e0a16574750021bd8be921c27d4e3527800dc9c2c1d2abc81bf7",
"sha256:0b1efce03619cdbf8bcc61cfae81fcda59249a469f31c6735ea59badd4a6f58a",
"sha256:163136e09bd1d6c6c6026b0a662976e86c58b932b964f255ff384ecc8c3cefa3",
"sha256:18e912a6ccddf28defa196bd2021fe33600cbe5da1aa2f2e2c6df15f720b73d1",
"sha256:24ec3dea52339a610d34401d2d53d0fb3c7fd08e34b20c95d2ad3973193591f1",
"sha256:267f8e4c0a1d7e36e97c6a604f5b03ef58e2b81c1becb4fccecddcb37e063cc7",
"sha256:3273a28734175feebbe4d0a4cde04d4ed20f620b9b506d26f44379d3c72304e1",
"sha256:4c678e23006798fc8b6f4cef2eaad267d53ff4c1779bd1af8725cc11b72a63f3",
"sha256:4d4bc2e6bb6861103ea4655d6b6f67af8e5336e7216e20fff3e18ffa95d7a055",
"sha256:505738076350a337c1740a31646e1de09a164c62c07db3b996abdc0f9d2e50cf",
"sha256:5233664eadfa342c639b9b9977190d64ad7aca4edc51a966394d7e08e7f38a9f",
"sha256:5d95cb9f6cced2628f3e4de7e795e98b2659dfcc7176ab4a01a8b48c2c2f488f",
"sha256:7eda4c737637af74bac4b23aa82ea6fbb19002552be85f0b89bc27e3a762d239",
"sha256:801ddaa69659b36abf4694fed5aa9f61d1ecf2daaa6c92541bbbbb775d97b9fe",
"sha256:825aa6d222ce2c2b90d34a0ea31914e141a85edefc07e17342f1d2fdf121c07c",
"sha256:9c215442ff8249d41ff58700e91ef61d74f47dfd431a50253e1a1ca9436b0697",
"sha256:a3d90022f2202bbb14da991f26ca7a30b7e4c62bf0f8bf9825603b22d7e87494",
"sha256:a631fd36a9823638fe700d9225f9698fb59d049c942d322d4c09544dc2115356",
"sha256:a6523a23a205be0fe664b6b8747a5c86d55da960d9586db039eec9f5c269c0e6",
"sha256:a756ecf9f4b9b3ed49a680a649af45a8767ad038de39e6c030919c2f443eb000",
"sha256:b117287a5bdc81f1bac891187275ec7e829e961b8032c9e5ff38b70fd036c78f",
"sha256:ba04f57d1715ca5ff74bb7f8a818bf929a204b3b3c2c2826d1e1cc3b1c13398c",
"sha256:cd878195166723f30865e05d87cbaf9421614501a4bd48792c5ed28f90fd36ca",
"sha256:cee815cc62d136e96cf76771b9d3eb58e0777ec18ea50de5cfcede8a7c429aa8",
"sha256:d1722b7aa4b40cf93ac3c80d3edd48bf93b9208241d166a14ad8e7a20ee1d4f3",
"sha256:d7c1c06246b05529f9984435fc4fa5a545ea26606e7f450bdbe00c153f5aeaad",
"sha256:e9c8066249c040efdda84793a2a669076f92a301ceabe69202446abb4c5c5ef9",
"sha256:f227d7e574d050ff3996049e086e1f18c7bd2d067ef24131e50a1d3fe5831fbc",
"sha256:fc9a12aad714af36cf3ad0275a96a733526571e52710319855628f476dcb144e"
],
"version": "==5.4.1"
},
"prompt-toolkit": {
"hashes": [
"sha256:11adf3389a996a6d45cc277580d0d53e8a5afd281d0c9ec71b28e6f121463780",
"sha256:2519ad1d8038fd5fc8e770362237ad0364d16a7650fb5724af6997ed5515e3c1",
"sha256:977c6583ae813a37dc1c2e1b715892461fcbdaa57f6fc62f33a528c4886c8f55"
],
"version": "==2.0.9"
},
"py2neo": {
"hashes": [
"sha256:c25d24a1504bbfaf61e862e29953f17ad67a4810d55531b1436ad0c7664d85fd"
],
"version": "==4.2.0"
},
"pycparser": {
"hashes": [
"sha256:a988718abfad80b6b157acce7bf130a30876d27603738ac39f140993246b25b3"
],
"version": "==2.19"
},
"pydeep": {
"editable": true,
"git": "https://github.com/kbandla/pydeep.git",
"ref": "bc0d33bff4b45718b4c5f2c79d4715d92a427eda"
},
"pygments": {
"hashes": [
"sha256:5ffada19f6203563680669ee7f53b64dabbeb100eb51b61996085e99c03b284a",
"sha256:e8218dd399a61674745138520d0d4cf2621d7e032439341bc3f647bff125818d"
],
"version": "==2.3.1"
},
"pymisp": {
"editable": true,
"extras": [
"fileobjects",
"neo",
"openioc",
"virustotal",
"pdfexport"
],
"path": "."
},
"pymispwarninglists": {
"editable": true,
"git": "https://github.com/MISP/PyMISPWarningLists.git",
"ref": "d512ca91ae0635407754933099d6f3dd654dbcfe"
},
"pyopenssl": {
"hashes": [
"sha256:aeca66338f6de19d1aa46ed634c3b9ae519a64b458f8468aec688e7e3c20f200",
"sha256:c727930ad54b10fc157015014b666f2d8b41f70c0d03e83ab67624fd3dd5d1e6"
],
"version": "==19.0.0"
},
"pyrsistent": {
"hashes": [
"sha256:3ca82748918eb65e2d89f222b702277099aca77e34843c5eb9d52451173970e2"
],
"version": "==0.14.11"
},
"python-dateutil": {
"hashes": [
"sha256:7e6584c74aeed623791615e26efd690f29817a27c73085b78e4bad02493df2fb",
"sha256:c89805f6f4d64db21ed966fda138f8a5ed7a4fdbc1a8ee329ce1b74e3c74da9e"
],
"version": "==2.8.0"
},
"python-magic": {
"hashes": [
"sha256:f2674dcfad52ae6c49d4803fa027809540b130db1dec928cfbb9240316831375",
"sha256:f3765c0f582d2dfc72c15f3b5a82aecfae9498bd29ca840d72f37d7bd38bfcd5"
],
"version": "==0.4.15"
},
"pytz": {
"hashes": [
"sha256:32b0891edff07e28efe91284ed9c31e123d84bea3fd98e1f72be2508f43ef8d9",
"sha256:d5f05e487007e29e03409f9398d074e158d920d36eb82eaf66fb1136b0c5374c"
],
"version": "==2018.9"
},
"reportlab": {
"hashes": [
"sha256:069f684cd0aaa518a27dc9124aed29cee8998e21ddf19604e53214ec8462bdd7",
"sha256:09b68ec01d86b4b120456b3f3202570ec96f57624e3a4fc36f3829323391daa4",
"sha256:0c32be9a406172c29ea20ff55a709ccac1e7fb09f15aba67cb7b455fd1d3dbe0",
"sha256:233196cf25e97cfe7c452524ea29d9a4909f1cb66599299233be1efaaaa7a7a3",
"sha256:2b5e4533f3e5b962835a5ce44467e66d1ecc822761d1b508077b5087a06be338",
"sha256:2e860bcdace5a558356802a92ae8658d7e5fdaa00ded82e83a3f2987c562cb66",
"sha256:3546029e63a9a9dc24ee38959eb417678c2425b96cd27b31e09e216dafc94666",
"sha256:4452b93f9c73b6b70311e7d69082d64da81b38e91bfb4766397630092e6da6fd",
"sha256:528c74a1c6527d1859c2c7a64a94a1cba485b00175162ea23699ae58a1e94939",
"sha256:6116e750f98018febc08dfee6df20446cf954adbcfa378d2c703d56c8864aff3",
"sha256:6b2b3580c647d75ef129172cb3da648cdb24566987b0b59c5ebb80ab770748d6",
"sha256:727b5f2bed08552d143fc99649b1863c773729f580a416844f9d9967bb0a1ae8",
"sha256:74c24a3ec0a3d4f8acb13a07192f45bdb54a1cc3c2286241677e7e8bcd5011fa",
"sha256:98ccd2f8b4f8636db05f3f14db0b471ad6bb4b66ae0dc9052c4822b3bd5d6a7d",
"sha256:a5905aa567946bc938b489a7249c7890c3fd3c9b7b5680dece5bc551c2ddbe0d",
"sha256:acbb7f676b8586b770719e9683eda951fdb38eb7970d46fcbf3cdda88d912a64",
"sha256:b5e30f865add48cf880f1c363eb505b97f2f7baaa88c155f87a335a76515a3e5",
"sha256:be2a7c33a2c28bbd3f453ffe4f0e5200b88c803a097f4cf52d69c6b53fad7a8f",
"sha256:c356bb600f59ac64955813d6497a08bfd5d0c451cb5829b61e3913d0ac084e26",
"sha256:c7ec4ae2393beab584921b1287a04e94fd98c28315e348362d89b85f4b464546",
"sha256:d476edc831bb3e9ebd04d1403abaf3ea57b3e4c2276c91a54fdfb6efbd3f9d97",
"sha256:db059e1a0691c872784062421ec51848539eb4f5210142682e61059a5ca7cc55",
"sha256:dd423a6753509ab14a0ac1b5be39d219c8f8d3781cce3deb4f45eda31969b5e8",
"sha256:ed9b7c0d71ce6fe2b31c6cde530ad8238632b876a5d599218739bda142a77f7c",
"sha256:f0a2465af4006f97b05e1f1546d67d3a3213d414894bf28be7f87f550a7f4a55",
"sha256:f20bfe26e57e8e1f575a9e0325be04dd3562db9f247ffdd73b5d4df6dec53bc2",
"sha256:f3463f2cb40a1b515ac0133ba859eca58f53b56760da9abb27ed684c565f853c",
"sha256:facc3c9748ab1525fb8401a1223bce4f24f0d6aa1a9db86c55db75777ccf40f9"
],
"version": "==3.5.13"
},
"requests": {
"hashes": [
"sha256:502a824f31acdacb3a35b6690b5fbf0bc41d63a24a45c4004352b0242707598e",
"sha256:7bf2a778576d825600030a110f3c0e3e8edc51dfaafe1c146e39a2027784957b"
],
"version": "==2.21.0"
},
"six": {
"hashes": [
"sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c",
"sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73"
],
"version": "==1.12.0"
},
"soupsieve": {
"hashes": [
"sha256:afa56bf14907bb09403e5d15fbed6275caa4174d36b975226e3b67a3bb6e2c4b",
"sha256:eaed742b48b1f3e2d45ba6f79401b2ed5dc33b2123dfe216adb90d4bfa0ade26"
],
"version": "==1.8"
},
"urllib3": {
"extras": [
"secure"
],
"hashes": [
"sha256:61bf29cada3fc2fbefad4fdf059ea4bd1b4a86d2b6d15e1c7c0b582b9752fe39",
"sha256:de9529817c93f27c8ccbfead6985011db27bd0ddfcdb2d86f3f663385c6a9c22"
],
"version": "==1.24.1"
},
"validators": {
"hashes": [
"sha256:68e4b74889aac1270d83636cb1dbcce3d2271e291ab14023cf95e7dbfbbce09d"
],
"version": "==0.12.4"
},
"wcwidth": {
"hashes": [
"sha256:3df37372226d6e63e1b1e1eda15c594bca98a22d33a23832a90998faa96bc65e",
"sha256:f4ebe71925af7b40a864553f761ed559b43544f8f71746c2d756c7fe788ade7c"
],
"version": "==0.1.7"
}
},
"develop": {
"certifi": {
"hashes": [
"sha256:59b7658e26ca9c7339e00f8f4636cdfe59d34fa37b9b04f6f9e9926b3cece1a5",
"sha256:b26104d6835d1f5e49452a26eb2ff87fe7090b89dfcaee5ea2212697e1e1d7ae"
],
"version": "==2019.3.9"
},
"chardet": {
"hashes": [
"sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae",
"sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691"
],
"version": "==3.0.4"
},
"codecov": {
"hashes": [
"sha256:8ed8b7c6791010d359baed66f84f061bba5bd41174bf324c31311e8737602788",
"sha256:ae00d68e18d8a20e9c3288ba3875ae03db3a8e892115bf9b83ef20507732bed4"
],
"index": "pypi",
"version": "==2.0.15"
},
"coverage": {
"hashes": [
"sha256:3684fabf6b87a369017756b551cef29e505cb155ddb892a7a29277b978da88b9",
"sha256:39e088da9b284f1bd17c750ac672103779f7954ce6125fd4382134ac8d152d74",
"sha256:3c205bc11cc4fcc57b761c2da73b9b72a59f8d5ca89979afb0c1c6f9e53c7390",
"sha256:465ce53a8c0f3a7950dfb836438442f833cf6663d407f37d8c52fe7b6e56d7e8",
"sha256:48020e343fc40f72a442c8a1334284620f81295256a6b6ca6d8aa1350c763bbe",
"sha256:5296fc86ab612ec12394565c500b412a43b328b3907c0d14358950d06fd83baf",
"sha256:5f61bed2f7d9b6a9ab935150a6b23d7f84b8055524e7be7715b6513f3328138e",
"sha256:68a43a9f9f83693ce0414d17e019daee7ab3f7113a70c79a3dd4c2f704e4d741",
"sha256:6b8033d47fe22506856fe450470ccb1d8ba1ffb8463494a15cfc96392a288c09",
"sha256:7ad7536066b28863e5835e8cfeaa794b7fe352d99a8cded9f43d1161be8e9fbd",
"sha256:7bacb89ccf4bedb30b277e96e4cc68cd1369ca6841bde7b005191b54d3dd1034",
"sha256:839dc7c36501254e14331bcb98b27002aa415e4af7ea039d9009409b9d2d5420",
"sha256:8f9a95b66969cdea53ec992ecea5406c5bd99c9221f539bca1e8406b200ae98c",
"sha256:932c03d2d565f75961ba1d3cec41ddde00e162c5b46d03f7423edcb807734eab",
"sha256:988529edadc49039d205e0aa6ce049c5ccda4acb2d6c3c5c550c17e8c02c05ba",
"sha256:998d7e73548fe395eeb294495a04d38942edb66d1fa61eb70418871bc621227e",
"sha256:9de60893fb447d1e797f6bf08fdf0dbcda0c1e34c1b06c92bd3a363c0ea8c609",
"sha256:9e80d45d0c7fcee54e22771db7f1b0b126fb4a6c0a2e5afa72f66827207ff2f2",
"sha256:a545a3dfe5082dc8e8c3eb7f8a2cf4f2870902ff1860bd99b6198cfd1f9d1f49",
"sha256:a5d8f29e5ec661143621a8f4de51adfb300d7a476224156a39a392254f70687b",
"sha256:aca06bfba4759bbdb09bf52ebb15ae20268ee1f6747417837926fae990ebc41d",
"sha256:bb23b7a6fd666e551a3094ab896a57809e010059540ad20acbeec03a154224ce",
"sha256:bfd1d0ae7e292105f29d7deaa9d8f2916ed8553ab9d5f39ec65bcf5deadff3f9",
"sha256:c62ca0a38958f541a73cf86acdab020c2091631c137bd359c4f5bddde7b75fd4",
"sha256:c709d8bda72cf4cd348ccec2a4881f2c5848fd72903c185f363d361b2737f773",
"sha256:c968a6aa7e0b56ecbd28531ddf439c2ec103610d3e2bf3b75b813304f8cb7723",
"sha256:df785d8cb80539d0b55fd47183264b7002077859028dfe3070cf6359bf8b2d9c",
"sha256:f406628ca51e0ae90ae76ea8398677a921b36f0bd71aab2099dfed08abd0322f",
"sha256:f46087bbd95ebae244a0eda01a618aff11ec7a069b15a3ef8f6b520db523dcf1",
"sha256:f8019c5279eb32360ca03e9fac40a12667715546eed5c5eb59eb381f2f501260",
"sha256:fc5f4d209733750afd2714e9109816a29500718b32dd9a5db01c0cb3a019b96a"
],
"version": "==4.5.3"
},
"coveralls": {
"hashes": [
"sha256:baa26648430d5c2225ab12d7e2067f75597a4b967034bba7e3d5ab7501d207a1",
"sha256:ff9b7823b15070f26f654837bb02a201d006baaf2083e0514ffd3b34a3ffed81"
],
"index": "pypi",
"version": "==1.7.0"
},
"docopt": {
"hashes": [
"sha256:49b3a825280bd66b3aa83585ef59c4a8c82f2c8a522dbe754a8bc8d08c85c491"
],
"version": "==0.6.2"
},
"idna": {
"hashes": [
"sha256:c357b3f628cf53ae2c4c05627ecc484553142ca23264e593d327bcde5e9c3407",
"sha256:ea8b7f6188e6fa117537c3df7da9fc686d485087abf6ac197f9c46432f7e4a3c"
],
"version": "==2.8"
},
"nose": {
"hashes": [
"sha256:9ff7c6cc443f8c51994b34a667bbcf45afd6d945be7477b52e97516fd17c53ac",
"sha256:dadcddc0aefbf99eea214e0f1232b94f2fa9bd98fa8353711dacb112bfcbbb2a",
"sha256:f1bffef9cbc82628f6e7d7b40d7e255aefaa1adb6a1b1d26c69a8b79e6208a98"
],
"index": "pypi",
"version": "==1.3.7"
},
"requests": {
"hashes": [
"sha256:502a824f31acdacb3a35b6690b5fbf0bc41d63a24a45c4004352b0242707598e",
"sha256:7bf2a778576d825600030a110f3c0e3e8edc51dfaafe1c146e39a2027784957b"
],
"version": "==2.21.0"
},
"requests-mock": {
"hashes": [
"sha256:7a5fa99db5e3a2a961b6f20ed40ee6baeff73503cf0a553cc4d679409e6170fb",
"sha256:8ca0628dc66d3f212878932fd741b02aa197ad53fd2228164800a169a4a826af"
],
"index": "pypi",
"version": "==1.5.2"
},
"six": {
"hashes": [
"sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c",
"sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73"
],
"version": "==1.12.0"
},
"urllib3": {
"extras": [
"secure"
],
"hashes": [
"sha256:61bf29cada3fc2fbefad4fdf059ea4bd1b4a86d2b6d15e1c7c0b582b9752fe39",
"sha256:de9529817c93f27c8ccbfead6985011db27bd0ddfcdb2d86f3f663385c6a9c22"
],
"version": "==1.24.1"
}
}
}

135
README.md

@ -1,57 +1,58 @@
README
======
[![Documentation Status](https://readthedocs.org/projects/pymisp/badge/?version=latest)](http://pymisp.readthedocs.io/?badge=latest)
[![Build Status](https://travis-ci.org/MISP/PyMISP.svg?branch=master)](https://travis-ci.org/MISP/PyMISP)
[![Coverage Status](https://coveralls.io/repos/github/MISP/PyMISP/badge.svg?branch=master)](https://coveralls.io/github/MISP/PyMISP?branch=master)
[![Python 3.6](https://img.shields.io/badge/python-3.6+-blue.svg)](https://www.python.org/downloads/release/python-360/)
[![PyPi version](https://img.shields.io/pypi/v/pymisp.svg)](https://pypi.python.org/pypi/pymisp/)
[![Number of PyPI downloads](https://pypip.in/d/pymisp/badge.png)](https://pypi.python.org/pypi/pymisp/)
**IMPORTANT NOTE**: This library will require **at least** Python 3.10 starting the 1st of January 2024. If you have legacy versions of python, please use the latest PyMISP version that will be released in December 2023, and consider updating your system(s). Anything released within the last 2 years will do, starting with Ubuntu 22.04.
# PyMISP - Python Library to access MISP
[![Documentation Status](https://readthedocs.org/projects/pymisp/badge/?version=latest)](http://pymisp.readthedocs.io/?badge=latest)
[![Coverage Status](https://coveralls.io/repos/github/MISP/PyMISP/badge.svg?branch=main)](https://coveralls.io/github/MISP/PyMISP?branch=main)
[![Python 3.8](https://img.shields.io/badge/python-3.8+-blue.svg)](https://www.python.org/downloads/release/python-380/)
[![PyPi version](https://img.shields.io/pypi/v/pymisp.svg)](https://pypi.python.org/pypi/pymisp/)
[![Number of PyPI downloads](https://img.shields.io/pypi/dm/pymisp.svg)](https://pypi.python.org/pypi/pymisp/)
PyMISP is a Python library to access [MISP](https://github.com/MISP/MISP) platforms via their REST API.
PyMISP allows you to fetch events, add or update events/attributes, add or update samples or search for attributes.
## Requirements
* [requests](http://docs.python-requests.org)
## Install from pip
**It is strongly recommended to use a virtual environment**
If you want to know more about virtual environments, [python has you covered](https://docs.python.org/3/tutorial/venv.html)
Only basic dependencies:
```
pip3 install pymisp
```
## Install the latest version from repo
And there are a few optional dependencies:
* fileobjects: to create PE/ELF/Mach-o objects
* openioc: to import files in OpenIOC format (not really maintained)
* virustotal: to query VirusTotal and generate the appropriate objects
* docs: to generate the documentation
* pdfexport: to generate PDF reports out of MISP events
* url: to generate URL objects out of URLs with Pyfaup
* email: to generate MISP Email objects
* brotli: to use the brotli compression when interacting with a MISP instance
Example:
```
pip3 install pymisp[virustotal,email]
```
## Install the latest version from repo for development purposes
**Note**: poetry is required; e.g., "pip3 install poetry"
```
git clone https://github.com/MISP/PyMISP.git && cd PyMISP
git submodule update --init
pip3 install -I .[fileobjects,neo,openioc,virustotal]
poetry install -E fileobjects -E openioc -E virustotal -E docs -E pdfexport -E email
```
## Installing it with virtualenv
It is recommended to use a virtualenv so you do not pollute your OS Python environment.
```
pip3 install virtualenv
git clone https://github.com/MISP/PyMISP.git && cd PyMISP
python3 -m venv ./
source venv/bin/activate
git submodule update --init
pip3 install -I .[fileobjects,neo,openioc,virustotal]
```
## Running the tests
### Running the tests
```bash
pip3 install -U nose pip setuptools coveralls codecov requests-mock
pip3 install git+https://github.com/kbandla/pydeep.git
git clone https://github.com/viper-framework/viper-test-files.git tests/viper-test-files
nosetests-3.4 --with-coverage --cover-package=pymisp,tests --cover-tests tests/test_*.py
poetry run pytest --cov=pymisp tests/test_*.py
```
If you have a MISP instance to test against, you can also run the live ones:
@ -59,7 +60,7 @@ If you have a MISP instance to test against, you can also run the live ones:
**Note**: You need to update the key in `tests/testlive_comprehensive.py` to the automation key of your admin account.
```bash
nosetests-3.4 --with-coverage --cover-package=pymisp,tests --cover-tests tests/testlive_comprehensive.py
poetry run pytest --cov=pymisp tests/testlive_comprehensive.py
```
## Samples and how to use PyMISP
@ -89,7 +90,7 @@ python3 last.py -l 45m # 45 minutes
## Debugging
You have two options there:
You have two options here:
1. Pass `debug=True` to `PyMISP` and it will enable logging.DEBUG to stderr on the whole module
@ -100,7 +101,7 @@ You have two options there:
import logging
logger = logging.getLogger('pymisp')
# Configure it as you whish, for example, enable DEBUG mode:
# Configure it as you wish, for example, enable DEBUG mode:
logger.setLevel(logging.DEBUG)
```
@ -114,31 +115,37 @@ logger = logging.getLogger('pymisp')
logging.basicConfig(level=logging.DEBUG, filename="debug.log", filemode='w', format=pymisp.FORMAT)
```
## Test cases
1. The content of `mispevent.py` is tested on every commit
2. The test cases that require a running MISP instance can be run the following way:
```bash
# From poetry
pytest --cov=pymisp tests/test_*.py tests/testlive_comprehensive.py:TestComprehensive.[test_name]
```
## Documentation
[PyMISP API documentation is available](https://media.readthedocs.org/pdf/pymisp/latest/pymisp.pdf).
Documentation can be generated with epydoc:
```
epydoc --url https://github.com/MISP/PyMISP --graph all --name PyMISP --pdf pymisp -o doc
```
The documentation is available [here](https://pymisp.readthedocs.io/en/latest/).
### Jupyter notebook
A series of [Jupyter notebooks for PyMISP tutorial](https://github.com/MISP/PyMISP/tree/master/docs/tutorial) are available in the repository.
A series of [Jupyter notebooks for PyMISP tutorial](https://github.com/MISP/PyMISP/tree/main/docs/tutorial) are available in the repository.
## Everything is a Mutable Mapping
... or at least everything that can be imported/exported from/to a json blob
`AbstractMISP` is the master class, and inherit `collections.MutableMapping` which means
`AbstractMISP` is the master class, and inherits from `collections.MutableMapping` which means
the class can be represented as a python dictionary.
The abstraction assumes every property that should not be seen in the dictionary is prepended with a `_`,
or its name is added to the private list `__not_jsonable` (accessible through `update_not_jsonable` and `set_not_jsonable`).
This master class has helpers that will make it easy to load, and export, to, and from, a json string.
This master class has helpers that make it easy to load, and export to, and from, a json string.
`MISPEvent`, `MISPAttribute`, `MISPObjectReference`, `MISPObjectAttribute`, and `MISPObject`
are subclasses of AbstractMISP, which means that they can be handled as Python dictionaries.
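For illustration, here is a minimal sketch (not part of this diff) of that dictionary-like behaviour; it only uses the public `MISPEvent` API and runs without a MISP instance:
```python
from pymisp import MISPEvent

event = MISPEvent()
event.info = 'Demo event'                      # regular attribute access...
print(event['info'])                           # ...or dictionary-style access
event.add_attribute('ip-dst', '198.51.100.1')

json_blob = event.to_json()                    # export to a JSON string
restored = MISPEvent()
restored.from_json(json_blob)                  # load it back from JSON
print(restored.info, len(restored.attributes))
```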
@ -147,6 +154,40 @@ are subclasses of AbstractMISP, which mean that they can be handled as python di
Creating a new MISP object generator should be done using a pre-defined template and should inherit from `AbstractMISPObjectGenerator`.
Your new MISPObject generator need to generate attributes, and add them as class properties using `add_attribute`.
Your new MISPObject generator must generate attributes and add them as class properties using `add_attribute`.
When the object is sent to MISP, all the class properties will be exported to the JSON export.
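As an illustration (not part of this diff), a minimal sketch of such a generator; the template name `my-custom-object` and its object relations `text` and `count` are hypothetical placeholders:
```python
from pymisp.tools import AbstractMISPObjectGenerator


class MyCustomObject(AbstractMISPObjectGenerator):

    def __init__(self, parameters: dict, **kwargs):
        # 'my-custom-object' is a hypothetical template name
        super().__init__('my-custom-object', **kwargs)
        self._parameters = parameters
        self.generate_attributes()

    def generate_attributes(self):
        # Each value is attached under an object relation defined in the template
        self.add_attribute('text', value=self._parameters['text'])
        self.add_attribute('count', value=self._parameters.get('count', 0))
```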
## Installing PyMISP on a machine with no internet access
This is done using poetry and you need to have this repository cloned on your machine.
The commands below have to be run from inside the cloned directory.
1. From a machine with access to the internet, get the dependencies:
```bash
mkdir offline
poetry export --all-extras > offline/requirements.txt
poetry run pip download -r offline/requirements.txt -d offline/packages/
```
2. Prepare the PyMISP Package
```bash
poetry build
mv dist/*.whl offline/packages/
```
3. Copy the content of `offline/packages/` to the machine with no internet access.
4. Install the packages:
```bash
python -m pip install --no-index --no-deps packages/*.whl
```
# License
PyMISP is distributed under an [open source license](./LICENSE), a simplified 2-clause BSD license.


@ -1,5 +1,4 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# PyMISP documentation build configuration file, created by
# sphinx-quickstart on Fri Aug 26 11:39:17 2016.
@ -39,6 +38,8 @@ extensions = [
'sphinx.ext.ifconfig',
'sphinx.ext.viewcode',
'sphinx.ext.napoleon',
'sphinx.ext.imgconverter',
'recommonmark',
]
napoleon_google_docstring = False
@ -75,9 +76,9 @@ author = 'Raphaël Vinot'
# built documents.
#
# The short X.Y version.
version = 'master'
version = 'main'
# The full version, including alpha/beta/rc tags.
release = 'master'
release = 'main'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
@ -131,6 +132,9 @@ pygments_style = 'sphinx'
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = True
# lief is a bit difficult to install
autodoc_mock_imports = ["lief"]
# -- Options for HTML output ----------------------------------------------
@ -439,7 +443,3 @@ epub_exclude_files = ['search.html']
# If false, no index is generated.
#
# epub_use_index = True
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {'https://docs.python.org/': None}


@ -9,7 +9,7 @@ Welcome to PyMISP's documentation!
Contents:
.. toctree::
:maxdepth: 4
:maxdepth: 2
README
modules


@ -1,5 +1,5 @@
pymisp
======
pymisp - Classes
================
.. toctree::
:maxdepth: 4
@ -14,12 +14,6 @@ PyMISP
.. autoclass:: PyMISP
:members:
PyMISPExpanded (Python 3.6+ only)
---------------------------------
.. autoclass:: ExpandedPyMISP
:members:
MISPAbstract
------------
@ -39,6 +33,20 @@ MISPEvent
:members:
:inherited-members:
MISPEventBlocklist
------------------
.. autoclass:: MISPEventBlocklist
:members:
:inherited-members:
MISPEventDelegation
-------------------
.. autoclass:: MISPEventDelegation
:members:
:inherited-members:
MISPAttribute
-------------
@ -67,6 +75,13 @@ MISPObjectReference
:members:
:inherited-members:
MISPObjectTemplate
------------------
.. autoclass:: MISPObjectTemplate
:members:
:inherited-members:
MISPTag
-------
@ -81,6 +96,12 @@ MISPUser
:members:
:inherited-members:
MISPUserSetting
---------------
.. autoclass:: MISPUserSetting
:members:
:inherited-members:
MISPOrganisation
----------------
@ -89,3 +110,87 @@ MISPOrganisation
:members:
:inherited-members:
MISPOrganisationBlocklist
-------------------------
.. autoclass:: MISPOrganisationBlocklist
:members:
:inherited-members:
MISPFeed
--------
.. autoclass:: MISPFeed
:members:
:inherited-members:
MISPInbox
---------
.. autoclass:: MISPInbox
:members:
:inherited-members:
MISPLog
-------
.. autoclass:: MISPLog
:members:
:inherited-members:
MISPNoticelist
--------------
.. autoclass:: MISPNoticelist
:members:
:inherited-members:
MISPRole
--------
.. autoclass:: MISPRole
:members:
:inherited-members:
MISPServer
----------
.. autoclass:: MISPServer
:members:
:inherited-members:
MISPShadowAttribute
-------------------
.. autoclass:: MISPShadowAttribute
:members:
:inherited-members:
MISPSharingGroup
----------------
.. autoclass:: MISPSharingGroup
:members:
:inherited-members:
MISPSighting
------------
.. autoclass:: MISPSighting
:members:
:inherited-members:
MISPTaxonomy
------------
.. autoclass:: MISPTaxonomy
:members:
:inherited-members:
MISPWarninglist
---------------
.. autoclass:: MISPWarninglist
:members:
:inherited-members:


@ -7,10 +7,10 @@
"outputs": [],
"source": [
"# The URL of the MISP instance to connect to\n",
"misp_url = 'http://127.0.0.1:8080'\n",
"misp_url = 'https://127.0.0.1:8443'\n",
"# Can be found in the MISP web interface under ||\n",
"# http://+MISP_URL+/users/view/me -> Authkey\n",
"misp_key = 'LBelWqKY9SQyG0huZzAMqiEBl6FODxpgRRXMsZFu'\n",
"misp_key = 'd6OmdDFvU3Seau3UjwvHS1y3tFQbaRNhJhDX0tjh'\n",
"# Should PyMISP verify the MISP certificate\n",
"misp_verifycert = False"
]
@ -52,9 +52,9 @@
"metadata": {},
"outputs": [],
"source": [
"from pymisp import ExpandedPyMISP\n",
"from pymisp import PyMISP\n",
"\n",
"misp = ExpandedPyMISP(misp_url, misp_key, misp_verifycert, debug=False)"
"misp = PyMISP(misp_url, misp_key, misp_verifycert, debug=False)"
]
},
{
@ -70,7 +70,7 @@
"source": [
"## Search unpublished events\n",
"\n",
"**WARNING**: By default, the search query will only return all the events listed on teh index page"
"**WARNING**: By default, the search query will only return all the events listed on the index page"
]
},
{
@ -79,7 +79,7 @@
"metadata": {},
"outputs": [],
"source": [
"r = misp.search_index(published=False)\n",
"r = misp.search(published=False, metadata=True)\n",
"print(r)"
]
},
@ -96,7 +96,16 @@
"metadata": {},
"outputs": [],
"source": [
"r = misp.search_index(eventid=[17217, 1717, 1721, 17218])"
"r = misp.search(eventid=[1,2,3], metadata=True, pythonify=True)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"r"
]
},
{
@ -112,7 +121,7 @@
"metadata": {},
"outputs": [],
"source": [
"r = misp.search_index(tags=['tlp:white'], pythonify=True)\n",
"r = misp.search(tags=['tlp:white'], metadata=True, pythonify=True)\n",
"for e in r:\n",
" print(e)"
]
@ -123,7 +132,7 @@
"metadata": {},
"outputs": [],
"source": [
"r = misp.search_index(tag='TODO:VT-ENRICHMENT', published=False)"
"print('No attributes are in the event', r[0].attributes)"
]
},
{
@ -132,7 +141,16 @@
"metadata": {},
"outputs": [],
"source": [
"r = misp.search_index(tag=['!TODO:VT-ENRICHMENT', 'tlp:white'], published=False) # ! means \"not this tag\""
"r = misp.search(tags='TODO:VT-ENRICHMENT', published=False)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"r = misp.search(tags=['!TODO:VT-ENRICHMENT', 'tlp:white'], metadata=True, published=False) # ! means \"not this tag\""
]
},
{
@ -148,7 +166,7 @@
"metadata": {},
"outputs": [],
"source": [
"r = misp.search_index(eventinfo='circl')"
"r = misp.search(eventinfo='circl', metadata=True)"
]
},
{
@ -164,7 +182,7 @@
"metadata": {},
"outputs": [],
"source": [
"r = misp.search_index(org='CIRCL')"
"r = misp.search(org='CIRCL', metadata=True)"
]
},
{
@ -180,7 +198,7 @@
"metadata": {},
"outputs": [],
"source": [
"r = misp.search_index(timestamp='1h')"
"r = misp.search(timestamp='1h', metadata=True)"
]
},
{
@ -227,6 +245,28 @@
"complex_query = misp.build_complex_query(or_parameters=['uibo.lembit@mail.ee', '103.195.185.222'])"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"print(complex_query)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"complex_query = misp.build_complex_query(or_parameters=['59.157.4.2', 'hotfixmsupload.com', '8.8.8.8'])\n",
"events = misp.search(value=complex_query, pythonify=True)\n",
"\n",
"for e in events:\n",
" print(e)"
]
},
{
"cell_type": "code",
"execution_count": null,
@ -318,6 +358,24 @@
"r = misp.search(value='8.8.8.8', withAttachments=True) # Return attachments"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"r = misp.search(tags=['%tlp:amber%'], pythonify=True)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"print(r[0].tags)"
]
},
{
"cell_type": "markdown",
"metadata": {},
@ -331,7 +389,7 @@
"metadata": {},
"outputs": [],
"source": [
"r = misp.search(controller='attributes', value='8.8.8.9')"
"r = misp.search(controller='attributes', value='8.8.8.8')"
]
},
{
@ -349,14 +407,7 @@
"metadata": {},
"outputs": [],
"source": [
"r"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Because reason"
"print(r)"
]
},
{
@ -365,22 +416,146 @@
"metadata": {},
"outputs": [],
"source": [
"tag_to_remove = 'foo'\n",
"# Search attributes (specified in controller) where the attribute type is 'ip-src'\n",
"# And the to_ids flag is set\n",
"attributes = misp.search(controller='attributes', type_attribute='ip-src', to_ids=0, pythonify=True)\n",
"\n",
"events = misp.search(tags=tag_to_remove, pythonify=True)\n",
"event_ids = set()\n",
"for attr in attributes:\n",
" event_ids.add(attr.event_id)\n",
"\n",
"for event in events:\n",
" for tag in event.tags:\n",
" if tag.name == tag_to_remove:\n",
" print(f'Got {tag_to_remove} in {event.info}')\n",
" misp.untag(event.uuid, tag_to_remove)\n",
" break\n",
" for attribute in event.attributes:\n",
" for tag in attribute.tags:\n",
" if tag.name == tag_to_remove:\n",
" print(f'Got {tag_to_remove} in {attribute.value}')\n",
" misp.untag(attribute.uuid, tag_to_remove)\n",
" break"
"# Fetch all related events\n",
"for event_id in event_ids:\n",
" event = misp.get_event(event_id)\n",
" print(event.info)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Last *published* attributes"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"attributes = misp.search(controller='attributes', publish_timestamp='1d', pythonify=True)\n",
"\n",
"for attribute in attributes:\n",
" print(attribute.event_id, attribute)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"attributes = misp.search(controller='attributes', publish_timestamp=['2d', '1h'], pythonify=True)\n",
"\n",
"for a in attributes:\n",
" print(a)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Last *updated* attributes"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"scrolled": true
},
"outputs": [],
"source": [
"from datetime import datetime\n",
"\n",
"ts = int(datetime.now().timestamp())\n",
"\n",
"attributes = misp.search(controller='attributes', timestamp=ts - 36000, pythonify=True)\n",
"\n",
"for a in attributes:\n",
" print(a)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Orther output formats\n",
"\n",
"**Warning**: For that to work, the matching event has to be published"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"r = misp.search(controller='attributes', value='8.8.8.8', return_format='csv')"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"r = misp.search(controller='events', value='9.8.8.8', return_format='snort')"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"r = misp.search(controller='events', value='9.8.8.8', return_format='suricata')"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"r = misp.search(controller='events', value='9.8.8.8', return_format='stix')"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"r = misp.search(controller='events', value='9.8.8.8', return_format='stix2')"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"scrolled": true
},
"outputs": [],
"source": [
"print(r)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Search in logs"
]
},
{
@ -410,17 +585,7 @@
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"log = misp.search_logs(model='Tag', title=tag_to_remove)[0]\n",
"roles = misp.get_roles_list()\n",
"for r in roles:\n",
" if r['Role']['name'] == 'User':\n",
" new_role = r['Role']['id']\n",
" break\n",
"user = misp.get_user(log['Log']['user_id'])\n",
"user['User']['role_id'] = new_role\n",
"misp.edit_user(user['User']['id'], **user['User'])"
]
"source": []
}
],
"metadata": {
@ -439,7 +604,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.6.7"
"version": "3.7.5"
}
},
"nbformat": 4,

File diff suppressed because one or more lines are too long


@ -358,9 +358,10 @@
"# And the to_ids flag is set\n",
"attributes = misp.search(controller='attributes', type_attribute='ip-src', to_ids=0, pythonify=True)\n",
"\n",
"# Collect all event_id matching the searched attribute\n",
"event_ids = set()\n",
"for attr in attributes:\n",
" event_ids.add(event_id)\n",
" event_ids.add(attr.event_id)\n",
"\n",
"# Fetch all related events\n",
"for event_id in event_ids:\n",
@ -499,7 +500,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.6.7"
"version": "3.7.3"
}
},
"nbformat": 4,


@ -70,7 +70,7 @@
"source": [
"## Search unpublished events\n",
"\n",
"**WARNING**: By default, the search query will only return all the events listed on teh index page"
"**WARNING**: By default, the search query will only return all the events listed on the index page"
]
},
{
@ -457,7 +457,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.6.7"
"version": "3.7.3"
}
},
"nbformat": 4,


@ -480,7 +480,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.6.7"
"version": "3.7.3"
}
},
"nbformat": 4,


@ -0,0 +1,74 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import csv
from pymisp import PyMISP
from pymisp import ExpandedPyMISP, MISPAttribute
from keys import misp_url, misp_key, misp_verifycert
from requests.packages.urllib3.exceptions import InsecureRequestWarning
import argparse
import urllib3
import requests
requests.packages.urllib3.disable_warnings()
"""
Sample usage:
python3 add_filetype_object_from_csv.py -e <Event_UUID> -f <formatted_file_with_attributes>.csv
Attribute CSV file (each line is an entry):
value;category;type;comment;to_ids;first_seen;last_seen;tag1;tag2
test.pdf;Payload delivery;filename;Email attachment;0;1970-01-01;1970-01-01;tlp:green;ransomware
127.0.0.1;Network activity;ip-dst;C2 server;1;;;tlp:white;
value = IOC's value
category = its MISP category (https://www.circl.lu/doc/misp/categories-and-types/)
type = its MISP type (https://www.circl.lu/doc/misp/categories-and-types/)
comment = IOC's description
to_ids = Boolean expected (0 = IDS flag not checked // 1 = IDS flag checked)
first_seen = First seen date, if any (left empty if not)
last_seen = Last seen date, if any (left empty if not)
tag = IOC tag, if any
"""
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Add attributes to a MISP event from a semicolon-formatted CSV file')
parser.add_argument("-e", "--event_uuid", required=True, help="Event UUID to update")
parser.add_argument("-f", "--attr_file", required=True, help="Attribute CSV file path")
args = parser.parse_args()
pymisp = ExpandedPyMISP(misp_url, misp_key, misp_verifycert)
f = open(args.attr_file, newline='')
csv_reader = csv.reader(f, delimiter=";")
for line in csv_reader:
value = line[0]
category = line[1]
type = line[2]
comment = line[3]
ids = line[4]
fseen = line[5]
lseen = line[6]
tags = line[7:]
misp_attribute = MISPAttribute()
misp_attribute.value = str(value)
misp_attribute.category = str(category)
misp_attribute.type = str(type)
misp_attribute.comment = str(comment)
misp_attribute.to_ids = str(ids)
if fseen != '':
misp_attribute.first_seen = str(fseen)
if lseen != '':
misp_attribute.last_seen = str(lseen)
for x in tags:
misp_attribute.add_tag(x)
r = pymisp.add_attribute(args.event_uuid, misp_attribute)
print(line)
print("\nAttributes successfully saved :)")


@ -1,10 +1,9 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
from pymisp import PyMISP
from pymisp.tools import EMailObject
import traceback
from keys import misp_url, misp_key, misp_verifycert
from keys import misp_url, misp_key, misp_verifycert # type: ignore
import glob
import argparse
@ -20,12 +19,11 @@ if __name__ == '__main__':
for f in glob.glob(args.path):
try:
eo = EMailObject(f)
except Exception as e:
except Exception:
traceback.print_exc()
continue
if eo:
template_id = pymisp.get_object_template_id(eo.template_uuid)
response = pymisp.add_object(args.event, template_id, eo)
response = pymisp.add_object(args.event, eo, pythonify=True)
for ref in eo.ObjectReference:
r = pymisp.add_object_reference(ref)


@ -1,7 +1,7 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
from pymisp import PyMISP, MISPEvent
from pymisp import ExpandedPyMISP, MISPEvent
from pymisp.tools import Fail2BanObject
import argparse
from base64 import b64decode
@ -43,23 +43,23 @@ if __name__ == '__main__':
parser.add_argument("-d", "--disable_new", action='store_true', default=False, help="Do not create a new Event.")
args = parser.parse_args()
pymisp = PyMISP(misp_url, misp_key, misp_verifycert, debug=True)
pymisp = ExpandedPyMISP(misp_url, misp_key, misp_verifycert, debug=True)
event_id = -1
me = None
if args.force_new:
me = create_new_event()
else:
response = pymisp.search_index(tag=args.tag, timestamp='1h')
if response['response']:
response = pymisp.search_index(tags=args.tag, timestamp='1h', pythonify=True)
if response:
if args.disable_new:
event_id = response['response'][0]['id']
event_id = response[0].id
else:
last_event_date = parse(response['response'][0]['date']).date()
nb_attr = response['response'][0]['attribute_count']
last_event_date = parse(response[0].date).date()
nb_attr = response[0].attribute_count
if last_event_date < date.today() or int(nb_attr) > 1000:
me = create_new_event()
else:
event_id = response['response'][0]['id']
event_id = response[0].id
else:
me = create_new_event()
@ -83,5 +83,4 @@ if __name__ == '__main__':
me.add_object(f2b)
pymisp.add_event(me)
elif event_id:
template_id = pymisp.get_object_template_id(f2b.template_uuid)
a = pymisp.add_object(event_id, template_id, f2b)
a = pymisp.add_object(event_id, f2b)


@ -1,7 +1,7 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from pymisp import PyMISP
from pymisp import ExpandedPyMISP, MISPFeed
from keys import misp_url, misp_key, misp_verifycert
import argparse
@ -14,6 +14,12 @@ if __name__ == '__main__':
parser.add_argument("-p", "--provider", required=True, help="Provider name")
args = parser.parse_args()
pm = PyMISP(misp_url, misp_key, misp_verifycert, debug=True)
response = pm.add_feed(args.format, args.url, args.name, args.input, args.provider)
print(response)
pm = ExpandedPyMISP(misp_url, misp_key, misp_verifycert, debug=True)
feed = MISPFeed()
feed.format = args.format
feed.url = args.url
feed.name = args.name
feed.input = args.input
feed.provider = args.provider
response = pm.add_feed(feed, pythonify=True)
print(response.to_json())


@ -19,23 +19,29 @@ if __name__ == '__main__':
for f in glob.glob(args.path):
try:
fo, peo, seos = make_binary_objects(f)
except Exception as e:
except Exception:
traceback.print_exc()
continue
if seos:
for s in seos:
template_id = pymisp.get_object_template_id(s.template_uuid)
r = pymisp.add_object(args.event, template_id, s)
r = pymisp.add_object(args.event, s)
if peo:
template_id = pymisp.get_object_template_id(peo.template_uuid)
r = pymisp.add_object(args.event, template_id, peo)
if hasattr(peo, 'certificates') and hasattr(peo, 'signers'):
# special authenticode case for PE objects
for c in peo.certificates:
pymisp.add_object(args.event, c, pythonify=True)
for s in peo.signers:
pymisp.add_object(args.event, s, pythonify=True)
del peo.certificates
del peo.signers
del peo.sections
r = pymisp.add_object(args.event, peo, pythonify=True)
for ref in peo.ObjectReference:
r = pymisp.add_object_reference(ref)
if fo:
template_id = pymisp.get_object_template_id(fo.template_uuid)
response = pymisp.add_object(args.event, template_id, fo)
response = pymisp.add_object(args.event, fo, pythonify=True)
for ref in fo.ObjectReference:
r = pymisp.add_object_reference(ref)


@ -0,0 +1,53 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import csv
from pymisp import ExpandedPyMISP, MISPObject
from keys import misp_url, misp_key, misp_verifycert
import argparse
"""
Sample usage:
python3 ./add_filetype_object_from_csv.py -e 77bcc9f4-21a8-4252-9353-f4615d6121e3 -f ./attributes.csv
Attribute csv file (2 lines. Each line will be a file MISP Object):
test.pdf;6ff19f8b680df260883d61d7c00db14a8bc57aa0;ea307d60ad0bd1df83ab5119df0bf638;b6c9903c9c38400345ad21faa2df50211d8878c96079c43ae64f35b17c9f74a1
test2.xml;0dcef3d68f43e2badb0bfe3d47fd19633264cd1d;15f453625882f6123e239c9ce2b0fe24;b064514fcc52a769e064c4d61ce0c554fbc81e446af31dddac810879a5ca5b17
"""
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Create a file type MISP Object starting from attributes in a csv file')
parser.add_argument("-e", "--event_uuid", required=True, help="Event UUID to update")
parser.add_argument("-f", "--attr_file", required=True, help="Attribute CSV file path")
args = parser.parse_args()
pymisp = ExpandedPyMISP(misp_url, misp_key, misp_verifycert)
f = open(args.attr_file, newline='')
csv_reader = csv.reader(f, delimiter=";")
for line in csv_reader:
filename = line[0]
sha1 = line[1]
md5 = line[2]
sha256 = line[3]
misp_object = MISPObject(name='file', filename=filename)
obj1 = misp_object.add_attribute("filename", value = filename)
obj1.add_tag('tlp:green')
obj2 = misp_object.add_attribute("sha1", value = sha1)
obj2.add_tag('tlp:amber')
obj3 = misp_object.add_attribute("md5", value = md5)
obj3.add_tag('tlp:amber')
obj4 = misp_object.add_attribute("sha256", value = sha256)
obj4.add_tag('tlp:amber')
r = pymisp.add_object(args.event_uuid, misp_object)
print(line)
print("\nObjects created :)")


@ -2,7 +2,7 @@
# -*- coding: utf-8 -*-
import json
from pymisp import PyMISP
from pymisp import ExpandedPyMISP
from pymisp.tools import GenericObjectGenerator
from keys import misp_url, misp_key, misp_verifycert
import argparse
@ -19,21 +19,8 @@ if __name__ == '__main__':
parser.add_argument("-l", "--attr_list", required=True, help="List of attributes")
args = parser.parse_args()
pymisp = PyMISP(misp_url, misp_key, misp_verifycert)
template = pymisp.get_object_templates_list()
if 'response' in template.keys():
template = template['response']
try:
template_ids = [x['ObjectTemplate']['id'] for x in template if x['ObjectTemplate']['name'] == args.type]
if len(template_ids) > 0:
template_id = template_ids[0]
else:
raise IndexError
except IndexError:
valid_types = ", ".join([x['ObjectTemplate']['name'] for x in template])
print ("Template for type %s not found! Valid types are: %s" % (args.type, valid_types))
exit()
pymisp = ExpandedPyMISP(misp_url, misp_key, misp_verifycert)
misp_object = GenericObjectGenerator(args.type.replace("|", "-"))
misp_object.generate_attributes(json.loads(args.attr_list))
r = pymisp.add_object(args.event, template_id, misp_object)
r = pymisp.add_object(args.event, misp_object)
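For reference, a hedged sketch of how the simplified call above can be driven end to end; the object type and the values in attr_list are made up, and generate_attributes is fed the same JSON list of {object-relation: value} pairs the script expects on -l/--attr_list:
import json
from pymisp import ExpandedPyMISP
from pymisp.tools import GenericObjectGenerator
from keys import misp_url, misp_key, misp_verifycert

attr_list = '[{"ip": "203.0.113.10"}, {"dst-port": "443"}]'  # illustrative values
pymisp = ExpandedPyMISP(misp_url, misp_key, misp_verifycert)
misp_object = GenericObjectGenerator('ip-port')
misp_object.generate_attributes(json.loads(attr_list))
pymisp.add_object('<event id or uuid>', misp_object)  # placeholder event reference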

65
examples/add_github_user.py Executable file
View File

@ -0,0 +1,65 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from pymisp import PyMISP
from pymisp import MISPObject
from pymisp.tools import update_objects
from keys import misp_url, misp_key, misp_verifycert
import argparse
import requests
import sys
"""
usage: add_github_user.py [-h] -e EVENT [-f] -u USERNAME
Fetch GitHub user details and add them as an object in MISP
optional arguments:
-h, --help show this help message and exit
-e EVENT, --event EVENT
Event ID to update
-f, --force-template-update
-u USERNAME, --username USERNAME
GitHub username to add
"""
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Fetch GitHub user details and add them as an object in MISP')
parser.add_argument("-e", "--event", required=True, help="Event ID to update")
parser.add_argument("-f", "--force-template-update", required=False, action="store_true")
parser.add_argument("-u", "--username", required=True, help="GitHub username to add")
args = parser.parse_args()
r = requests.get("https://api.github.com/users/{}".format(args.username))
if r.status_code != 200:
sys.exit("HTTP return is {} and not 200 as expected".format(r.status_code))
if args.force_template_update:
print("Updating MISP Object templates...")
update_objects()
pymisp = PyMISP(misp_url, misp_key, misp_verifycert)
misp_object = MISPObject(name="github-user")
github_user = r.json()
rfollowers = requests.get(github_user['followers_url'])
followers = rfollowers.json()
rfollowing = requests.get("https://api.github.com/users/{}/following".format(args.username))
followings = rfollowing.json()
rkeys = requests.get("https://api.github.com/users/{}/keys".format(args.username))
keys = rkeys.json()
misp_object.add_attributes("follower", *[follower['login'] for follower in followers])
misp_object.add_attributes("following", *[following['login'] for following in followings])
misp_object.add_attributes("ssh-public-key", *[sshkey['key'] for sshkey in keys])
misp_object.add_attribute('bio', github_user['bio'])
misp_object.add_attribute('link', github_user['html_url'])
misp_object.add_attribute('user-fullname', github_user['name'])
misp_object.add_attribute('username', github_user['login'])
misp_object.add_attribute('twitter_username', github_user['twitter_username'])
misp_object.add_attribute('location', github_user['location'])
misp_object.add_attribute('company', github_user['company'])
misp_object.add_attribute('public_gists', github_user['public_gists'])
misp_object.add_attribute('public_repos', github_user['public_repos'])
misp_object.add_attribute('blog', github_user['blog'])
misp_object.add_attribute('node_id', github_user['node_id'])
retcode = pymisp.add_object(args.event, misp_object)
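One caveat with the block above: the GitHub users API returns null for profile fields the user left empty (bio, name, twitter_username, location, company, blog), so a guarded variant may be preferable. A hedged sketch, reusing the github_user and misp_object variables from the script:
# Only add optional profile fields that actually carry a value.
optional_fields = {
    'bio': 'bio',
    'user-fullname': 'name',
    'twitter_username': 'twitter_username',
    'location': 'location',
    'company': 'company',
    'blog': 'blog',
}
for relation, key in optional_fields.items():
    value = github_user.get(key)
    if value:
        misp_object.add_attribute(relation, value)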

56
examples/add_gitlab_user.py Executable file
View File

@ -0,0 +1,56 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from pymisp import PyMISP
from pymisp import MISPObject
from pymisp.tools import update_objects
from keys import misp_url, misp_key, misp_verifycert
import argparse
import requests
import sys
"""
usage: add_gitlab_user.py [-h] -e EVENT [-f] -u USERNAME [-l LINK]
Fetch GitLab user details and add them as an object in MISP
optional arguments:
-h, --help show this help message and exit
-e EVENT, --event EVENT
Event ID to update
-f, --force-template-update
-u USERNAME, --username USERNAME
GitLab username to add
-l LINK, --link LINK URL to access the GitLab instance. Default is
www.gitlab.com.
"""
default_url = "http://www.gitlab.com/"
parser = argparse.ArgumentParser(description='Fetch GitLab user details and add them as an object in MISP')
parser.add_argument("-e", "--event", required=True, help="Event ID to update")
parser.add_argument("-f", "--force-template-update", required=False, action="store_true")
parser.add_argument("-u", "--username", required=True, help="GitLab username to add")
parser.add_argument("-l", "--link", required=False, help="Url to access the GitLab instance, Default is www.gitlab.com.", default=default_url)
args = parser.parse_args()
r = requests.get("{}api/v4/users?username={}".format(args.link, args.username))
if r.status_code != 200:
sys.exit("HTTP return is {} and not 200 as expected".format(r.status_code))
if args.force_template_update:
print("Updating MISP Object templates...")
update_objects()
gitlab_user = r.json()[0]
pymisp = PyMISP(misp_url, misp_key, misp_verifycert)
print(gitlab_user)
misp_object = MISPObject(name="gitlab-user")
misp_object.add_attribute('username', gitlab_user['username'])
misp_object.add_attribute('id', gitlab_user['id'])
misp_object.add_attribute('name', gitlab_user['name'])
misp_object.add_attribute('state', gitlab_user['state'])
misp_object.add_attribute('avatar_url', gitlab_user['avatar_url'])
misp_object.add_attribute('web_url', gitlab_user['web_url'])
retcode = pymisp.add_object(args.event, misp_object)

View File

@ -1,7 +1,7 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from pymisp import PyMISP
from pymisp import ExpandedPyMISP
from keys import misp_url, misp_key, misp_verifycert
import argparse
@ -12,9 +12,6 @@ except NameError:
pass
def init(url, key):
return PyMISP(url, key, misp_verifycert, 'json', debug=True)
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Add an attribute to an event')
parser.add_argument("-e", "--event", help="The id, uuid or json of the event to update.")
@ -22,7 +19,7 @@ if __name__ == '__main__':
parser.add_argument("-v", "--value", help="The value of the attribute")
args = parser.parse_args()
misp = init(misp_url, misp_key)
misp = ExpandedPyMISP(misp_url, misp_key, misp_verifycert)
event = misp.add_named_attribute(args.event, args.type, args.value)
event = misp.add_attribute(args.event, {'type': args.type, 'value': args.value}, pythonify=True)
print(event)

View File

@ -0,0 +1,57 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from pymisp import ExpandedPyMISP, MISPOrganisation, MISPSharingGroup
from keys import misp_url, misp_key, misp_verifycert
import argparse
import csv
# Suppress those "Unverified HTTPS request is being made"
import urllib3
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Add organizations from a CSV file')
parser.add_argument("-c", "--csv-import", required=True, help="The CSV file containing the organizations. Format 'orgname,nationality,sector,type,contacts,uuid,local,sharingroup_uuid'")
args = parser.parse_args()
misp = ExpandedPyMISP(misp_url, misp_key, misp_verifycert)
# CSV format
# orgname,nationality,sector,type,contacts,uuid,local,sharingroup
with open(args.csv_import) as csv_file:
count_orgs = 0
csv_reader = csv.reader(csv_file, delimiter=',')
for row in csv_reader:
org = MISPOrganisation()
org.name = row[0]
print("Process {}".format(org.name))
org.nationality = row[1]
org.sector = row[2]
org.type = row[3]
org.contacts = row[4]
org.uuid = row[5]
org.local = row[6]
add_org = misp.add_organisation(org, pythonify=True)
if 'errors' in add_org:
print(add_org['errors'])
else:
count_orgs = count_orgs + 1
org_uuid = add_org.uuid
if org_uuid:
sharinggroup = MISPSharingGroup()
sharinggroup_uuid = row[7]
if sharinggroup_uuid:
sharinggroup.uuid = sharinggroup_uuid
add_sharing = misp.add_org_to_sharing_group(sharinggroup, org)
else:
print("Organisation {} not added to sharing group, missing sharing group uuid".format(org.name))
print("Import finished, {} organisations added".format(count_orgs))

View File

@ -1,16 +0,0 @@
import json
from pymisp import PyMISP
from keys import misp_url, misp_key, misp_verifycert
from pymisp.tools import SBSignatureObject
pymisp = PyMISP(misp_url, misp_key, misp_verifycert)
a = json.loads('{"signatures":[{"new_data":[],"confidence":100,"families":[],"severity":1,"weight":0,"description":"AttemptstoconnecttoadeadIP:Port(2uniquetimes)","alert":false,"references":[],"data":[{"IP":"95.101.39.58:80(Europe)"},{"IP":"192.35.177.64:80(UnitedStates)"}],"name":"dead_connect"},{"new_data":[],"confidence":30,"families":[],"severity":2,"weight":1,"description":"PerformssomeHTTPrequests","alert":false,"references":[],"data":[{"url":"http://cert.int-x3.letsencrypt.org/"},{"url":"http://apps.identrust.com/roots/dstrootcax3.p7c"}],"name":"network_http"},{"new_data":[],"confidence":100,"families":[],"severity":2,"weight":1,"description":"Theofficefilehasaunconventionalcodepage:ANSICyrillic;Cyrillic(Windows)","alert":false,"references":[],"data":[],"name":"office_code_page"}]}')
a = [(x['name'], x['description']) for x in a["signatures"]]
b = SBSignatureObject(a)
template_id = [x['ObjectTemplate']['id'] for x in pymisp.get_object_templates_list() if x['ObjectTemplate']['name'] == 'sb-signature'][0]
pymisp.add_object(234111, template_id, b)

View File

@ -0,0 +1,29 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
from pymisp import ExpandedPyMISP
from pymisp.tools import SSHAuthorizedKeysObject
import traceback
from keys import misp_url, misp_key, misp_verifycert
import glob
import argparse
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Extract indicators out of authorized_keys file.')
parser.add_argument("-e", "--event", required=True, help="Event ID to update.")
parser.add_argument("-p", "--path", required=True, help="Path to process (expanded using glob).")
args = parser.parse_args()
pymisp = ExpandedPyMISP(misp_url, misp_key, misp_verifycert, debug=True)
for f in glob.glob(args.path):
try:
auth_keys = SSHAuthorizedKeysObject(f)
except Exception:
traceback.print_exc()
continue
response = pymisp.add_object(args.event, auth_keys, pythonify=True)
for ref in auth_keys.ObjectReference:
r = pymisp.add_object_reference(ref)

View File

@ -1,20 +1,10 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from pymisp import PyMISP
from pymisp import ExpandedPyMISP, MISPUser
from keys import misp_url, misp_key, misp_verifycert
import argparse
# For python2 & 3 compat, a bit dirty, but it seems to be the least bad one
try:
input = raw_input
except NameError:
pass
def init(url, key):
return PyMISP(url, key, misp_verifycert, 'json')
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Add a new user by setting the mandatory fields.')
parser.add_argument("-e", "--email", required=True, help="Email linked to the account.")
@ -22,6 +12,11 @@ if __name__ == '__main__':
parser.add_argument("-r", "--role_id", required=True, help="Role linked to the user.")
args = parser.parse_args()
misp = init(misp_url, misp_key)
misp = ExpandedPyMISP(misp_url, misp_key, misp_verifycert, 'json')
print (misp.add_user(args.email, args.org_id, args.role_id))
user = MISPUser()
user.email = args.email
user.org_id = args.org_id
user.role_id = args.role_id
print(misp.add_user(user, pythonify=True))

View File

@ -1,28 +0,0 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from pymisp import PyMISP
from keys import misp_url, misp_key, misp_verifycert
import argparse
# For python2 & 3 compat, a bit dirty, but it seems to be the least bad one
try:
input = raw_input
except NameError:
pass
def init(url, key):
return PyMISP(url, key, misp_verifycert, 'json')
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Add the user described in the given json. If no file is provided, returns a json listing all the fields used to describe a user.')
parser.add_argument("-f", "--json_file", help="The name of the json file describing the user you want to create.")
args = parser.parse_args()
misp = init(misp_url, misp_key)
if args.json_file is None:
print (misp.get_add_user_fields_list())
else:
print(misp.add_user_json(args.json_file))

View File

@ -1,36 +0,0 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from pymisp import PyMISP
from keys import misp_url, misp_key, misp_verifycert
import argparse
import os
import json
def init(url, key):
return PyMISP(url, key, misp_verifycert, 'json')
result = m.get_event(event)
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Get an event from a MISP instance.')
parser.add_argument("-e", "--event", required=True, help="Event ID to get.")
parser.add_argument("-a", "--attribute", help="Attribute ID to modify. A little dirty for now, argument need to be included in event")
parser.add_argument("-t", "--tag", required=True, type=int, help="Attribute ID to modify.")
parser.add_argument("-m", "--modify_attribute", action='store_true', help="If set, the tag will be add to the attribute, otherwise to the event.")
args = parser.parse_args()
misp = init(misp_url, misp_key)
event = misp.get_event(args.event)
if args.modify_attribute:
for temp in event['Event']['Attribute']:
if temp['id'] == args.attribute:
attribute = temp
break
misp.add_tag(attribute, args.tag, attribute=True)
else:
misp.add_tag(event['Event'], args.tag)

View File

@ -3,21 +3,18 @@
from pymisp import PyMISP
from keys import misp_url, misp_key, misp_verifycert
import argparse
import os
import json
def init(url, key):
return PyMISP(url, key, misp_verifycert, 'json')
result = m.get_event(event)
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Tag something.')
parser.add_argument("-u", "--uuid", help="UUID to tag.")
parser.add_argument("-e", "--event", help="Event ID to tag.")
parser.add_argument("-a", "--attribute", help="Attribute ID to tag")
parser.add_argument("-t", "--tag", required=True, help="Attribute ID to modify.")
parser.add_argument("-t", "--tag", required=True, help="Tag ID.")
args = parser.parse_args()
if not args.event and not args.uuid and not args.attribute:
@ -26,12 +23,9 @@ if __name__ == '__main__':
misp = init(misp_url, misp_key)
event = misp.get_event(args.event)
if args.event and not args.attribute:
result = misp.search(eventid=args.event)
data = result['response']
for event in data:
for event in result:
uuid = event['Event']['uuid']
if args.attribute:
@ -39,8 +33,7 @@ if __name__ == '__main__':
print("Please provide event ID also")
exit()
result = misp.search(eventid=args.event)
data = result['response']
for event in data:
for event in result:
for attribute in event['Event']['Attribute']:
if attribute["id"] == args.attribute:
uuid = attribute["uuid"]
@ -48,5 +41,5 @@ if __name__ == '__main__':
if args.uuid:
uuid = args.uuid
print("UUID tagged: %s"%uuid)
print("UUID tagged: %s" % uuid)
misp.tag(uuid, args.tag)

View File

@ -0,0 +1,40 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
from pymisp import PyMISP
import sys
import json
# NOTE: the user of the API key *needs to be a sync user*
remote_url = 'https://misp.remote'
remote_api_key = 'REMOTE KEY FOR SYNC USER'
remote_verify = True
# NOTE: the user of the API key *needs to be an admin*
own_url = 'https://misp.own'
own_api_key = 'OWN KEY FOR ADMIN USER'
own_verify = True
remote_misp = PyMISP(url=remote_url, key=remote_api_key, ssl=remote_verify)
sync_config = remote_misp.get_sync_config()
if 'errors' in sync_config:
print('Something went wrong:')
print(json.dumps(sync_config, indent=2))
sys.exit(1)
else:
print('Successfully got a sync config:')
print(json.dumps(sync_config, indent=2))
own_misp = PyMISP(url=own_url, key=own_api_key, ssl=own_verify)
response = own_misp.import_server(sync_config)
if 'errors' in response:
print('Something went wrong:')
print(json.dumps(response, indent=2))
sys.exit(1)
else:
print('Successfully added the sync config:')
print(json.dumps(response, indent=2))

View File

@ -2,13 +2,9 @@
# -*- coding: utf-8 -*-
from keys import misp_url, misp_key, misp_verifycert
from pymisp import PyMISP
def init(url, key):
return PyMISP(url, key, misp_verifycert, 'json')
from pymisp import ExpandedPyMISP
if __name__ == '__main__':
misp = init(misp_url, misp_key)
misp.cache_all_feeds()
misp = ExpandedPyMISP(misp_url, misp_key, misp_verifycert)
misp.cache_all_feeds()

View File

@ -0,0 +1,493 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import datetime
from dateutil.parser import parse
import csv
from pathlib import Path
import json
from uuid import uuid4
import requests
from pymisp import MISPEvent, MISPObject, MISPTag, MISPOrganisation
from pymisp.tools import feed_meta_generator
class Scrippts:
def __init__(self, output_dir: str= 'output', org_name: str='CIRCL',
org_uuid: str='55f6ea5e-2c60-40e5-964f-47a8950d210f'):
self.misp_org = MISPOrganisation()
self.misp_org.name = org_name
self.misp_org.uuid = org_uuid
self.output_dir = Path(output_dir)
self.output_dir.mkdir(exist_ok=True)
self.data_dir = self.output_dir / 'data'
self.data_dir.mkdir(exist_ok=True)
self.scrippts_meta_file = self.output_dir / '.meta_scrippts'
self.scrippts_meta = {}
if self.scrippts_meta_file.exists():
# Format: <infofield>,<uuid>.json
with self.scrippts_meta_file.open() as f:
reader = csv.reader(f)
for row in reader:
self.scrippts_meta[row[0]] = row[1]
else:
self.scrippts_meta_file.touch()
def geolocation_alt(self) -> MISPObject:
# Alert, NWT, Canada
location = MISPObject('geolocation', standalone=False)
location.add_attribute('latitude', 82.3)
location.add_attribute('longitude', 62.3)
location.add_attribute('altitude', 210)
location.add_attribute('text', 'Alert, NWT, Canada')
return location
def tag_alt(self) -> MISPTag:
tag = MISPTag()
tag.name = 'scrippsco2-sampling-stations:ALT'
return tag
def geolocation_ptb(self):
# Point Barrow, Alaska
location = MISPObject('geolocation')
location.add_attribute('latitude', 71.3)
location.add_attribute('longitude', 156.6)
location.add_attribute('altitude', 11)
location.add_attribute('text', 'Point Barrow, Alaska')
return location
def tag_ptb(self):
tag = MISPTag()
tag.name = 'scrippsco2-sampling-stations:PTB'
return tag
def geolocation_stp(self) -> MISPObject:
# Station P
location = MISPObject('geolocation')
location.add_attribute('latitude', 50)
location.add_attribute('longitude', 145)
location.add_attribute('altitude', 0)
location.add_attribute('text', 'Station P')
return location
def tag_stp(self):
tag = MISPTag()
tag.name = 'scrippsco2-sampling-stations:STP'
return tag
def geolocation_ljo(self) -> MISPObject:
# La Jolla Pier, California
location = MISPObject('geolocation')
location.add_attribute('latitude', 32.9)
location.add_attribute('longitude', 117.3)
location.add_attribute('altitude', 10)
location.add_attribute('text', 'La Jolla Pier, California')
return location
def tag_ljo(self):
tag = MISPTag()
tag.name = 'scrippsco2-sampling-stations:LJO'
return tag
def geolocation_bcs(self) -> MISPObject:
# Baja California Sur, Mexico
location = MISPObject('geolocation')
location.add_attribute('latitude', 23.3)
location.add_attribute('longitude', 110.2)
location.add_attribute('altitude', 4)
location.add_attribute('text', 'Baja California Sur, Mexico')
return location
def tag_bcs(self):
tag = MISPTag()
tag.name = 'scrippsco2-sampling-stations:BCS'
return tag
def geolocation_mlo(self) -> MISPObject:
# Mauna Loa Observatory, Hawaii
location = MISPObject('geolocation')
location.add_attribute('latitude', 19.5)
location.add_attribute('longitude', 155.6)
location.add_attribute('altitude', 3397)
location.add_attribute('text', 'Mauna Loa Observatory, Hawaii')
return location
def tag_mlo(self):
tag = MISPTag()
tag.name = 'scrippsco2-sampling-stations:MLO'
return tag
def geolocation_kum(self) -> MISPObject:
# Cape Kumukahi, Hawaii
location = MISPObject('geolocation')
location.add_attribute('latitude', 19.5)
location.add_attribute('longitude', 154.8)
location.add_attribute('altitude', 3)
location.add_attribute('text', 'Cape Kumukahi, Hawaii')
return location
def tag_kum(self):
tag = MISPTag()
tag.name = 'scrippsco2-sampling-stations:KUM'
return tag
def geolocation_chr(self):
# Christmas Island, Fanning Island
location = MISPObject('geolocation')
location.add_attribute('latitude', 2)
location.add_attribute('longitude', 157.3)
location.add_attribute('altitude', 2)
location.add_attribute('text', 'Christmas Island, Fanning Island')
return location
def tag_chr(self):
tag = MISPTag()
tag.name = 'scrippsco2-sampling-stations:CHR'
return tag
def geolocation_sam(self):
# American Samoa
location = MISPObject('geolocation')
location.add_attribute('latitude', 14.2)
location.add_attribute('longitude', 170.6)
location.add_attribute('altitude', 30)
location.add_attribute('text', 'American Samoa')
return location
def tag_sam(self):
tag = MISPTag()
tag.name = 'scrippsco2-sampling-stations:SAM'
return tag
def geolocation_ker(self):
# Kermadec Islands, Raoul Island
location = MISPObject('geolocation')
location.add_attribute('latitude', 29.2)
location.add_attribute('longitude', 177.9)
location.add_attribute('altitude', 2)
location.add_attribute('text', 'Kermadec Islands, Raoul Island')
return location
def tag_ker(self):
tag = MISPTag()
tag.name = 'scrippsco2-sampling-stations:KER'
return tag
def geolocation_nzd(self):
# Baring Head, New Zealand
location = MISPObject('geolocation')
location.add_attribute('latitude', 41.4)
location.add_attribute('longitude', 174.9)
location.add_attribute('altitude', 85)
location.add_attribute('text', 'Baring Head, New Zealand')
return location
def tag_nzd(self):
tag = MISPTag()
tag.name = 'scrippsco2-sampling-stations:NZD'
return tag
def geolocation_psa(self):
# Palmer Station, Antarctica
location = MISPObject('geolocation')
location.add_attribute('latitude', 64.9)
location.add_attribute('longitude', 64)
location.add_attribute('altitude', 10)
location.add_attribute('text', 'Palmer Station, Antarctica')
return location
def tag_psa(self):
tag = MISPTag()
tag.name = 'scrippsco2-sampling-stations:PSA'
return tag
def geolocation_spo(self):
# South Pole
location = MISPObject('geolocation')
location.add_attribute('latitude', 90)
location.add_attribute('longitude', 0)
location.add_attribute('altitude', 2810)
location.add_attribute('text', 'South Pole')
return location
def tag_spo(self):
tag = MISPTag()
tag.name = 'scrippsco2-sampling-stations:SPO'
return tag
def fetch(self, url):
filepath = self.data_dir / Path(url).name
r = requests.get(url)
if r.status_code != 200 or r.text[0] != '"':
print(url)
return False
with filepath.open('w') as f:
f.write(r.text)
return filepath
def import_all(self, stations_short_names, interval, data_type):
object_creator = getattr(self, f'{interval}_flask_{data_type}')
if data_type == 'co2':
base_url = 'https://scrippsco2.ucsd.edu/assets/data/atmospheric/stations/flask_co2/'
elif data_type in ['c13', 'o18']:
base_url = 'https://scrippsco2.ucsd.edu/assets/data/atmospheric/stations/flask_isotopic/'
for station in stations_short_names:
url = f'{base_url}/{interval}/{interval}_flask_{data_type}_{station}.csv'
infofield = f'[{station.upper()}] {interval} average atmospheric {data_type} concentrations'
filepath = self.fetch(url)
if not filepath:
continue
if infofield in self.scrippts_meta:
event = MISPEvent()
event.load_file(str(self.output_dir / self.scrippts_meta[infofield]))
location = event.get_objects_by_name('geolocation')[0]
update = True
else:
event = MISPEvent()
event.uuid = str(uuid4())
event.info = infofield
event.Orgc = self.misp_org
event.add_tag(getattr(self, f'tag_{station}')())
location = getattr(self, f'geolocation_{station}')()
event.add_object(location)
event.add_attribute('link', f'https://scrippsco2.ucsd.edu/data/atmospheric_co2/{station}')
update = False
with self.scrippts_meta_file.open('a') as f:
writer = csv.writer(f)
writer.writerow([infofield, f'{event.uuid}.json'])
object_creator(event, location, filepath, update)
if update:
# Bump the publish timestamp
event.publish_timestamp = datetime.datetime.timestamp(datetime.datetime.now())
feed_output = event.to_feed(with_meta=False)
with (self.output_dir / f'{event.uuid}.json').open('w') as f:
# json.dump(feed_output, f, indent=2, sort_keys=True) # For testing
json.dump(feed_output, f)
def import_monthly_co2_all(self):
to_import = ['alt', 'ptb', 'stp', 'ljo', 'bcs', 'mlo', 'kum', 'chr', 'sam', 'ker', 'nzd']
self.import_all(to_import, 'monthly', 'co2')
def import_monthly_c13_all(self):
to_import = ['alt', 'ptb', 'stp', 'ljo', 'bcs', 'mlo', 'kum', 'chr', 'sam', 'ker', 'nzd', 'psa', 'spo']
self.import_all(to_import, 'monthly', 'c13')
def import_monthly_o18_all(self):
to_import = ['alt', 'ptb', 'stp', 'ljo', 'bcs', 'mlo', 'kum', 'chr', 'sam', 'ker', 'nzd', 'spo']
self.import_all(to_import, 'monthly', 'o18')
def import_daily_co2_all(self):
to_import = ['alt', 'ptb', 'stp', 'ljo', 'bcs', 'mlo', 'kum', 'chr', 'sam', 'ker', 'nzd']
self.import_all(to_import, 'daily', 'co2')
def import_daily_c13_all(self):
to_import = ['alt', 'ptb', 'ljo', 'bcs', 'mlo', 'kum', 'chr', 'sam', 'ker', 'nzd', 'spo']
self.import_all(to_import, 'daily', 'c13')
def import_daily_o18_all(self):
to_import = ['alt', 'ptb', 'ljo', 'bcs', 'mlo', 'kum', 'chr', 'sam', 'ker', 'nzd', 'spo']
self.import_all(to_import, 'daily', 'o18')
def split_data_comment(self, csv_file, update, event):
comment = ''
data = []
with csv_file.open() as f:
for line in f:
if line[0] == '"':
if update:
continue
if '----------' in line:
event.add_attribute('comment', comment, disable_correlation=True)
comment = ''
continue
comment += line[1:-1].strip()
else:
data.append(line)
if not update:
event.add_attribute('comment', comment, disable_correlation=True)
return data
def monthly_flask_co2(self, event, location, csv_file, update):
data = self.split_data_comment(csv_file, update, event)
dates_already_imported = []
if update:
# get all datetime from existing event
for obj in event.get_objects_by_name('scrippsco2-co2-monthly'):
date_attribute = obj.get_attributes_by_relation('sample-datetime')[0]
dates_already_imported.append(date_attribute.value)
reader = csv.reader(data)
for row in reader:
if not row[0].isdigit():
# This file has malformed headers
continue
sample_date = parse(f'{row[0]}-{row[1]}-16T00:00:00')
if sample_date in dates_already_imported:
continue
obj = MISPObject('scrippsco2-co2-monthly', standalone=False)
obj.add_attribute('sample-datetime', sample_date)
obj.add_attribute('sample-date-excel', float(row[2]))
obj.add_attribute('sample-date-fractional', float(row[3]))
obj.add_attribute('monthly-co2', float(row[4]))
obj.add_attribute('monthly-co2-seasonal-adjustment', float(row[5]))
obj.add_attribute('monthly-co2-smoothed', float(row[6]))
obj.add_attribute('monthly-co2-smoothed-seasonal-adjustment', float(row[7]))
obj.add_reference(location, 'sampling-location')
event.add_object(obj)
def monthly_flask_c13(self, event, location, csv_file, update):
data = self.split_data_comment(csv_file, update, event)
dates_already_imported = []
if update:
# get all datetime from existing event
for obj in event.get_objects_by_name('scrippsco2-c13-monthly'):
date_attribute = obj.get_attributes_by_relation('sample-datetime')[0]
dates_already_imported.append(date_attribute.value)
reader = csv.reader(data)
for row in reader:
if not row[0].isdigit():
# This file has malformed headers
continue
sample_date = parse(f'{row[0]}-{row[1]}-16T00:00:00')
if sample_date in dates_already_imported:
continue
obj = MISPObject('scrippsco2-c13-monthly', standalone=False)
obj.add_attribute('sample-datetime', sample_date)
obj.add_attribute('sample-date-excel', float(row[2]))
obj.add_attribute('sample-date-fractional', float(row[3]))
obj.add_attribute('monthly-c13', float(row[4]))
obj.add_attribute('monthly-c13-seasonal-adjustment', float(row[5]))
obj.add_attribute('monthly-c13-smoothed', float(row[6]))
obj.add_attribute('monthly-c13-smoothed-seasonal-adjustment', float(row[7]))
obj.add_reference(location, 'sampling-location')
event.add_object(obj)
def monthly_flask_o18(self, event, location, csv_file, update):
data = self.split_data_comment(csv_file, update, event)
dates_already_imported = []
if update:
# get all datetime from existing event
for obj in event.get_objects_by_name('scrippsco2-o18-monthly'):
date_attribute = obj.get_attributes_by_relation('sample-datetime')[0]
dates_already_imported.append(date_attribute.value)
reader = csv.reader(data)
for row in reader:
if not row[0].isdigit():
# This file has malformed headers
continue
sample_date = parse(f'{row[0]}-{row[1]}-16T00:00:00')
if sample_date in dates_already_imported:
continue
obj = MISPObject('scrippsco2-o18-monthly', standalone=False)
obj.add_attribute('sample-datetime', sample_date)
obj.add_attribute('sample-date-excel', float(row[2]))
obj.add_attribute('sample-date-fractional', float(row[3]))
obj.add_attribute('monthly-o18', float(row[4]))
obj.add_attribute('monthly-o18-seasonal-adjustment', float(row[5]))
obj.add_attribute('monthly-o18-smoothed', float(row[6]))
obj.add_attribute('monthly-o18-smoothed-seasonal-adjustment', float(row[7]))
obj.add_reference(location, 'sampling-location')
event.add_object(obj)
def daily_flask_co2(self, event, location, csv_file, update):
data = self.split_data_comment(csv_file, update, event)
dates_already_imported = []
if update:
# get all datetime from existing event
for obj in event.get_objects_by_name('scrippsco2-co2-daily'):
date_attribute = obj.get_attributes_by_relation('sample-datetime')[0]
dates_already_imported.append(date_attribute.value)
reader = csv.reader(data)
for row in reader:
sample_date = parse(f'{row[0]}-{row[1]}')
if sample_date in dates_already_imported:
continue
obj = MISPObject('scrippsco2-co2-daily', standalone=False)
obj.add_attribute('sample-datetime', sample_date)
obj.add_attribute('sample-date-excel', float(row[2]))
obj.add_attribute('sample-date-fractional', float(row[3]))
obj.add_attribute('number-flask', int(row[4]))
obj.add_attribute('flag', int(row[5]))
attr = obj.add_attribute('co2-value', float(row[6]))
attr.add_tag(f'scrippsco2-fgc:{int(row[5])}')
obj.add_reference(location, 'sampling-location')
event.add_object(obj)
def daily_flask_c13(self, event, location, csv_file, update):
data = self.split_data_comment(csv_file, update, event)
dates_already_imported = []
if update:
# get all datetime from existing event
for obj in event.get_objects_by_name('scrippsco2-c13-daily'):
date_attribute = obj.get_attributes_by_relation('sample-datetime')[0]
dates_already_imported.append(date_attribute.value)
reader = csv.reader(data)
for row in reader:
sample_date = parse(f'{row[0]}-{row[1]}')
if sample_date in dates_already_imported:
continue
obj = MISPObject('scrippsco2-c13-daily', standalone=False)
obj.add_attribute('sample-datetime', sample_date)
obj.add_attribute('sample-date-excel', float(row[2]))
obj.add_attribute('sample-date-fractional', float(row[3]))
obj.add_attribute('number-flask', int(row[4]))
obj.add_attribute('flag', int(row[5]))
attr = obj.add_attribute('c13-value', float(row[6]))
attr.add_tag(f'scrippsco2-fgi:{int(row[5])}')
obj.add_reference(location, 'sampling-location')
event.add_object(obj)
def daily_flask_o18(self, event, location, csv_file, update):
data = self.split_data_comment(csv_file, update, event)
dates_already_imported = []
if update:
# get all datetime from existing event
for obj in event.get_objects_by_name('scrippsco2-o18-daily'):
date_attribute = obj.get_attributes_by_relation('sample-datetime')[0]
dates_already_imported.append(date_attribute.value)
reader = csv.reader(data)
for row in reader:
sample_date = parse(f'{row[0]}-{row[1]}')
if sample_date in dates_already_imported:
continue
obj = MISPObject('scrippsco2-o18-daily', standalone=False)
obj.add_attribute('sample-datetime', sample_date)
obj.add_attribute('sample-date-excel', float(row[2]))
obj.add_attribute('sample-date-fractional', float(row[3]))
obj.add_attribute('number-flask', int(row[4]))
obj.add_attribute('flag', int(row[5]))
attr = obj.add_attribute('o18-value', float(row[6]))
attr.add_tag(f'scrippsco2-fgi:{int(row[5])}')
obj.add_reference(location, 'sampling-location')
event.add_object(obj)
if __name__ == '__main__':
output_dir = 'scrippsco2_feed'
i = Scrippts(output_dir=output_dir)
i.import_daily_co2_all()
i.import_daily_c13_all()
i.import_daily_o18_all()
i.import_monthly_co2_all()
i.import_monthly_c13_all()
i.import_monthly_o18_all()
feed_meta_generator(Path(output_dir))
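A quick, hedged way to sanity-check the generated feed: each {uuid}.json written above wraps the event in the {'Event': ...} structure produced by to_feed(), which MISPEvent can load back (manifest.json is the index written by feed_meta_generator and is skipped here):
from pathlib import Path
from pymisp import MISPEvent

for feed_file in Path('scrippsco2_feed').glob('*.json'):
    if feed_file.name == 'manifest.json':
        continue
    event = MISPEvent()
    event.load_file(str(feed_file))
    print(event.info, len(event.objects))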

View File

@ -0,0 +1,68 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from pymisp import PyMISP
from keys import misp_url, misp_key, misp_verifycert
import argparse
import os
SILENT = False
def getTagToApplyToEvent(event):
tags_to_apply = set()
event_tags = { tag.name for tag in event.tags }
for galaxy in event.galaxies:
for cluster in galaxy.clusters:
event_tags.add(cluster.tag_name)
for attribute in event.attributes:
for attribute_tag in attribute.tags:
if attribute_tag.name not in event_tags:
tags_to_apply.add(attribute_tag.name)
return tags_to_apply
def TagEvent(event, tags_to_apply):
for tag in tags_to_apply:
event.add_tag(tag)
return event
def condPrint(text):
if not SILENT:
print(text)
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Get an event from a MISP instance.')
parser.add_argument("-e", "--event", required=True, help="Event ID to get.")
parser.add_argument("-y", "--yes", required=False, default=False, action='store_true', help="Automatically accept prompt.")
parser.add_argument("-s", "--silent", required=False, default=False, action='store_true', help="No output to stdin.")
args = parser.parse_args()
SILENT = args.silent
misp = PyMISP(misp_url, misp_key, misp_verifycert)
event = misp.get_event(args.event, pythonify=True)
tags_to_apply = getTagToApplyToEvent(event)
condPrint('Tag to apply at event level:')
for tag in tags_to_apply:
condPrint(f'- {tag}')
confirmed = False
if args.yes:
confirmed = True
else:
confirm = input('Confirm [Y/n]: ')
confirmed = len(confirm) == 0 or confirm == 'Y' or confirm == 'y'
if confirmed:
event = TagEvent(event, tags_to_apply)
condPrint(f'Updating event {args.event}')
misp.update_event(event)
condPrint(f'Event {args.event} tagged with {len(tags_to_apply)} tags')
else:
condPrint('Operation cancelled')

View File

@ -0,0 +1,152 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
from pathlib import Path
from csv import DictReader
from pymisp import MISPEvent, MISPOrganisation, PyMISP, MISPObject
from datetime import datetime
from dateutil.parser import parse
import json
from pymisp.tools import feed_meta_generator
from io import BytesIO
from collections import defaultdict
make_feed = False
aggregate_by_country = True
path = Path('/home/raphael/gits/COVID-19/csse_covid_19_data/csse_covid_19_daily_reports/')
def get_country_region(row):
if 'Country/Region' in row:
return row['Country/Region']
elif 'Country_Region' in row:
return row['Country_Region']
else:
print(p, row.keys())
raise Exception()
def get_last_update(row):
if 'Last_Update' in row:
return parse(row['Last_Update'])
elif 'Last Update' in row:
return parse(row['Last Update'])
else:
print(p, row.keys())
raise Exception()
def add_detailed_object(obj, row):
if 'Province/State' in row:
if row['Province/State']:
obj.add_attribute('province-state', row['Province/State'])
elif '\ufeffProvince/State' in row:
if row['\ufeffProvince/State']:
obj.add_attribute('province-state', row['\ufeffProvince/State'])
elif 'Province_State' in row:
if row['Province_State']:
obj.add_attribute('province-state', row['Province_State'])
else:
print(p, row.keys())
raise Exception()
obj.add_attribute('country-region', get_country_region(row))
obj.add_attribute('update', get_last_update(row))
if 'Lat' in row:
obj.add_attribute('latitude', row['Lat'])
if 'Long_' in row:
obj.add_attribute('longitude', row['Long_'])
elif 'Long' in row:
obj.add_attribute('longitude', row['Long'])
if row['Confirmed']:
obj.add_attribute('confirmed', int(row['Confirmed']))
if row['Deaths']:
obj.add_attribute('death', int(row['Deaths']))
if row['Recovered']:
obj.add_attribute('recovered', int(row['Recovered']))
if 'Active' in row and row['Active']:
obj.add_attribute('active', int(row['Active']))
def country_aggregate(aggregate, row):
c = get_country_region(row)
if c not in aggregate:
aggregate[c] = defaultdict(active=0, death=0, recovered=0, confirmed=0, update=datetime.fromtimestamp(0))
if row['Confirmed']:
aggregate[c]['confirmed'] += int(row['Confirmed'])
if row['Deaths']:
aggregate[c]['death'] += int(row['Deaths'])
if row['Recovered']:
aggregate[c]['recovered'] += int(row['Recovered'])
if 'Active' in row and row['Active']:
aggregate[c]['active'] += int(row['Active'])
update = get_last_update(row)
if update > aggregate[c]['update']:
aggregate[c]['update'] = update
if make_feed:
org = MISPOrganisation()
org.name = 'CIRCL'
org.uuid = "55f6ea5e-2c60-40e5-964f-47a8950d210f"
else:
from covid_key import url, key
misp = PyMISP(url, key)
for p in path.glob('**/*.csv'):
d = datetime.strptime(p.name[:-4], '%m-%d-%Y').date()
event = MISPEvent()
if aggregate_by_country:
event.info = f"[{d.isoformat()}] CSSE COVID-19 daily report"
else:
event.info = f"[{d.isoformat()}] CSSE COVID-19 detailed daily report"
event.date = d
event.distribution = 3
event.add_tag('tlp:white')
if make_feed:
event.orgc = org
else:
e = misp.search(eventinfo=event.info, metadata=True, pythonify=True)
if e:
# Already added.
continue
event.add_attribute('attachment', p.name, data=BytesIO(p.open('rb').read()))
event.add_attribute('link', f'https://github.com/CSSEGISandData/COVID-19/tree/master/csse_covid_19_data/csse_covid_19_daily_reports/{p.name}', comment='Source')
if aggregate_by_country:
aggregate = defaultdict()
with p.open() as f:
reader = DictReader(f)
for row in reader:
if aggregate_by_country:
country_aggregate(aggregate, row)
else:
obj = MISPObject(name='covid19-csse-daily-report')
add_detailed_object(obj, row)
event.add_object(obj)
if aggregate_by_country:
for country, values in aggregate.items():
obj = event.add_object(name='covid19-csse-daily-report', standalone=False)
obj.add_attribute('country-region', country)
obj.add_attribute('update', values['update'])
obj.add_attribute('confirmed', values['confirmed'])
obj.add_attribute('death', values['death'])
obj.add_attribute('recovered', values['recovered'])
obj.add_attribute('active', values['active'])
if make_feed:
with (Path('output') / f'{event.uuid}.json').open('w') as _w:
json.dump(event.to_feed(), _w)
else:
event = misp.add_event(event)
misp.publish(event)
if make_feed:
feed_meta_generator(Path('output'))

View File

@ -0,0 +1,77 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
from pathlib import Path
from pymisp import MISPEvent, MISPOrganisation, PyMISP
from dateutil.parser import parse
import json
from pymisp.tools import feed_meta_generator
from io import BytesIO
make_feed = False
path = Path('/home/raphael/gits/covid-19-china/data')
if make_feed:
org = MISPOrganisation()
org.name = 'CIRCL'
org.uuid = "55f6ea5e-2c60-40e5-964f-47a8950d210f"
else:
from covid_key import url, key
misp = PyMISP(url, key)
for p in path.glob('*_json/current_china.json'):
d = parse(p.parent.name[:-5])
event = MISPEvent()
event.info = f"[{d.isoformat()}] DXY COVID-19 live report"
event.date = d
event.distribution = 3
event.add_tag('tlp:white')
if make_feed:
event.orgc = org
else:
e = misp.search(eventinfo=event.info, metadata=True, pythonify=True)
if e:
# Already added.
continue
event.add_attribute('attachment', p.name, data=BytesIO(p.open('rb').read()))
with p.open() as f:
data = json.load(f)
for province in data:
obj_province = event.add_object(name='covid19-dxy-live-province', standalone=False)
obj_province.add_attribute('province', province['provinceName'])
obj_province.add_attribute('update', d)
if province['currentConfirmedCount']:
obj_province.add_attribute('current-confirmed', province['currentConfirmedCount'])
if province['confirmedCount']:
obj_province.add_attribute('total-confirmed', province['confirmedCount'])
if province['curedCount']:
obj_province.add_attribute('total-cured', province['curedCount'])
if province['deadCount']:
obj_province.add_attribute('total-death', province['deadCount'])
if province['comment']:
obj_province.add_attribute('comment', province['comment'])
for city in province['cities']:
obj_city = event.add_object(name='covid19-dxy-live-city', standalone=False)
obj_city.add_attribute('city', city['cityName'])
obj_city.add_attribute('update', d)
if city['currentConfirmedCount']:
obj_city.add_attribute('current-confirmed', city['currentConfirmedCount'])
if city['confirmedCount']:
obj_city.add_attribute('total-confirmed', city['confirmedCount'])
if city['curedCount']:
obj_city.add_attribute('total-cured', city['curedCount'])
if city['deadCount']:
obj_city.add_attribute('total-death', city['deadCount'])
obj_city.add_reference(obj_province, 'part-of')
if make_feed:
with (Path('output') / f'{event.uuid}.json').open('w') as _w:
json.dump(event.to_feed(), _w)
else:
misp.add_event(event)
if make_feed:
feed_meta_generator(Path('output'))

View File

@ -1,19 +1,10 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from pymisp import PyMISP
from pymisp import ExpandedPyMISP, MISPEvent
from keys import misp_url, misp_key, misp_verifycert
import argparse
# For python2 & 3 compat, a bit dirty, but it seems to be the least bad one
try:
input = raw_input
except NameError:
pass
def init(url, key):
return PyMISP(url, key, misp_verifycert, 'json', debug=True)
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Create an event on MISP.')
@ -23,7 +14,13 @@ if __name__ == '__main__':
parser.add_argument("-t", "--threat", type=int, help="The threat level ID of the newly created event, if applicable. [1-4]")
args = parser.parse_args()
misp = init(misp_url, misp_key)
misp = ExpandedPyMISP(misp_url, misp_key, misp_verifycert)
event = misp.new_event(args.distrib, args.threat, args.analysis, args.info)
event = MISPEvent()
event.distribution = args.distrib
event.threat_level_id = args.threat
event.analysis = args.analysis
event.info = args.info
event = misp.add_event(event, pythonify=True)
print(event)

549
examples/cytomic_orion.py Executable file
View File

@ -0,0 +1,549 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
Koen Van Impe
Cytomic Automation
Put this script in crontab to run every /15 or /60
*/15 * * * * mispuser /usr/bin/python3 /home/mispuser/PyMISP/examples/cytomic_orion.py
Fetches the configuration set in the Cytomic Orion enrichment module
- events : upload events tagged with the 'upload' tag (all the attributes supported by Cytomic Orion)
- upload : upload attributes flagged with the 'upload' tag (only attributes supported by Cytomic Orion)
- delete : delete attributes flagged with the 'upload' tag (only attributes supported by Cytomic Orion)
'''
from pymisp import ExpandedPyMISP
from keys import misp_url, misp_key, misp_verifycert
import argparse
import os
import re
import sys
import requests
import json
import urllib3
def get_token(token_url, clientid, clientsecret, scope, grant_type, username, password):
'''
Get oAuth2 token
Configuration settings are fetched first from the MISP module configuration
'''
try:
if scope and grant_type and username and password:
data = {'scope': scope, 'grant_type': grant_type, 'username': username, 'password': password}
if token_url and clientid and clientsecret:
access_token_response = requests.post(token_url, data=data, verify=False, allow_redirects=False, auth=(clientid, clientsecret))
tokens = json.loads(access_token_response.text)
if 'access_token' in tokens:
access_token = tokens['access_token']
return access_token
else:
sys.exit('No token received')
else:
sys.exit('No token_url, clientid or clientsecret supplied')
else:
sys.exit('No scope, grant_type, username or password supplied')
except Exception:
sys.exit('Unable to connect to token_url')
def get_config(url, key, misp_verifycert):
'''
Get the module config and the settings needed to access the API
Also contains the settings to do the query
'''
try:
misp_headers = {'Content-Type': 'application/json', 'Accept': 'application/json', 'Authorization': key}
req = requests.get(url + 'servers/serverSettings.json', verify=misp_verifycert, headers=misp_headers)
if req.status_code == 200:
req_json = req.json()
if 'finalSettings' in req_json:
finalSettings = req_json['finalSettings']
clientid = clientsecret = scope = username = password = grant_type = api_url = token_url = ''
module_enabled = False
scope = 'orion.api'
grant_type = 'password'
limit_upload_events = 50
limit_upload_attributes = 50
ttlDays = "1"
last_attributes = '5d'
post_threat_level_id = 2
for el in finalSettings:
# Is the module enabled?
if el['setting'] == 'Plugin.Enrichment_cytomic_orion_enabled':
module_enabled = el['value']
if module_enabled is False:
break
elif el['setting'] == 'Plugin.Enrichment_cytomic_orion_clientid':
clientid = el['value']
elif el['setting'] == 'Plugin.Enrichment_cytomic_orion_clientsecret':
clientsecret = el['value']
elif el['setting'] == 'Plugin.Enrichment_cytomic_orion_username':
username = el['value']
elif el['setting'] == 'Plugin.Enrichment_cytomic_orion_password':
password = el['value']
elif el['setting'] == 'Plugin.Enrichment_cytomic_orion_api_url':
api_url = el['value'].replace('\\/', '/')
elif el['setting'] == 'Plugin.Enrichment_cytomic_orion_token_url':
token_url = el['value'].replace('\\/', '/')
elif el['setting'] == 'MISP.baseurl':
misp_baseurl = el['value']
elif el['setting'] == 'Plugin.Enrichment_cytomic_orion_upload_threat_level_id':
if el['value']:
try:
post_threat_level_id = int(el['value'])
except:
continue
elif el['setting'] == 'Plugin.Enrichment_cytomic_orion_upload_ttlDays':
if el['value']:
try:
ttlDays = "{last_days}".format(last_days=int(el['value']))
except:
continue
elif el['setting'] == 'Plugin.Enrichment_cytomic_orion_upload_timeframe':
if el['value']:
try:
last_attributes = "{last_days}d".format(last_days=int(el['value']))
except:
continue
elif el['setting'] == 'Plugin.Enrichment_cytomic_orion_upload_tag':
upload_tag = el['value']
elif el['setting'] == 'Plugin.Enrichment_cytomic_orion_delete_tag':
delete_tag = el['value']
elif el['setting'] == 'Plugin.Enrichment_limit_upload_events':
if el['value']:
try:
limit_upload_events = "{limit_upload_events}".format(limit_upload_events=int(el['value']))
except:
continue
elif el['setting'] == 'Plugin.Enrichment_limit_upload_attributes':
if el['value']:
try:
limit_upload_attributes = "{limit_upload_attributes}".format(limit_upload_attributes=int(el['value']))
except:
continue
else:
sys.exit('Did not receive a 200 code from MISP')
if module_enabled and api_url and token_url and clientid and clientsecret and username and password and grant_type:
return {'cytomic_policy': 'Detect',
'upload_timeframe': last_attributes,
'upload_tag': upload_tag,
'delete_tag': delete_tag,
'upload_ttlDays': ttlDays,
'post_threat_level_id': post_threat_level_id,
'clientid': clientid,
'clientsecret': clientsecret,
'scope': scope,
'username': username,
'password': password,
'grant_type': grant_type,
'api_url': api_url,
'token_url': token_url,
'misp_baseurl': misp_baseurl,
'limit_upload_events': limit_upload_events,
'limit_upload_attributes': limit_upload_attributes}
else:
sys.exit('Did not receive all the necessary configuration information from MISP')
except Exception as e:
sys.exit('Unable to get module config from MISP')
class cytomicobject:
misp = None
lst_evtid = None
lst_attuuid = None
lst_attuuid_error = None
endpoint_ioc = None
api_call_headers = None
post_data = None
args = None
tag = None
limit_events = None
limit_attributes = None
atttype_misp = None
atttype_cytomic = None
attlabel_cytomic = None
att_types = {
"ip-dst": {"ip": "ipioc"},
"ip-src": {"ip": "ipioc"},
"url": {"url": "urlioc"},
"md5": {"hash": "filehashioc"},
"domain": {"domain": "domainioc"},
"hostname": {"domain": "domainioc"},
"domain|ip": {"domain": "domainioc"},
"hostname|port": {"domain": "domainioc"}
}
debug = True
error = False
res = False
res_msg = None
def collect_events_ids(cytomicobj, moduleconfig):
# Get events that contain Cytomic tag.
try:
evt_result = cytomicobj.misp.search(controller='events', limit=cytomicobj.limit_events, tags=cytomicobj.tag, last=moduleconfig['upload_timeframe'], published=True, deleted=False, pythonify=True)
cytomicobj.lst_evtid = ['x', 'y']
for evt in evt_result:
evt = cytomicobj.misp.get_event(event=evt['id'], pythonify=True)
if len(evt.tags) > 0:
for tg in evt.tags:
if tg.name == cytomicobj.tag:
if not cytomicobj.lst_evtid:
cytomicobj.lst_evtid = str(evt['id'])
else:
if not evt['id'] in cytomicobj.lst_evtid:
cytomicobj.lst_evtid.append(str(evt['id']))
break
cytomicobj.lst_evtid.remove('x')
cytomicobj.lst_evtid.remove('y')
except Exception:
cytomicobj.error = True
if cytomicobj.debug:
sys.exit('Unable to collect events ids')
def find_eventid(cytomicobj, evtid):
# Get events that contain Cytomic tag.
try:
cytomicobj.res = False
for id in cytomicobj.lst_evtid:
if id == evtid:
cytomicobj.res = True
break
except Exception:
cytomicobj.error = True
if cytomicobj.debug:
sys.exit('Unable to collect events ids')
def print_result_events(cytomicobj):
try:
if cytomicobj.res_msg is not None:
for key, msg in cytomicobj.res_msg.items():
if msg is not None:
print(key, msg)
except Exception:
cytomicobj.error = True
if cytomicobj.debug:
sys.exit('Unable to print result')
def set_postdata(cytomicobj, moduleconfig, attribute):
# Set JSON to send to the API.
try:
if cytomicobj.args.upload or cytomicobj.args.events:
event = attribute['Event']
event_title = event['info']
event_id = event['id']
threat_level_id = int(event['threat_level_id'])
if moduleconfig['post_threat_level_id'] <= threat_level_id:
if cytomicobj.atttype_misp == 'domain|ip' or cytomicobj.atttype_misp == 'hostname|port':
post_value = attribute['value'].split('|')[0]
else:
post_value = attribute['value']
if cytomicobj.atttype_misp == 'url' and 'http' not in post_value:
pass
else:
if cytomicobj.post_data is None:
cytomicobj.post_data = [{cytomicobj.attlabel_cytomic: post_value, 'AdditionalData': '{} {}'.format(cytomicobj.atttype_misp, attribute['comment']).strip(), 'Source': 'Uploaded from MISP', 'Policy': moduleconfig['cytomic_policy'], 'Description': '{} - {}'.format(event_id, event_title).strip()}]
else:
if post_value not in str(cytomicobj.post_data):
cytomicobj.post_data.append({cytomicobj.attlabel_cytomic: post_value, 'AdditionalData': '{} {}'.format(cytomicobj.atttype_misp, attribute['comment']).strip(), 'Source': 'Uploaded from MISP', 'Policy': moduleconfig['cytomic_policy'], 'Description': '{} - {}'.format(event_id, event_title).strip()})
else:
if cytomicobject.debug:
print('Event %s skipped because of lower threat level' % event_id)
else:
event = attribute['Event']
threat_level_id = int(event['threat_level_id'])
if moduleconfig['post_threat_level_id'] <= threat_level_id:
if cytomicobj.atttype_misp == 'domain|ip' or cytomicobj.atttype_misp == 'hostname|port':
post_value = attribute['value'].split('|')[0]
else:
post_value = attribute['value']
if cytomicobj.atttype_misp == 'url' and 'http' not in post_value:
pass
else:
if cytomicobj.post_data is None:
cytomicobj.post_data = [{cytomicobj.attlabel_cytomic: post_value}]
else:
cytomicobj.post_data.append({cytomicobj.attlabel_cytomic: post_value})
else:
if cytomicobject.debug:
print('Event %s skipped because of lower threat level' % event['id'])
except Exception:
cytomicobj.error = True
if cytomicobj.debug:
sys.exit('Unable to process post-data')
def send_postdata(cytomicobj, evtid=None):
# Batch post to upload event attributes.
try:
if cytomicobj.post_data is not None:
if cytomicobj.debug:
print('POST: {} {}'.format(cytomicobj.endpoint_ioc, cytomicobj.post_data))
result_post_endpoint_ioc = requests.post(cytomicobj.endpoint_ioc, headers=cytomicobj.api_call_headers, json=cytomicobj.post_data, verify=False)
json_result_post_endpoint_ioc = json.loads(result_post_endpoint_ioc.text)
print(result_post_endpoint_ioc)
if 'true' not in (result_post_endpoint_ioc.text):
cytomicobj.error = True
if evtid is not None:
if cytomicobj.res_msg['Event: ' + str(evtid)] is None:
cytomicobj.res_msg['Event: ' + str(evtid)] = '(Send POST data: errors uploading attributes, event NOT untagged). If the problem persists, please check that the attribute values are correctly formatted.'
else:
cytomicobj.res_msg['Event: ' + str(evtid)] = cytomicobj.res_msg['Event: ' + str(evtid)] + ' (Send POST data -else: errors uploading attributes, event NOT untagged). If the problem persists, please check that the attribute values are correctly formatted.'
if cytomicobj.debug:
print('RESULT: {}'.format(json_result_post_endpoint_ioc))
else:
if evtid is None:
cytomicobj.error = True
except Exception:
cytomicobj.error = True
if cytomicobj.debug:
sys.exit('Unable to post attributes')
def process_attributes(cytomicobj, moduleconfig, evtid=None):
# Get attributes to process.
try:
for misptype, cytomictypes in cytomicobject.att_types.items():
cytomicobj.atttype_misp = misptype
for cytomiclabel, cytomictype in cytomictypes.items():
cytomicobj.attlabel_cytomic = cytomiclabel
cytomicobj.atttype_cytomic = cytomictype
cytomicobj.post_data = None
icont = 0
if cytomicobj.args.upload or cytomicobj.args.events:
cytomicobj.endpoint_ioc = moduleconfig['api_url'] + '/iocs/' + cytomicobj.atttype_cytomic + '?ttlDays=' + str(moduleconfig['upload_ttlDays'])
else:
cytomicobj.endpoint_ioc = moduleconfig['api_url'] + '/iocs/eraser/' + cytomicobj.atttype_cytomic
# Get attributes to upload/delete and prepare JSON
# If evtid is set; we're called from --events
if cytomicobject.debug:
print("\nSearching for attributes of type %s" % cytomicobj.atttype_misp)
if evtid is None:
cytomicobj.error = False
attr_result = cytomicobj.misp.search(controller='attributes', last=moduleconfig['upload_timeframe'], limit=cytomicobj.limit_attributes, type_attribute=cytomicobj.atttype_misp, tag=cytomicobj.tag, published=True, deleted=False, includeProposals=False, include_context=True, to_ids=True)
else:
if cytomicobj.error:
break
# We don't search with tags; we have an event for which we want to upload all events
attr_result = cytomicobj.misp.search(controller='attributes', eventid=evtid, last=moduleconfig['upload_timeframe'], limit=cytomicobj.limit_attributes, type_attribute=cytomicobj.atttype_misp, published=True, deleted=False, includeProposals=False, include_context=True, to_ids=True)
cytomicobj.lst_attuuid = ['x', 'y']
if len(attr_result['Attribute']) > 0:
for attribute in attr_result['Attribute']:
if evtid is not None:
if cytomicobj.error:
cytomicobj.res_msg['Event: ' + str(evtid)] = cytomicobj.res_msg['Event: ' + str(evtid)] + ' (errors uploading attributes, event NOT untagged). If the problem persists, please check that the attribute values are correctly formatted.'
break
if icont >= cytomicobj.limit_attributes:
if not cytomicobj.error and cytomicobj.post_data is not None:
# Send data to Cytomic
send_postdata(cytomicobj, evtid)
if not cytomicobj.error:
if 'Event: ' + str(evtid) in cytomicobj.res_msg:
if cytomicobj.res_msg['Event: ' + str(evtid)] is None:
cytomicobj.res_msg['Event: ' + str(evtid)] = cytomicobj.attlabel_cytomic + 's: ' + str(icont)
else:
cytomicobj.res_msg['Event: ' + str(evtid)] += ' | ' + cytomicobj.attlabel_cytomic + 's: ' + str(icont)
else:
if cytomicobject.debug:
print('Data sent (' + cytomicobj.attlabel_cytomic + '): ' + str(icont))
cytomicobj.post_data = None
if cytomicobj.error:
if evtid is not None:
cytomicobj.res_msg['Event: ' + str(evtid)] = cytomicobj.res_msg['Event: ' + str(evtid)] + ' (errors uploading attributes, event NOT untagged). If the problem persists, please check that the attribute values are correctly formatted.'
break
icont = 0
if evtid is None:
event = attribute['Event']
event_id = event['id']
find_eventid(cytomicobj, str(event_id))
if not cytomicobj.res:
if not cytomicobj.lst_attuuid:
cytomicobj.lst_attuuid = attribute['uuid']
else:
if not attribute['uuid'] in cytomicobj.lst_attuuid:
cytomicobj.lst_attuuid.append(attribute['uuid'])
icont += 1
# Prepare data to send
set_postdata(cytomicobj, moduleconfig, attribute)
else:
icont += 1
# Prepare data to send
set_postdata(cytomicobj, moduleconfig, attribute)
if not cytomicobj.error:
# Send data to Cytomic
send_postdata(cytomicobj, evtid)
if not cytomicobj.error and cytomicobj.post_data is not None and icont > 0:
# Data sent; process response
if cytomicobj.res_msg is not None and 'Event: ' + str(evtid) in cytomicobj.res_msg:
if cytomicobj.res_msg['Event: ' + str(evtid)] is None:
cytomicobj.res_msg['Event: ' + str(evtid)] = cytomicobj.attlabel_cytomic + 's: ' + str(icont)
else:
cytomicobj.res_msg['Event: ' + str(evtid)] += ' | ' + cytomicobj.attlabel_cytomic + 's: ' + str(icont)
else:
if cytomicobject.debug:
print('Data sent (' + cytomicobj.attlabel_cytomic + '): ' + str(icont))
if not cytomicobj.error:
cytomicobj.lst_attuuid.remove('x')
cytomicobj.lst_attuuid.remove('y')
# Untag attributes
untag_attributes(cytomicobj)
except Exception:
cytomicobj.error = True
if cytomicobj.debug:
sys.exit('Unable to get attributes')
def untag_event(evtid):
# Remove tag of the event being processed.
try:
cytomicobj.records = 0
evt = cytomicobj.misp.get_event(event=evtid, pythonify=True)
if len(evt.tags) > 0:
for tg in evt.tags:
if tg.name == cytomicobj.tag:
cytomicobj.misp.untag(evt['uuid'], cytomicobj.tag)
cytomicobj.records += 1
cytomicobj.res_msg['Event: ' + str(evtid)] = cytomicobj.res_msg['Event: ' + str(evtid)] + ' (event untagged)'
break
except Exception:
cytomicobj.error = True
if cytomicobj.debug:
sys.exit('Unable to untag events')
def process_events(cytomicobj, moduleconfig):
# Get events that contain Cytomic tag.
try:
collect_events_ids(cytomicobj, moduleconfig)
total_attributes_sent = 0
for evtid in cytomicobj.lst_evtid:
cytomicobj.error = False
if cytomicobj.res_msg is None:
cytomicobj.res_msg = {'Event: ' + str(evtid): None}
else:
cytomicobj.res_msg['Event: ' + str(evtid)] = None
if cytomicobject.debug:
print('Event id: ' + str(evtid))
# get attributes of each known type of the event / prepare data to send / send data to Cytomic
process_attributes(cytomicobj, moduleconfig, evtid)
if not cytomicobj.error:
untag_event(evtid)
except Exception:
cytomicobj.error = True
if cytomicobj.debug:
sys.exit('Unable to process events ids')
def untag_attributes(cytomicobj):
# Remove tag of attributes sent.
try:
icont = 0
if len(cytomicobj.lst_attuuid) > 0:
for uuid in cytomicobj.lst_attuuid:
attr = cytomicobj.misp.get_attribute(attribute=uuid, pythonify=True)
if len(attr.tags) > 0:
for tg in attr.tags:
if tg.name == cytomicobj.tag:
cytomicobj.misp.untag(uuid, cytomicobj.tag)
icont += 1
break
print('Attributes untagged (' + str(icont) + ')')
except Exception:
cytomicobj.error = True
if cytomicobj.debug:
sys.exit('Unable to untag attributes')
def process_attributes_upload(cytomicobj, moduleconfig):
# get attributes of each known type / prepare data to send / send data to Cytomic
try:
collect_events_ids(cytomicobj, moduleconfig)
process_attributes(cytomicobj, moduleconfig)
except Exception:
cytomicobj.error = True
if cytomicobj.debug:
sys.exit('Unable to upload attributes to Cytomic')
def process_attributes_delete(cytomicobj, moduleconfig):
# get attributes of each known type / prepare data to send / send data to Cytomic
try:
collect_events_ids(cytomicobj, moduleconfig)
process_attributes(cytomicobj, moduleconfig)
except Exception:
cytomicobj.error = True
if cytomicobj.debug:
sys.exit('Unable to delete attributes in Cytomic')
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Upload or delete indicators via the Cytomic API')
group = parser.add_mutually_exclusive_group()
group.add_argument('--events', action='store_true', help='Upload events indicators')
group.add_argument('--upload', action='store_true', help='Upload indicators')
group.add_argument('--delete', action='store_true', help='Delete indicators')
args = parser.parse_args()
if not args.upload and not args.delete and not args.events:
sys.exit("No valid action for the API")
if misp_verifycert is False:
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
module_config = get_config(misp_url, misp_key, misp_verifycert)
cytomicobj = cytomicobject
misp = ExpandedPyMISP(misp_url, misp_key, misp_verifycert, debug=cytomicobject.debug)
cytomicobj.misp = misp
cytomicobj.args = args
access_token = get_token(module_config['token_url'], module_config['clientid'], module_config['clientsecret'], module_config['scope'], module_config['grant_type'], module_config['username'], module_config['password'])
cytomicobj.api_call_headers = {'Authorization': 'Bearer ' + access_token}
if cytomicobj.debug:
print('Received access token')
if cytomicobj.args.events:
cytomicobj.tag = module_config['upload_tag']
cytomicobj.limit_events = module_config['limit_upload_events']
cytomicobj.limit_attributes = module_config['limit_upload_attributes']
process_events(cytomicobj, module_config)
print_result_events(cytomicobj)
elif cytomicobj.args.upload:
cytomicobj.tag = module_config['upload_tag']
cytomicobj.limit_events = 0
cytomicobj.limit_attributes = module_config['limit_upload_attributes']
process_attributes_upload(cytomicobj, module_config)
else:
cytomicobj.tag = module_config['delete_tag']
cytomicobj.limit_events = 0
cytomicobj.limit_attributes = module_config['limit_upload_attributes']
process_attributes_delete(cytomicobj, module_config)

View File

@ -1,26 +1,11 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from pymisp import PyMISP
from keys import misp_url, misp_key,misp_verifycert
from pymisp import ExpandedPyMISP
from keys import misp_url, misp_key, misp_verifycert
import argparse
# Usage for pipe masters: ./last.py -l 5h | jq .
def init(url, key):
return PyMISP(url, key, misp_verifycert, 'json', debug=True)
def del_event(m, eventid):
result = m.delete_event(eventid)
print(result)
def del_attr(m, attrid):
result = m.delete_attribute(attrid)
print(result)
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Delete an event from a MISP instance.')
parser.add_argument("-e", "--event", help="Event ID to delete.")
@ -28,9 +13,10 @@ if __name__ == '__main__':
args = parser.parse_args()
misp = init(misp_url, misp_key)
misp = ExpandedPyMISP(misp_url, misp_key, misp_verifycert)
if args.event:
del_event(misp, args.event)
result = misp.delete_event(args.event)
else:
del_attr(misp, args.attribute)
result = misp.delete_attribute(args.attribute)
print(result)

View File

@ -1,25 +1,16 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from pymisp import PyMISP
from pymisp import ExpandedPyMISP
from keys import misp_url, misp_key, misp_verifycert
import argparse
# For python2 & 3 compat, a bit dirty, but it seems to be the least bad one
try:
input = raw_input
except NameError:
pass
def init(url, key):
return PyMISP(url, key, misp_verifycert, 'json')
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Delete the user with the given id. Keep in mind that disabling users (by setting the disabled flag via an edit) is always prefered to keep user associations to events intact.')
parser = argparse.ArgumentParser(description='Delete the user with the given id. Keep in mind that disabling users (by setting the disabled flag via an edit) is always preferred to keep user associations to events intact.')
parser.add_argument("-i", "--user_id", help="The id of the user you want to delete.")
args = parser.parse_args()
misp = init(misp_url, misp_key)
misp = ExpandedPyMISP(misp_url, misp_key, misp_verifycert)
print(misp.delete_user(args.user_id))

20
examples/edit_organisation.py Executable file
View File

@ -0,0 +1,20 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from pymisp import ExpandedPyMISP, MISPOrganisation
from keys import misp_url, misp_key, misp_verifycert
import argparse
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Edit the email of the organisation designated by the organisation_id.')
parser.add_argument("-i", "--organisation_id", required=True, help="The id of the organisation you want to modify.")
parser.add_argument("-e", "--email", help="Email linked to the organisation.")
args = parser.parse_args()
misp = ExpandedPyMISP(misp_url, misp_key, misp_verifycert)
org = MISPOrganisation()
org.id = args.organisation_id
org.email = args.email
print(misp.update_organisation(org, pythonify=True))

View File

@ -1,19 +1,10 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from pymisp import PyMISP
from pymisp import ExpandedPyMISP, MISPUser
from keys import misp_url, misp_key, misp_verifycert
import argparse
# For python2 & 3 compat, a bit dirty, but it seems to be the least bad one
try:
input = raw_input
except NameError:
pass
def init(url, key):
return PyMISP(url, key, misp_verifycert, 'json')
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Edit the email of the user designated by the user_id.')
@ -21,6 +12,9 @@ if __name__ == '__main__':
parser.add_argument("-e", "--email", help="Email linked to the account.")
args = parser.parse_args()
misp = init(misp_url, misp_key)
misp = ExpandedPyMISP(misp_url, misp_key, misp_verifycert)
user = MISPUser()
user.id = args.user_id
user.email = args.email
print(misp.edit_user(args.user_id, email=args.email))
print(misp.edit_user(user, pythonify=True))

View File

@ -1,29 +0,0 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from pymisp import PyMISP
from keys import misp_url, misp_key, misp_verifycert
import argparse
# For python2 & 3 compat, a bit dirty, but it seems to be the least bad one
try:
input = raw_input
except NameError:
pass
def init(url, key):
return PyMISP(url, key, misp_verifycert, 'json')
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Edit the user designed by the user_id. If no file is provided, returns a json listing all the fields used to describe a user.')
parser.add_argument("-i", "--user_id", required=True, help="The name of the json file describing the user you want to modify.")
parser.add_argument("-f", "--json_file", help="The name of the json file describing your modifications.")
args = parser.parse_args()
misp = init(misp_url, misp_key)
if args.json_file is None:
print (misp.get_edit_user_fields_list(args.user_id))
else:
print(misp.edit_user_json(args.json_file, args.user_id))

View File

@ -1,126 +0,0 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copy Emerging Threats Block IPs list to several MISP events
# Because of the large size of the list the first run will take a minute
# Running it again will update the MISP events if changes are detected
#
# This script requires PyMISP 2.4.50 or later
import sys, json, time, requests
from pymisp import PyMISP
from keys import misp_url, misp_key, misp_verifycert
et_url = 'https://rules.emergingthreats.net/fwrules/emerging-Block-IPs.txt'
et_str = 'Emerging Threats '
def init_misp():
global mymisp
mymisp = PyMISP(misp_url, misp_key, misp_verifycert)
def load_misp_event(eid):
global et_attr
global et_drev
global et_event
et_attr = {}
et_drev = {}
et_event = mymisp.get(eid)
echeck(et_event)
for a in et_event['Event']['Attribute']:
if a['category'] == 'Network activity':
et_attr[a['value']] = a['id']
continue
if a['category'] == 'Internal reference':
et_drev = a;
def init_et():
global et_data
global et_rev
requests.packages.urllib3.disable_warnings()
s = requests.Session()
r = s.get(et_url)
if r.status_code != 200:
raise Exception('Error getting ET data: {}'.format(r.text))
name = ''
et_data = {}
et_rev = 0
for line in r.text.splitlines():
if line.startswith('# Rev '):
et_rev = int(line[6:])
continue
if line.startswith('#'):
name = line[1:].strip()
if et_rev and not et_data.get(name):
et_data[name] = {}
continue
l = line.rstrip()
if l:
et_data[name][l] = name
def update_et_event(name):
if et_drev and et_rev and int(et_drev['value']) < et_rev:
# Copy MISP attributes to new dict
et_ips = dict.fromkeys(et_attr.keys())
# Weed out attributes still in ET data
for k,v in et_data[name].items():
et_attr.pop(k, None)
# Delete the leftover attributes from MISP
for k,v in et_attr.items():
r = mymisp.delete_attribute(v)
if r.get('errors'):
print "Error deleting attribute {} ({}): {}\n".format(v,k,r['errors'])
# Weed out ips already in the MISP event
for k,v in et_ips.items():
et_data[name].pop(k, None)
# Add new attributes to MISP event
ipdst = []
for i,k in enumerate(et_data[name].items(), 1-len(et_data[name])):
ipdst.append(k[0])
if i % 100 == 0:
r = mymisp.add_ipdst(et_event, ipdst)
echeck(r, et_event['Event']['id'])
ipdst = []
# Update revision number
et_drev['value'] = et_rev
et_drev.pop('timestamp', None)
attr = []
attr.append(et_drev)
# Publish updated MISP event
et_event['Event']['Attribute'] = attr
et_event['Event']['published'] = False
et_event['Event']['date'] = time.strftime('%Y-%m-%d')
r = mymisp.publish(et_event)
echeck(r, et_event['Event']['id'])
def echeck(r, eid=None):
if r.get('errors'):
if eid:
print "Processing event {} failed: {}".format(eid, r['errors'])
else:
print r['errors']
sys.exit(1)
if __name__ == '__main__':
init_misp()
init_et()
for et_type in set(et_data.keys()):
info = et_str + et_type
r = mymisp.search_index(eventinfo=info)
if r['response']:
eid=r['response'][0]['id']
else: # event not found, create it
new_event = mymisp.new_event(info=info, distribution=3, threat_level_id=4, analysis=1)
echeck(new_event)
eid=new_event['Event']['id']
r = mymisp.add_internal_text(new_event, 1, comment='Emerging Threats revision number')
echeck(r, eid)
load_misp_event(eid)
update_et_event(et_type)

View File

@ -4,9 +4,11 @@
from pymisp import ExpandedPyMISP
try:
from keys import url, key
verifycert = False
except ImportError:
url = 'http://localhost:8080'
key = '8h0gHbhS0fv6JUOlTED0AznLXFbf83TYtQrCycqb'
url = 'https://localhost:8443'
key = 'd6OmdDFvU3Seau3UjwvHS1y3tFQbaRNhJhDX0tjh'
verifycert = False
import argparse
import tools
@ -17,7 +19,8 @@ if __name__ == '__main__':
parser.add_argument("-a", "--attribute", type=int, help="Number of attributes per event (default 3000)")
args = parser.parse_args()
misp = ExpandedPyMISP(url, key, True)
misp = ExpandedPyMISP(url, key, verifycert)
misp.toggle_global_pythonify()
if args.limit is None:
args.limit = 1

View File

@ -4,7 +4,7 @@
import random
from random import randint
import string
from pymisp import MISPEvent
from pymisp import MISPEvent, MISPAttribute
def randomStringGenerator(size, chars=string.ascii_lowercase + string.digits):
@ -15,32 +15,34 @@ def randomIpGenerator():
return str(randint(0, 255)) + '.' + str(randint(0, 255)) + '.' + str(randint(0, 255)) + '.' + str(randint(0, 255))
def _attribute(category, type, value):
attribute = MISPAttribute()
attribute.category = category
attribute.type = type
attribute.value = value
return attribute
def floodtxt(misp, event, maxlength=255):
text = randomStringGenerator(randint(1, maxlength))
textfunctions = [misp.add_internal_comment, misp.add_internal_text, misp.add_internal_other, misp.add_email_subject, misp.add_mutex, misp.add_filename]
textfunctions[randint(0, 5)](event, text)
choose_from = [('Internal reference', 'comment', text), ('Internal reference', 'text', text),
('Internal reference', 'other', text), ('Network activity', 'email-subject', text),
('Artifacts dropped', 'mutex', text), ('Artifacts dropped', 'filename', text)]
misp.add_attribute(event, _attribute(*random.choice(choose_from)))
def floodip(misp, event):
ip = randomIpGenerator()
ipfunctions = [misp.add_ipsrc, misp.add_ipdst]
ipfunctions[randint(0, 1)](event, ip)
choose_from = [('Network activity', 'ip-src', ip), ('Network activity', 'ip-dst', ip)]
misp.add_attribute(event, _attribute(*random.choice(choose_from)))
def flooddomain(misp, event, maxlength=25):
a = randomStringGenerator(randint(1, maxlength))
b = randomStringGenerator(randint(2, 3), chars=string.ascii_lowercase)
domain = a + '.' + b
domainfunctions = [misp.add_hostname, misp.add_domain]
domainfunctions[randint(0, 1)](event, domain)
def flooddomainip(misp, event, maxlength=25):
a = randomStringGenerator(randint(1, maxlength))
b = randomStringGenerator(randint(2, 3), chars=string.ascii_lowercase)
domain = a + '.' + b
ip = randomIpGenerator()
misp.add_domain_ip(event, domain, ip)
choose_from = [('Network activity', 'domain', domain), ('Network activity', 'hostname', domain)]
misp.add_attribute(event, _attribute(*random.choice(choose_from)))
def floodemail(misp, event, maxlength=25):
@ -48,19 +50,15 @@ def floodemail(misp, event, maxlength=25):
b = randomStringGenerator(randint(1, maxlength))
c = randomStringGenerator(randint(2, 3), chars=string.ascii_lowercase)
email = a + '@' + b + '.' + c
emailfunctions = [misp.add_email_src, misp.add_email_dst]
emailfunctions[randint(0, 1)](event, email)
def floodattachment(misp, eventid, distribution, to_ids, category, comment, info, analysis, threat_level_id):
filename = randomStringGenerator(randint(1, 128))
misp.upload_sample(filename, 'dummy', eventid, distribution, to_ids, category, comment, info, analysis, threat_level_id)
choose_from = [('Network activity', 'email-dst', email), ('Network activity', 'email-src', email)]
misp.add_attribute(event, _attribute(*random.choice(choose_from)))
def create_dummy_event(misp):
event = misp.new_event(0, 4, 0, 'dummy event')
flooddomainip(misp, event)
floodattachment(misp, event['Event']['id'], event['Event']['distribution'], False, 'Payload delivery', '', event['Event']['info'], event['Event']['analysis'], event['Event']['threat_level_id'])
event = MISPEvent()
event.info = 'Dummy event'
event = misp.add_event(event, pythonify=True)
return event
def create_massive_dummy_events(misp, nbattribute):
@ -68,12 +66,6 @@ def create_massive_dummy_events(misp, nbattribute):
event.info = 'massive dummy event'
event = misp.add_event(event)
print(event)
eventid = event.id
distribution = '0'
functions = [floodtxt, floodip, flooddomain, flooddomainip, floodemail, floodattachment]
functions = [floodtxt, floodip, flooddomain, floodemail]
for i in range(nbattribute):
choice = randint(0, 5)
if choice == 5:
floodattachment(misp, eventid, distribution, False, 'Payload delivery', '', event.info, event.analysis, event.threat_level_id)
else:
functions[choice](misp, event)
functions[random.randint(0, len(functions) - 1)](misp, event)

View File

@ -0,0 +1,136 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
Koen Van Impe
Disable the to_ids flag of an attribute when there are too many false positives
Put this script in crontab to run every /15 or /60
*/5 * * * * mispuser /usr/bin/python3 /home/mispuser/PyMISP/examples/falsepositive_disabletoids.py
Do inline config in "main"
'''
from pymisp import ExpandedPyMISP, MISPEvent
from keys import misp_url, misp_key, misp_verifycert
from datetime import datetime
from datetime import date
import datetime as dt
import smtplib
import mimetypes
from email.mime.multipart import MIMEMultipart
from email import encoders
from email.mime.base import MIMEBase
from email.mime.text import MIMEText
import argparse
def init(url, key, verifycert):
'''
Template to get MISP module started
'''
return ExpandedPyMISP(url, key, verifycert, 'json')
if __name__ == '__main__':
minimal_fp = 0
threshold_to_ids = .50
minimal_date_sighting_date = '1970-01-01 00:00:00'
smtp_from = 'INSERT_FROM'
smtp_to = 'INSERT_TO'
smtp_server = 'localhost'
report_changes = ''
ts_format = '%Y-%m-%d %H:%M:%S'
parser = argparse.ArgumentParser(description="Disable the to_ids flag of attributes with a certain number of false positives above a threshold.")
parser.add_argument('-m', '--mail', action='store_true', help='Mail the report')
parser.add_argument('-o', '--mailoptions', action='store', help='mailoptions: \'smtp_from=INSERT_FROM;smtp_to=INSERT_TO;smtp_server=localhost\'')
parser.add_argument('-b', '--minimal-fp', default=minimal_fp, type=int, help='Minimal number of false positive (default: %(default)s )')
parser.add_argument('-t', '--threshold', default=threshold_to_ids, type=float, help='Threshold false positive/true positive rate (default: %(default)s )')
parser.add_argument('-d', '--minimal-date-sighting', default=minimal_date_sighting_date, help='Minimal date for sighting (false positive / true positive) (default: %(default)s )')
args = parser.parse_args()
misp = init(misp_url, misp_key, misp_verifycert)
minimal_fp = int(args.minimal_fp)
threshold_to_ids = args.threshold
minimal_date_sighting_date = args.minimal_date_sighting
minimal_date_sighting = int(dt.datetime.strptime(minimal_date_sighting_date, '%Y-%m-%d %H:%M:%S').strftime("%s"))
# Fetch all the attributes
result = misp.search('attributes', to_ids=1, include_sightings=1)
if 'Attribute' in result:
for attribute in result['Attribute']:
true_positive = 0
false_positive = 0
compute_threshold = 0
attribute_id = attribute['id']
attribute_value = attribute['value']
attribute_uuid = attribute['uuid']
event_id = attribute['event_id']
# Only do something if there is a sighting
if 'Sighting' in attribute:
for sighting in attribute['Sighting']:
if int(sighting['date_sighting']) > minimal_date_sighting:
if int(sighting['type']) == 0:
true_positive = true_positive + 1
elif int(sighting['type']) == 1:
false_positive = false_positive + 1
if false_positive > minimal_fp:
compute_threshold = false_positive / (true_positive + false_positive)
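# e.g. 3 false positives and 1 true positive give 3 / (3 + 1) = 0.75, above the default 0.50 threshold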
if compute_threshold >= threshold_to_ids:
# Fetch event title for report text
event_details = misp.get_event(event_id)
event_info = event_details['Event']['info']
misp.update_attribute( { 'uuid': attribute_uuid, 'to_ids': 0})
report_changes = report_changes + 'Disable to_ids for [%s] (%s) in event [%s] (%s) - FP: %s TP: %s \n' % (attribute_value, attribute_id, event_info, event_id, false_positive, true_positive)
# Changing the attribute to_ids flag sets the event to unpublished
misp.publish(event_id)
# Only send/print the report if it contains content
if report_changes:
if args.mail:
if args.mailoptions:
mailoptions = args.mailoptions.split(';')
for s in mailoptions:
if s.split('=')[0] == 'smtp_from':
smtp_from = s.split('=')[1]
if s.split('=')[0] == 'smtp_to':
smtp_to = s.split('=')[1]
if s.split('=')[0] == 'smtp_server':
smtp_server = s.split('=')[1]
now = datetime.now()
current_date = now.strftime(ts_format)
report_changes_body = 'MISP Disable to_ids flags for %s on %s\n-------------------------------------------------------------------------------\n\n' % (misp_url, current_date)
report_changes_body = report_changes_body + 'Minimal number of false positives before considering threshold: %s\n' % (minimal_fp)
report_changes_body = report_changes_body + 'Threshold false positives/true positives to disable to_ids flag: %s\n' % (threshold_to_ids)
report_changes_body = report_changes_body + 'Minimal date for sighting false positives: %s\n\n' % (minimal_date_sighting_date)
report_changes_body = report_changes_body + report_changes
report_changes_body = report_changes_body + '\nEvents that have attributes with changed to_ids flag have been republished, without e-mail notification.'
report_changes_body = report_changes_body + '\n\nMISP Disable to_ids Finished\n'
subject = 'Report of disable to_ids flag for false positives sightings of %s' % (current_date)
msg = MIMEMultipart()
msg['From'] = smtp_from
msg['To'] = smtp_to
msg['Subject'] = subject
msg.attach(MIMEText(report_changes_body, 'plain'))
print(report_changes_body)
server = smtplib.SMTP(smtp_server)
server.sendmail(smtp_from, smtp_to, msg.as_string())
else:
print(report_changes)

View File

@ -8,12 +8,10 @@ from pymisp.tools.abstractgenerator import AbstractMISPObjectGenerator
class CowrieMISPObject(AbstractMISPObjectGenerator):
def __init__(self, dico_val, **kargs):
self._dico_val = dico_val
self.name = "cowrie"
# Enforce attribute date with timestamp
super(CowrieMISPObject, self).__init__('cowrie',
default_attributes_parameters={'timestamp': int(time.time())},
**kargs)
default_attributes_parameters={'timestamp': int(time.time())},
**kargs)
self.generate_attributes()
def generate_attributes(self):

View File

@ -9,10 +9,16 @@
## Installation
````
```
# redis-server
sudo apt install redis-server
# Check if redis is running
redis-cli ping
# Feed generator
git clone https://github.com/CIRCL/PyMISP
cd examples/feed-generator-from-redis
git clone https://github.com/MISP/PyMISP
cd PyMISP/examples/feed-generator-from-redis
cp settings.default.py settings.py
vi settings.py # adjust your settings
@ -66,7 +72,7 @@ python3 server.py
>>> obj_data = { "session": "session_id", "username": "admin", "password": "admin", "protocol": "telnet" }
>>> generator.add_object_to_event(obj_name, **obj_data)
# Immediatly write the event to the disk (Bypassing the default flushing behavior)
# Immediately write the event to the disk (Bypassing the default flushing behavior)
>>> generator.flush_event()
```
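For reference, the usage above as a self-contained snippet, assuming `FeedGenerator()` can be instantiated with the defaults it reads from `settings.py`:
```python
from generator import FeedGenerator

generator = FeedGenerator()
# Add a cowrie object to the current daily event (same fields as in the usage above)
generator.add_object_to_event('cowrie', session='session_id', username='admin',
                              password='admin', protocol='telnet')
# Write the event to disk immediately instead of waiting for the periodic flush
generator.flush_event()
```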

View File

@ -1,15 +1,15 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import sys
import json
import argparse
import datetime
import json
import sys
import time
import redis
import settings
from generator import FeedGenerator
@ -60,7 +60,10 @@ class RedisToMISPFeed:
except Exception as error:
self.save_error_to_redis(error, data)
beautyful_sleep(5, self.format_last_action())
try:
beautyful_sleep(5, self.format_last_action())
except KeyboardInterrupt:
sys.exit(130)
def pop(self, key):
popped = self.serv.rpop(key)
@ -104,7 +107,7 @@ class RedisToMISPFeed:
# Suffix not provided, try to add anyway
if settings.fallback_MISP_type == 'attribute':
new_key = key + self.SUFFIX_ATTR
# Add atribute type from the config
# Add attribute type from the config
if 'type' not in data and settings.fallback_attribute_type:
data['type'] = settings.fallback_attribute_type
else:

View File

@ -1,14 +1,13 @@
#!/usr/bin/env python3
import sys
import datetime
import hashlib
import json
import os
import hashlib
import datetime
import sys
import time
import uuid
from pymisp import MISPEvent
from pymisp import MISPEvent, MISPOrganisation
import settings
@ -35,11 +34,6 @@ def get_system_templates():
return templates
def gen_uuid():
"""Generate a random UUID and returns its string representation"""
return str(uuid.uuid4())
class FeedGenerator:
"""Helper object to create MISP feed.
@ -127,29 +121,44 @@ class FeedGenerator:
if ('|' in attr_type or attr_type == 'malware-sample'):
split = attr_value.split('|')
self.attributeHashes.append([
hashlib.md5(str(split[0]).encode("utf-8")).hexdigest(),
hashlib.md5(str(split[0]).encode("utf-8"), usedforsecurity=False).hexdigest(),
self.current_event_uuid
])
self.attributeHashes.append([
hashlib.md5(str(split[1]).encode("utf-8")).hexdigest(),
hashlib.md5(str(split[1]).encode("utf-8"), usedforsecurity=False).hexdigest(),
self.current_event_uuid
])
else:
self.attributeHashes.append([
hashlib.md5(str(attr_value).encode("utf-8")).hexdigest(),
hashlib.md5(str(attr_value).encode("utf-8"), usedforsecurity=False).hexdigest(),
self.current_event_uuid
])
# Manifest
def _init_manifest(self):
# check if outputdir exists and try to create it if not
if not os.path.exists(settings.outputdir):
try:
os.makedirs(settings.outputdir)
except PermissionError as error:
print(error)
print("Please fix the above error and try again.")
sys.exit(126)
# create an empty manifest
with open(os.path.join(settings.outputdir, 'manifest.json'), 'w'):
pass
try:
with open(os.path.join(settings.outputdir, 'manifest.json'), 'w') as f:
json.dump({}, f)
except PermissionError as error:
print(error)
print("Please fix the above error and try again.")
sys.exit(126)
# create new event and save manifest
self.create_daily_event()
def flush_event(self, new_event=None):
print('Writting event on disk'+' '*50)
print('Writing event on disk' + ' ' * 50)
if new_event is not None:
event_uuid = new_event['uuid']
event = new_event
@ -157,9 +166,8 @@ class FeedGenerator:
event_uuid = self.current_event_uuid
event = self.current_event
eventFile = open(os.path.join(settings.outputdir, event_uuid+'.json'), 'w')
eventFile.write(event.to_json())
eventFile.close()
with open(os.path.join(settings.outputdir, event_uuid + '.json'), 'w') as eventFile:
json.dump(event.to_feed(), eventFile)
self.save_hashes()
@ -182,27 +190,11 @@ class FeedGenerator:
hashFile.write('{},{}\n'.format(element[0], element[1]))
hashFile.close()
self.attributeHashes = []
print('Hash saved' + ' '*30)
print('Hash saved' + ' ' * 30)
except Exception as e:
print(e)
sys.exit('Could not create the quick hash lookup file.')
def _addEventToManifest(self, event):
event_dict = event.to_dict()['Event']
tags = []
for eventTag in event_dict.get('EventTag', []):
tags.append({'name': eventTag['Tag']['name'],
'colour': eventTag['Tag']['colour']})
return {
'Orgc': event_dict.get('Orgc', []),
'Tag': tags,
'info': event_dict['info'],
'date': event_dict['date'],
'analysis': event_dict['analysis'],
'threat_level_id': event_dict['threat_level_id'],
'timestamp': event_dict.get('timestamp', int(time.time()))
}
def get_last_event_from_manifest(self):
"""Retreive last event from the manifest.
@ -225,7 +217,7 @@ class FeedGenerator:
# Sort by date then by event name
dated_events.sort(key=lambda k: (k[0], k[2]), reverse=True)
return dated_events[0]
except FileNotFoundError as e:
except FileNotFoundError:
print('Manifest not found, generating a fresh one')
self._init_manifest()
return self.get_last_event_from_manifest()
@ -248,11 +240,9 @@ class FeedGenerator:
return event
def create_daily_event(self):
new_uuid = gen_uuid()
today = str(datetime.date.today())
event_dict = {
'uuid': new_uuid,
'id': len(self.manifest)+1,
'id': len(self.manifest) + 1,
'Tag': settings.Tag,
'info': self.daily_event_name.format(today),
'analysis': settings.analysis, # [0-2]
@ -264,14 +254,14 @@ class FeedGenerator:
event.from_dict(**event_dict)
# reference org
org_dict = {}
org_dict['name'] = settings.org_name
org_dict['uui'] = settings.org_uuid
event['Orgc'] = org_dict
org = MISPOrganisation()
org.name = settings.org_name
org.uuid = settings.org_uuid
event.Orgc = org
# save event on disk
self.flush_event(new_event=event)
# add event to manifest
self.manifest[event['uuid']] = self._addEventToManifest(event)
self.manifest.update(event.manifest)
self.save_manifest()
return event

View File

@ -7,7 +7,7 @@ This python script can be used to generate a MISP feed based on an existing MISP
````
git clone https://github.com/MISP/PyMISP.git
cd examples/feed-generator
cp settings-default.py settings.py
cp settings.default.py settings.py
vi settings.py #adjust your settings
python3 generate.py
````

View File

@ -4,149 +4,55 @@
import sys
import json
import os
import hashlib
from pymisp import PyMISP
from pymisp import ExpandedPyMISP
from settings import url, key, ssl, outputdir, filters, valid_attribute_distribution_levels
try:
from settings import with_distribution
except ImportError:
with_distribution = False
objectsFields = {
'Attribute': {
'uuid',
'value',
'category',
'type',
'comment',
'data',
'timestamp',
'to_ids',
'object_relation'
},
'Event': {
'uuid',
'info',
'threat_level_id',
'analysis',
'timestamp',
'publish_timestamp',
'published',
'date'
},
'Object': {
'name',
'meta-category',
'description',
'template_uuid',
'template_version',
'uuid',
'timestamp',
'distribution',
'sharing_group_id',
'comment'
},
'ObjectReference': {
'uuid',
'timestamp',
'relationship_type',
'comment',
'object_uuid',
'referenced_uuid'
},
'Orgc': {
'name',
'uuid'
},
'Tag': {
'name',
'colour',
'exportable'
}
}
try:
from settings import with_local_tags
except ImportError:
with_local_tags = True
objectsToSave = {
'Orgc': {},
'Tag': {},
'Attribute': {
'Tag': {}
},
'Object': {
'Attribute': {
'Tag': {}
},
'ObjectReference': {}
}
}
try:
from settings import include_deleted
except ImportError:
include_deleted = False
try:
from settings import exclude_attribute_types
except ImportError:
exclude_attribute_types = []
valid_attribute_distributions = []
attributeHashes = []
def init():
# If we have an old settings.py file then this variable won't exist
global valid_attribute_distributions
try:
valid_attribute_distributions = valid_attribute_distribution_levels
valid_attribute_distributions = [int(v) for v in valid_attribute_distribution_levels]
except Exception:
valid_attribute_distributions = ['0', '1', '2', '3', '4', '5']
return PyMISP(url, key, ssl)
valid_attribute_distributions = [0, 1, 2, 3, 4, 5]
return ExpandedPyMISP(url, key, ssl)
def recursiveExtract(container, containerType, leaf, eventUuid):
temp = {}
if containerType in ['Attribute', 'Object']:
if (__blockByDistribution(container)):
return False
for field in objectsFields[containerType]:
if field in container:
temp[field] = container[field]
if (containerType == 'Attribute'):
global attributeHashes
if ('|' in container['type'] or container['type'] == 'malware-sample'):
split = container['value'].split('|')
attributeHashes.append([hashlib.md5(split[0].encode("utf-8")).hexdigest(), eventUuid])
attributeHashes.append([hashlib.md5(split[1].encode("utf-8")).hexdigest(), eventUuid])
else:
attributeHashes.append([hashlib.md5(container['value'].encode("utf-8")).hexdigest(), eventUuid])
children = leaf.keys()
for childType in children:
childContainer = container.get(childType)
if (childContainer):
if (type(childContainer) is dict):
temp[childType] = recursiveExtract(childContainer, childType, leaf[childType], eventUuid)
else:
temp[childType] = []
for element in childContainer:
processed = recursiveExtract(element, childType, leaf[childType], eventUuid)
if (processed):
temp[childType].append(processed)
return temp
def saveEvent(misp, uuid):
event = misp.get_event(uuid)
if not event.get('Event'):
print('Error while fetching event: {}'.format(event['message']))
sys.exit('Could not create file for event ' + uuid + '.')
event['Event'] = recursiveExtract(event['Event'], 'Event', objectsToSave, event['Event']['uuid'])
event = json.dumps(event)
eventFile = open(os.path.join(outputdir, uuid + '.json'), 'w')
eventFile.write(event)
eventFile.close()
def __blockByDistribution(element):
if element['distribution'] not in valid_attribute_distributions:
return True
return False
def saveHashes():
if not attributeHashes:
return False
def saveEvent(event):
try:
hashFile = open(os.path.join(outputdir, 'hashes.csv'), 'w')
for element in attributeHashes:
hashFile.write('{},{}\n'.format(element[0], element[1]))
hashFile.close()
with open(os.path.join(outputdir, f'{event["Event"]["uuid"]}.json'), 'w') as f:
json.dump(event, f, indent=2)
except Exception as e:
print(e)
sys.exit('Could not create the event dump.')
def saveHashes(hashes):
try:
with open(os.path.join(outputdir, 'hashes.csv'), 'w') as hashFile:
for element in hashes:
hashFile.write('{},{}\n'.format(element[0], element[1]))
except Exception as e:
print(e)
sys.exit('Could not create the quick hash lookup file.')
@ -162,41 +68,39 @@ def saveManifest(manifest):
sys.exit('Could not create the manifest file.')
def __addEventToManifest(event):
tags = []
for eventTag in event['EventTag']:
tags.append({'name': eventTag['Tag']['name'],
'colour': eventTag['Tag']['colour']})
return {'Orgc': event['Orgc'],
'Tag': tags,
'info': event['info'],
'date': event['date'],
'analysis': event['analysis'],
'threat_level_id': event['threat_level_id'],
'timestamp': event['timestamp']
}
if __name__ == '__main__':
misp = init()
try:
r = misp.get_index(filters)
events = r['response']
print(events[0])
events = misp.search_index(minimal=True, **filters, pythonify=False)
except Exception as e:
print(e)
sys.exit("Invalid response received from MISP.")
if len(events) == 0:
sys.exit("No events returned.")
manifest = {}
hashes = []
counter = 1
total = len(events)
for event in events:
saveEvent(misp, event['uuid'])
manifest[event['uuid']] = __addEventToManifest(event)
try:
e = misp.get_event(event['uuid'], deleted=include_deleted, pythonify=True)
if exclude_attribute_types:
for i, attribute in enumerate(e.attributes):
if attribute.type in exclude_attribute_types:
e.attributes.pop(i)
e_feed = e.to_feed(valid_distributions=valid_attribute_distributions, with_meta=True, with_distribution=with_distribution, with_local_tags=with_local_tags)
except Exception as err:
print(err, event['uuid'])
continue
if not e_feed:
print(f'Invalid distribution {e.distribution}, skipping')
continue
hashes += [[h, e.uuid] for h in e_feed['Event'].pop('_hashes')]
manifest.update(e_feed['Event'].pop('_manifest'))
saveEvent(e_feed)
print("Event " + str(counter) + "/" + str(total) + " exported.")
counter += 1
saveManifest(manifest)
print('Manifest saved.')
saveHashes()
saveHashes(hashes)
print('Hashes saved. Feed creation completed.')

View File

@ -16,11 +16,13 @@ outputdir = 'output'
# you can use on the event index, such as organisation, tags, etc.
# It uses the same joining and condition rules as the API parameters
# For example:
# filters = {'tag':'tlp:white|feed-export|!privint','org':'CIRCL', 'published':1}
# filters = {'tags':['tlp:white','feed-export','!privint'],'org':'CIRCL', 'published':1}
# the above would generate a feed for all published events created by CIRCL,
# tagged tlp:white and/or feed-export but exclude anything tagged privint
filters = {'published':'true'}
# Include deleted attributes and objects in the events
include_deleted = False
# By default all attributes will be included in the feed generation
# Remove the levels that you do not wish to include in the feed
@ -37,3 +39,18 @@ filters = {'published':'true'}
# 5: Inherit Event
valid_attribute_distribution_levels = ['0', '1', '2', '3', '4', '5']
# By default, all attribute passing the filtering rules will be exported.
# This setting can be used to filter out any attributes being of the type contained in the list.
# Warning: Keep in mind that if you propagate data (via synchronisation/feeds/...), recipients
# will not be able to get these attributes back unless their events get updated.
# For example:
# exclude_attribute_types = ['malware-sample']
exclude_attribute_types = []
# Include the distribution and sharing group information (and names/UUIDs of organisations in those Sharing Groups)
# Set this to False if you want to discard the distribution metadata. That way all data will inherit the distribution
# of the feed
with_distribution = False
# Include the exportable local tags along with the global tags. The default is True.
with_local_tags = True
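For illustration, a settings.py combining the options documented above could look like this (the filter and exclusion values are hypothetical):
```python
# Hypothetical settings.py excerpt; adjust the values to your instance
filters = {'tags': ['tlp:white', 'feed-export'], 'published': 1}
include_deleted = False
# Drop level 0 ("Your organisation only") from the feed
valid_attribute_distribution_levels = ['1', '2', '3', '4', '5']
exclude_attribute_types = ['malware-sample']
with_distribution = False
with_local_tags = True
```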

View File

@ -3,22 +3,13 @@
from keys import misp_url, misp_key, misp_verifycert
import argparse
from pymisp import PyMISP
# For python2 & 3 compat, a bit dirty, but it seems to be the least bad one
try:
input = raw_input
except NameError:
pass
def init(url, key):
return PyMISP(url, key, misp_verifycert, 'json', debug=False)
from pymisp import ExpandedPyMISP
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Fetch all events from a feed.')
parser.add_argument("-f", "--feed", required=True, help="feed's ID to be fetched.")
args = parser.parse_args()
misp = init(misp_url, misp_key)
misp = ExpandedPyMISP(misp_url, misp_key, misp_verifycert)
misp.fetch_feed(args.feed)

View File

@ -0,0 +1,38 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from pymisp import PyMISP
from keys import misp_url, misp_key
import argparse
def init(url, key):
return PyMISP(url, key)
def loop_attributes(elem):
if 'Attribute' in elem.keys():
for attribute in elem['Attribute']:
if 'warnings' in attribute.keys():
for warning in attribute['warnings']:
print("Value {} has a hit in warninglist with name '{}' and id '{}'".format(warning['value'],
warning[
'warninglist_name'],
warning[
'warninglist_id']))
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Print all warninglist hits for an event.')
parser.add_argument("eventid", type=str, help="The event id of the event to get info of")
args = parser.parse_args()
misp = init(misp_url, misp_key)
evt = misp.search('events', eventid=args.eventid, includeWarninglistHits=1)['response'][0]['Event']
if 'warnings' in evt.keys():
print('warnings in entire event:')
print(str(evt['warnings']) + '\n')
print('Warnings at attribute levels:')
loop_attributes(evt)
if 'Object' in evt.keys():
for obj in evt['Object']:
loop_attributes(obj)

View File

@ -1,7 +1,7 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from pymisp import PyMISP
from pymisp import ExpandedPyMISP
from keys import misp_url, misp_key, misp_verifycert
import argparse

View File

@ -5,7 +5,7 @@ import argparse
import json
try:
from pymisp import MISPEncode
from pymisp import pymisp_json_default, AbstractMISP
from pymisp.tools import make_binary_objects
except ImportError:
pass
@ -43,6 +43,15 @@ def make_objects(path):
to_return['references'] += s.ObjectReference
if peo:
if hasattr(peo, 'certificates') and hasattr(peo, 'signers'):
# special authenticode case for PE objects
for c in peo.certificates:
to_return['objects'].append(c)
for s in peo.signers:
to_return['objects'].append(s)
del peo.certificates
del peo.signers
del peo.sections
to_return['objects'].append(peo)
if peo.ObjectReference:
to_return['references'] += peo.ObjectReference
@ -51,7 +60,8 @@ def make_objects(path):
to_return['objects'].append(fo)
if fo.ObjectReference:
to_return['references'] += fo.ObjectReference
return json.dumps(to_return, cls=MISPEncode)
return json.dumps(to_return, default=pymisp_json_default)
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Extract indicators out of binaries and returns MISP objects.')
@ -59,6 +69,7 @@ if __name__ == '__main__':
group.add_argument("-p", "--path", help="Path to process.")
group.add_argument("-c", "--check", action='store_true', help="Check the dependencies.")
args = parser.parse_args()
a = AbstractMISP()
if args.check:
print(check())

View File

@ -0,0 +1,15 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
from pymisp.tools import feed_meta_generator
import argparse
from pathlib import Path
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Build meta files for feed')
parser.add_argument("--feed", required=True, help="Path to directory containing the feed.")
args = parser.parse_args()
feed = Path(args.feed)
feed_meta_generator(feed)

View File

@ -1,15 +1,12 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from pymisp import PyMISP
from pymisp import ExpandedPyMISP
from keys import misp_url, misp_key, misp_verifycert
import argparse
import os
import json
# Usage for pipe masters: ./last.py -l 5h | jq .
proxies = {
'http': 'http://127.0.0.1:8123',
'https': 'http://127.0.0.1:8123',
@ -18,18 +15,6 @@ proxies = {
proxies = None
def init(url, key):
return PyMISP(url, key, misp_verifycert, 'json', proxies=proxies)
def get_event(m, event, out=None):
result = m.get_event(event)
if out is None:
print(json.dumps(result) + '\n')
else:
with open(out, 'w') as f:
f.write(json.dumps(result) + '\n')
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Get an event from a MISP instance.')
@ -42,6 +27,11 @@ if __name__ == '__main__':
print('Output file already exists, abort.')
exit(0)
misp = init(misp_url, misp_key)
misp = ExpandedPyMISP(misp_url, misp_key, misp_verifycert, proxies=proxies)
get_event(misp, args.event, args.output)
event = misp.get_event(args.event, pythonify=True)
if args.output:
with open(args.output, 'w') as f:
f.write(event.to_json())
else:
print(event.to_json())

View File

@ -1,26 +0,0 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from pymisp import PyMISP
from keys import misp_url, misp_key, misp_verifycert
import argparse
def init(url, key):
return PyMISP(url, key, misp_verifycert, 'json')
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Get an attachment.')
parser.add_argument("-a", "--attribute", type=int, help="Attribute ID to download.")
args = parser.parse_args()
misp = init(misp_url, misp_key)
with open('foo', 'wb') as f:
out = misp.get_attachment(args.attribute)
if isinstance(out, dict):
# Fails
print(out)
else:
f.write(out)

View File

@ -9,6 +9,7 @@ from keys import misp_url, misp_key, misp_verifycert
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Get MISP stuff as CSV.')
parser.add_argument("--controller", default='attributes', help="Attribute to use for the search (events, objects, attributes)")
parser.add_argument("-e", "--event_id", help="Event ID to fetch. Without it, it will fetch the whole database.")
parser.add_argument("-a", "--attribute", nargs='+', help="Attribute column names")
parser.add_argument("-o", "--object_attribute", nargs='+', help="Object attribute column names")
@ -26,7 +27,7 @@ if __name__ == '__main__':
if not attr:
attr = None
print(args.context)
response = pymisp.search(return_format='csv', eventid=args.event_id, requested_attributes=attr,
response = pymisp.search(return_format='csv', controller=args.controller, eventid=args.event_id, requested_attributes=attr,
type_attribute=args.misp_types, include_context=args.context)
if args.outfile:

View File

@ -4,3 +4,6 @@
misp_url = 'https://<your MISP URL>/'
misp_key = 'Your MISP auth key' # The MISP auth key can be found on the MISP web interface under the automation section
misp_verifycert = True
misp_client_cert = ''
proofpoint_sp = '<proofpoint service principal>' # Service Principal from TAP (https://threatinsight.proofpoint.com/<custID>/settings/connected-applications)
proofpoint_secret = '<proofpoint secret>'

View File

@ -1,43 +1,48 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from pymisp import PyMISP
from pymisp import ExpandedPyMISP
from keys import misp_url, misp_key, misp_verifycert
try:
from keys import misp_client_cert
except ImportError:
misp_client_cert = ''
import argparse
import os
import json
# Usage for pipe masters: ./last.py -l 5h | jq .
def init(url, key):
return PyMISP(url, key, misp_verifycert, 'json')
def download_last(m, last, out=None):
result = m.download_last(last)
if out is None:
if 'response' in result:
print(json.dumps(result['response']))
else:
print('No results for that time period')
exit(0)
else:
with open(out, 'w') as f:
f.write(json.dumps(result['response']))
# Usage in case of large data set and pivoting page by page: python3 last.py -l 48h -m 10 -p 2 | jq .[].Event.info
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Download latest events from a MISP instance.')
parser.add_argument("-l", "--last", required=True, help="can be defined in days, hours, minutes (for example 5d or 12h or 30m).")
parser.add_argument("-m", "--limit", required=False, default="10", help="Add the limit of records to get (by default, the limit is set to 10)")
parser.add_argument("-p", "--page", required=False, default="1", help="Add the page to request to paginate over large dataset (by default page is set to 1)")
parser.add_argument("-o", "--output", help="Output file")
args = parser.parse_args()
if args.output is not None and os.path.exists(args.output):
print('Output file already exists, abord.')
print('Output file already exists, aborted.')
exit(0)
misp = init(misp_url, misp_key)
if misp_client_cert == '':
misp_client_cert = None
else:
misp_client_cert = (misp_client_cert)
download_last(misp, args.last, args.output)
misp = ExpandedPyMISP(misp_url, misp_key, misp_verifycert, cert=misp_client_cert)
result = misp.search(publish_timestamp=args.last, limit=args.limit, page=args.page, pythonify=True)
if not result:
print('No results for that time period')
exit(0)
if args.output:
with open(args.output, 'w') as f:
for r in result:
f.write(r.to_json() + '\n')
else:
for r in result:
print(r.to_json())

View File

@ -10,7 +10,7 @@ from pymisp import MISPEvent
try:
from keys import misp_url, misp_key, misp_verifycert
from pymisp import ExpandedPyMISP
from pymisp import PyMISP
offline = False
except ImportError as e:
offline = True
@ -22,9 +22,14 @@ Example:
load_csv.py -n file -p /tmp/foo.csv
CSV sample file: tests/csv_testfiles/valid_fieldnames.csv
* If you want to force the fieldnames:
load_csv.py -n file -p /tmp/foo.csv -f SHA1 fileName size-in-bytes
CSV sample file: tests/csv_testfiles/invalid_fieldnames.csv
'''
@ -35,6 +40,8 @@ if __name__ == '__main__':
parser.add_argument("-f", "--fieldnames", nargs='*', default=[], help="Fieldnames of the CSV, have to match the object-relation allowed in the template. If empty, the fieldnames of the CSV have to match the template.")
parser.add_argument("-s", "--skip_fieldnames", action='store_true', help="Skip fieldnames in the CSV.")
parser.add_argument("-d", "--dump", action='store_true', help="(Debug) Dump the object in the terminal.")
parser.add_argument("--delimiter", type=str, default=',', help="Delimiter between firlds in the CSV. Default: ','.")
parser.add_argument("--quotechar", type=str, default='"', help="Quote character of the fields in the CSV. Default: '\"'.")
# Interact with MISP
misp_group = parser.add_mutually_exclusive_group()
@ -48,7 +55,8 @@ if __name__ == '__main__':
else:
has_fieldnames = args.skip_fieldnames
csv_loader = CSVLoader(template_name=args.object_name, csv_path=args.path,
fieldnames=args.fieldnames, has_fieldnames=has_fieldnames)
fieldnames=args.fieldnames, has_fieldnames=has_fieldnames,
delimiter=args.delimiter, quotechar=args.quotechar)
objects = csv_loader.load()
if args.dump:
@ -58,13 +66,13 @@ if __name__ == '__main__':
if offline:
print('You are in offline mode, quitting.')
else:
misp = ExpandedPyMISP(url=misp_url, key=misp_key, ssl=misp_verifycert)
misp = PyMISP(url=misp_url, key=misp_key, ssl=misp_verifycert)
if args.new_event:
event = MISPEvent()
event.info = args.new_event
for o in objects:
event.add_object(**o)
new_event = misp.add_event(event)
new_event = misp.add_event(event, pythonify=True)
if isinstance(new_event, str):
print(new_event)
elif 'id' in new_event:
@ -72,9 +80,9 @@ if __name__ == '__main__':
else:
print('Something went wrong:')
print(new_event)
else:
elif args.update_event:
for o in objects:
new_object = misp.add_object(args.update_event, o)
new_object = misp.add_object(args.update_event, o, pythonify=True)
if isinstance(new_object, str):
print(new_object)
elif new_object.attributes:
@ -82,3 +90,5 @@ if __name__ == '__main__':
else:
print('Something went wrong:')
print(new_event)
else:
print('you need to pass either a event info field (flag -i), or the event ID you want to update (flag -u)')

203
examples/proofpoint_tap.py Normal file
View File

@ -0,0 +1,203 @@
import requests
from requests.auth import HTTPBasicAuth
import json
from pymisp import ExpandedPyMISP, MISPEvent
from keys import misp_url, misp_key, misp_verifycert, proofpoint_sp, proofpoint_secret
import urllib3
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
if proofpoint_secret == '<proofpoint secret>':
print('Set the proofpoint_secret in keys.py before running. Exiting...')
quit()
# initialize PyMISP and set url for Panorama
misp = ExpandedPyMISP(url=misp_url, key=misp_key, ssl=misp_verifycert)
urlSiem = "https://tap-api-v2.proofpoint.com/v2/siem/all"
alertType = ("messagesDelivered", "messagesBlocked", "clicksPermitted", "clicksBlocked")
# max query is 1h, and we want Proofpoint TAP api to return json
queryString = {
"sinceSeconds": "3600",
"format": "json"
}
responseSiem = requests.request("GET", urlSiem, params=queryString, auth=HTTPBasicAuth(proofpoint_sp, proofpoint_secret))
if 'Credentials authentication failed' in responseSiem.text:
print('Credentials invalid, please edit keys.py and try again')
quit()
jsonDataSiem = json.loads(responseSiem.text)
for alert in alertType:
for messages in jsonDataSiem[alert]:
# initialize and set MISPEvent()
event = MISPEvent()
if alert == "messagesDelivered" or alert == "messagesBlocked":
if alert == "messagesDelivered":
event.info = alert
event.distribution = 0 # Optional, defaults to MISP.default_event_distribution in MISP config
event.threat_level_id = 2 # setting this to 0 breaks the integration
event.analysis = 0 # Optional, defaults to 0 (initial analysis)
else:
event.info = alert
event.distribution = 0 # Optional, defaults to MISP.default_event_distribution in MISP config
event.threat_level_id = 2 # BLOCKED = LOW
event.analysis = 0 # Optional, defaults to 0 (initial analysis)
recipient = event.add_attribute('email-dst', messages["recipient"][0])
recipient.comment = 'recipient address'
sender = event.add_attribute('email-src', messages["sender"])
sender.comment = 'sender address'
if messages["fromAddress"] is not None and messages["fromAddress"] != "" :
fromAddress = event.add_attribute('email-src-display-name', messages["fromAddress"])
headerFrom = event.add_attribute('email-header', messages["headerFrom"])
headerFrom.comment = 'email header from'
senderIP = event.add_attribute('ip-src', messages["senderIP"])
senderIP.comment = 'sender IP'
subject = event.add_attribute('email-subject', messages["subject"])
subject.comment = 'email subject'
if messages["quarantineFolder"] is not None and messages["quarantineFolder"] != "":
quarantineFolder = event.add_attribute('comment', messages["quarantineFolder"])
quarantineFolder.comment = 'quarantine folder'
if messages["quarantineRule"] is not None and messages["quarantineRule"] != "":
quarantineRule = event.add_attribute('comment', messages["quarantineRule"])
quarantineRule.comment = 'quarantine rule'
messageSize = event.add_attribute('size-in-bytes', messages["messageSize"])
messageSize.comment = 'size of email in bytes'
malwareScore = event.add_attribute('comment', messages["malwareScore"])
malwareScore.comment = 'malware score'
phishScore = event.add_attribute('comment', messages["phishScore"])
phishScore.comment = 'phish score'
spamScore = event.add_attribute('comment', messages["spamScore"])
spamScore.comment = 'spam score'
imposterScore = event.add_attribute('comment', messages["impostorScore"])
imposterScore.comment = 'impostor score'
completelyRewritten = event.add_attribute('comment', messages["completelyRewritten"])
completelyRewritten.comment = 'proofpoint url defense'
# grab the threat info for each message in TAP
for threatInfo in messages["threatsInfoMap"]:
threat_type = {
"url": "url",
"attachment": "email-attachment",
"message": "email-body"
}
threat = event.add_attribute(threat_type.get(threatInfo["threatType"]), threatInfo["threat"])
threat.comment = 'threat'
threatUrl = event.add_attribute('link', threatInfo["threatUrl"])
threatUrl.comment = 'link to threat in TAP'
threatStatus = event.add_attribute('comment', threatInfo["threatStatus"])
threatStatus.comment = "proofpoint's threat status"
event.add_tag(threatInfo["classification"])
# get campaignID from each TAP alert and query campaign API
if threatInfo["campaignID"] is not None and threatInfo["campaignID"] != "":
urlCampaign = "https://tap-api-v2.proofpoint.com/v2/campaign/" + threatInfo["campaignID"]
responseCampaign = requests.request("GET", urlCampaign, auth=HTTPBasicAuth(proofpoint_sp, proofpoint_secret))
jsonDataCampaign = json.loads(responseCampaign.text)
campaignType = ("actors", "families", "malware", "techniques")
# loop through campaignType and grab tags to add to MISP event
for tagType in campaignType:
for tag in jsonDataCampaign[tagType]:
event.add_tag(tag['name'])
# grab which policy route the message took
for policy in messages["policyRoutes"]:
policyRoute = event.add_attribute('comment', policy)
policyRoute.comment = 'email policy route'
# was the threat in the body of the email or is it an attachment?
for parts in messages["messageParts"]:
disposition = event.add_attribute('comment', parts["disposition"])
disposition.comment = 'email body or attachment'
# sha256 hash of threat
if parts["sha256"] is not None and parts["sha256"] != "":
sha256 = event.add_attribute('sha256', parts["sha256"])
sha256.comment = 'sha256 hash'
# md5 hash of threat
if parts["md5"] is not None and parts["md5"] != "":
md5 = event.add_attribute('md5', parts["md5"])
md5.comment = 'md5 hash'
# filename of threat
if parts["filename"] is not None and parts["filename"] != "":
filename = event.add_attribute('filename', parts["filename"])
filename.comment = 'filename'
misp.add_event(event.to_json())
if alert == "clicksPermitted" or alert == "clicksBlocked":
if alert == "clicksPermitted":
print(alert + " is a permitted click")
event.info = alert
event.distribution = 0 # Optional, defaults to MISP.default_event_distribution in MISP config
event.threat_level_id = 2 # setting this to 0 breaks the integration
event.analysis = 0 # Optional, defaults to 0 (initial analysis)
else:
print(alert + " is a blocked click")
event.info = alert
event.distribution = 0 # Optional, defaults to MISP.default_event_distribution in MISP config
event.threat_level_id = 2 # BLOCKED = LOW
event.analysis = 0 # Optional, defaults to 0 (initial analysis)
event.add_tag(messages["classification"])
campaignId = event.add_attribute('campaign-id', messages["campaignId"][0])
campaignId.comment = 'campaignId'
clickIP = event.add_attribute('ip-src', messages["clickIP"])
clickIP.comment = 'clickIP'
clickTime = event.add_attribute('datetime', messages["clickTime"])
clickTime.comment = 'clicked threat'
threatTime = event.add_attribute('datetime', messages["threatTime"])
threatTime.comment = 'identified threat'
GUID = event.add_attribute('comment', messages["GUID"])
GUID.comment = 'PPS message ID'
recipient = event.add_attribute('email-dst', messages["recipient"][0])
recipient.comment = 'recipient address'
sender = event.add_attribute('email-src', messages["sender"])
sender.comment = 'sender address'
senderIP = event.add_attribute('ip-src', messages["senderIP"])
senderIP.comment = 'sender IP'
threatURL = event.add_attribute('link', messages["threatURL"])
threatURL.comment = 'link to threat in TAP'
url = event.add_attribute('link', messages["url"])
url.comment = 'malicious url clicked'
userAgent = event.add_attribute('user-agent', messages["userAgent"])
misp.add_event(event.to_json())

View File

@ -0,0 +1,65 @@
import requests
import json
from pymisp import ExpandedPyMISP, MISPEvent, MISPOrganisation
from keys import misp_url, misp_key, misp_verifycert, proofpoint_key
# initialize PyMISP and set the URL for the Proofpoint TAP VAP endpoint
misp = ExpandedPyMISP(url=misp_url, key=misp_key, ssl=misp_verifycert)
urlVap = "https://tap-api-v2.proofpoint.com/v2/people/vap?window=30" # Window can be 14, 30, or 90 days
headers = {
'Authorization': "Basic " + proofpoint_key
}
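The "Basic" value sent here is presumably the Base64 encoding of the TAP service principal and secret (the same pair the first script above passes to HTTPBasicAuth); a hypothetical construction in keys.py could look like this:
# Assumed construction of proofpoint_key in keys.py (not part of the original script):
# import base64
# proofpoint_key = base64.b64encode((proofpoint_sp + ':' + proofpoint_secret).encode()).decode()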
responseVap = requests.request("GET", urlVap, headers=headers)
jsonDataVap = json.loads(responseVap.text)
for alert in jsonDataVap["users"]:
orgc = MISPOrganisation()
orgc.name = 'Proofpoint'
orgc.id = '#{ORGC.ID}' # organisation id
orgc.uuid = '#{ORGC.UUID}' # organisation uuid
# initialize and set MISPEvent()
event = MISPEvent()
event.Orgc = orgc
event.info = 'Very Attacked Person ' + jsonDataVap["interval"]
event.distribution = 0 # Optional, defaults to MISP.default_event_distribution in MISP config
event.threat_level_id = 2 # setting this to 0 breaks the integration
event.analysis = 0 # Optional, defaults to 0 (initial analysis)
totalVapUsers = event.add_attribute('counter', jsonDataVap["totalVapUsers"], comment="Total VAP Users")
averageAttackIndex = event.add_attribute('counter', jsonDataVap["averageAttackIndex"], comment="Average Attack Count")
vapAttackIndexThreshold = event.add_attribute('counter', jsonDataVap["vapAttackIndexThreshold"], comment="Attack Threshold")
emails = event.add_attribute('email-dst', alert["identity"]["emails"], comment="Email Destination")
attack = event.add_attribute('counter', alert["threatStatistics"]["attackIndex"], comment="Attack Count")
vip = event.add_attribute('other', str(alert["identity"]["vip"]), comment="VIP")
guid = event.add_attribute('other', alert["identity"]["guid"], comment="GUID")
if alert["identity"]["customerUserId"] is not None:
customerUserId = event.add_attribute('other', alert["identity"]["customerUserId"], comment="Customer User Id")
if alert["identity"]["department"] is not None:
department = event.add_attribute('other', alert["identity"]["department"], comment="Department")
if alert["identity"]["location"] is not None:
location = event.add_attribute('other', alert["identity"]["location"], comment="Location")
if alert["identity"]["name"] is not None:
name = event.add_attribute('target-user', alert["identity"]["name"], comment="Name")
if alert["identity"]["title"] is not None:
title = event.add_attribute('other', alert["identity"]["title"], comment="Title")
event.add_tag("VAP")
misp.add_event(event.to_json())

View File

@ -14,7 +14,7 @@ def init(url, key):
def search_sighting(m, context, out=None, **kwargs):
result = m.sighting_search(context, **kwargs)
result = m.search_sightings(context, **kwargs)
if out is None:
print(json.dumps(result['response']))
else:

View File

@ -1,41 +0,0 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from pymisp import PyMISP
from keys import misp_url, misp_key,misp_verifycert
import argparse
import os
import json
def init(url, key):
return PyMISP(url, key, misp_verifycert, 'json')
def searchall(m, search, quiet, url, out=None):
result = m.search_all(search)
if quiet:
for e in result['response']:
print('{}{}{}\n'.format(url, '/events/view/', e['Event']['id']))
elif out is None:
print(json.dumps(result['response']))
else:
with open(out, 'w') as f:
f.write(json.dumps(result['response']))
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Get all the events matching a value.')
parser.add_argument("-s", "--search", required=True, help="String to search.")
parser.add_argument("-q", "--quiet", action='store_true', help="Only display URLs to MISP")
parser.add_argument("-o", "--output", help="Output file")
args = parser.parse_args()
if args.output is not None and os.path.exists(args.output):
print('Output file already exists, abord.')
exit(0)
misp = init(misp_url, misp_key)
searchall(misp, args.search, args.quiet, misp_url, args.output)

View File

@ -0,0 +1,32 @@
#!/usr/bin/env python
import requests
import json
# Suppress those "Unverified HTTPS request is being made"
import urllib3
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
from keys import misp_url, misp_key, misp_verifycert
proxies = {
}
'''
Checks if the connection to a sync server works
returns json object
'''
def check_connection(connection_number):
misp_headers = {'Content-Type': 'application/json', 'Accept': 'application/json', 'Authorization': misp_key}
req = requests.get(misp_url + 'servers/testConnection/{}'.format(connection_number), verify=misp_verifycert, headers=misp_headers, proxies=proxies)
result = json.loads(req.text)
return(result)
if __name__ == "__main__":
result = check_connection(1)
print(result)

View File

@ -1,24 +1,15 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from pymisp import PyMISP
from pymisp import ExpandedPyMISP
from keys import misp_url, misp_key, misp_verifycert
import argparse
# For python2 & 3 compat, a bit dirty, but it seems to be the least bad one
try:
input = raw_input
except NameError:
pass
def init(url, key):
return PyMISP(url, key, misp_verifycert, 'json')
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Get a list of the sharing groups from the MISP instance.')
misp = init(misp_url, misp_key)
misp = ExpandedPyMISP(misp_url, misp_key, misp_verifycert)
sharing_groups = misp.get_sharing_groups()
print (sharing_groups)
sharing_groups = misp.sharing_groups(pythonify=True)
print(sharing_groups)

168
examples/show_sightings.py Normal file
View File

@ -0,0 +1,168 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
'''
Koen Van Impe
List all the sightings
Put this script in crontab to run every day
25 4 * * * mispuser /usr/bin/python3 /home/mispuser/PyMISP/examples/show_sightings.py
'''
from pymisp import ExpandedPyMISP
from keys import misp_url, misp_key, misp_verifycert
import sys
import time
from datetime import datetime
import smtplib
import mimetypes
from email.mime.multipart import MIMEMultipart
from email import encoders
from email.mime.base import MIMEBase
from email.mime.text import MIMEText
import argparse
import string
def init(url, key, verifycert):
'''
Template to get MISP module started
'''
return ExpandedPyMISP(url, key, verifycert)
def set_drift_timestamp(drift_timestamp, drift_timestamp_path):
'''
Save the timestamp in a (local) file
'''
try:
with open(drift_timestamp_path, 'w+') as f:
f.write(str(drift_timestamp))
return True
except IOError:
sys.exit("Unable to write drift_timestamp %s to %s" % (drift_timestamp, drift_timestamp_path))
return False
def get_drift_timestamp(drift_timestamp_path):
'''
From when do we start with the sightings?
'''
try:
with open(drift_timestamp_path) as f:
drift = f.read()
if drift:
drift = int(float(drift))
else:
drift = 0
except IOError:
drift = 0
return drift
def search_sightings(misp, from_timestamp, end_timestamp):
'''
Search all the sightings
'''
completed_sightings = []
try:
found_sightings = misp.search_sightings(date_from=from_timestamp, date_to=end_timestamp)
except Exception as e:
sys.exit('Unable to search for sightings')
if found_sightings is not None:
for s in found_sightings:
if 'Sighting' in s:
sighting = s['Sighting']
if 'attribute_id' in sighting:
attribute_id = sighting['attribute_id']
# Query the attribute and event to get the details
try:
attribute = misp.get_attribute(attribute_id)
except Exception as e:
print("Unable to fetch attribute")
continue
if 'Attribute' in attribute and 'uuid' in attribute['Attribute']:
event_details = misp.get_event(attribute['Attribute']['event_id'])
event_info = event_details['Event']['info']
attribute_uuid = attribute['Attribute']['uuid']
to_ids = attribute['Attribute']['to_ids']
completed_sightings.append({'attribute_uuid': attribute_uuid, 'date_sighting': sighting['date_sighting'], 'source': sighting['source'], 'type': sighting['type'], 'uuid': sighting['uuid'], 'event_id': attribute['Attribute']['event_id'], 'value': attribute['Attribute']['value'], 'attribute_id': attribute['Attribute']['id'], 'event_title': event_info, 'to_ids': to_ids})
else:
continue
return completed_sightings
if __name__ == '__main__':
smtp_from = 'INSERT_FROM'
smtp_to = 'INSERT_TO'
smtp_server = 'localhost'
report_sightings = ''
ts_format = '%Y-%m-%d %H:%M:%S'
drift_timestamp_path = '/home/mispuser/PyMISP/examples/show_sightings.drift'
parser = argparse.ArgumentParser(description="Show all the sightings.")
parser.add_argument('-m', '--mail', action='store_true', help='Mail the report')
parser.add_argument('-o', '--mailoptions', action='store', help='mailoptions: \'smtp_from=INSERT_FROM;smtp_to=INSERT_TO;smtp_server=localhost\'')
args = parser.parse_args()
misp = init(misp_url, misp_key, misp_verifycert)
start_timestamp = get_drift_timestamp(drift_timestamp_path=drift_timestamp_path)
end_timestamp = time.time()
start_timestamp_s = datetime.fromtimestamp(start_timestamp).strftime(ts_format)
end_timestamp_s = datetime.fromtimestamp(end_timestamp).strftime(ts_format)
# Get all attribute sightings
found_sightings = search_sightings(misp, start_timestamp, end_timestamp)
if found_sightings:
for s in found_sightings:
if int(s['type']) == 0:
s_type = 'TP'
else:
s_type = 'FP'
date_sighting = datetime.fromtimestamp(int(s['date_sighting'])).strftime(ts_format)
s_title = s['event_title']
s_title = s_title.replace('\r','').replace('\n','').replace('\t','')
source = s['source']
if not s['source']:
source = 'N/A'
report_sightings = report_sightings + '%s for [%s] (%s) in event [%s] (%s) on %s from %s (to_ids flag: %s) \n' % ( s_type, s['value'], s['attribute_id'], s_title, s['event_id'], date_sighting, source, s['to_ids'])
set_drift_timestamp(end_timestamp, drift_timestamp_path)
else:
report_sightings = 'No sightings found'
# Mail options
if args.mail:
if args.mailoptions:
mailoptions = args.mailoptions.split(';')
for s in mailoptions:
if s.split('=')[0] == 'smtp_from':
smtp_from = s.split('=')[1]
if s.split('=')[0] == 'smtp_to':
smtp_to = s.split('=')[1]
if s.split('=')[0] == 'smtp_server':
smtp_server = s.split('=')[1]
report_sightings_body = 'MISP Sightings report for %s between %s and %s\n-------------------------------------------------------------------------------\n\n' % (misp_url, start_timestamp_s, end_timestamp_s)
report_sightings_body = report_sightings_body + report_sightings
subject = 'Report of sightings between %s and %s' % (start_timestamp_s, end_timestamp_s)
msg = MIMEMultipart()
msg['From'] = smtp_from
msg['To'] = smtp_to
msg['Subject'] = subject
msg.attach(MIMEText(report_sightings_body, 'plain'))
server = smtplib.SMTP(smtp_server)
server.sendmail(smtp_from, smtp_to, msg.as_string())
else:
print(report_sightings)
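When mailing is enabled, the three mail options travel as a single semicolon-separated string, so a cron-friendly invocation might look like the comment below (a sketch only: the addresses are placeholders, while the path comes from the docstring above and -m/-o are the flags defined in the argument parser):
# Example invocation (hypothetical addresses; flags as defined above):
# /usr/bin/python3 /home/mispuser/PyMISP/examples/show_sightings.py -m \
#     -o 'smtp_from=misp@example.org;smtp_to=soc@example.org;smtp_server=localhost'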

View File

@ -1,25 +0,0 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from pymisp import PyMISP
from keys import misp_url, misp_key, misp_verifycert
import argparse
# For python2 & 3 compat, a bit dirty, but it seems to be the least bad one
try:
input = raw_input
except NameError:
pass
def init(url, key):
return PyMISP(url, key, misp_verifycert, 'json')
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Add sighting.')
parser.add_argument("-f", "--json_file", required=True, help="The name of the json file describing the attribute you want to add sighting to.")
args = parser.parse_args()
misp = init(misp_url, misp_key)
misp.sighting_per_json(args.json_file)

View File

@ -4,8 +4,8 @@
* It will also generate an HTML document with a table (attribute\_table.html) containing a count for each type of attribute.
* test\_attribute\_treemap.html is a quick page made to visualize both treemap and table at the same time.
* tags\_count.py is a script that count the number of occurences of every tags in a fetched sample of Events in a given period of time.
* tag\_search.py is a script that count the number of occurences of a given tag in a fetched sample of Events in a given period of time.
* tags\_count.py is a script that counts the number of occurrences of every tag in a fetched sample of Events in a given period of time.
* tag\_search.py is a script that counts the number of occurrences of a given tag in a fetched sample of Events in a given period of time.
* Events will be fetched from _days_ days ago to today.
* _begindate_ is the beginning of the studied period. If it is later than today, an error will be raised.
* _enddate_ is the end of the studied period. If it is earlier than _begindate_, an error will be raised.
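For orientation, the tag-counting idea reduces to a few lines of PyMISP. The sketch below is not the actual tags\_count.py: it assumes keys.py provides misp_url, misp_key and misp_verifycert, it only counts event-level tags, and it hard-codes a 30-day window instead of the _days_/_begindate_/_enddate_ options described above.
from collections import Counter
from datetime import date, timedelta
from pymisp import ExpandedPyMISP
from keys import misp_url, misp_key, misp_verifycert
misp = ExpandedPyMISP(misp_url, misp_key, misp_verifycert)
# Fetch the events of the last 30 days and count how often each tag occurs.
begindate = date.today() - timedelta(days=30)
events = misp.search(controller='events', date_from=begindate, pythonify=True)
tag_counts = Counter(tag.name for event in events for tag in event.tags)
for name, occurrences in tag_counts.most_common():
    print('%s\t%s' % (occurrences, name))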

Some files were not shown because too many files have changed in this diff.