Merge branch 'master' into 8ear-add-mkdocs-documentation

pull/316/head
Max H 2019-07-31 08:37:19 +02:00 committed by GitHub
commit 90d7485751
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
82 changed files with 5194 additions and 2001 deletions

View File

@ -11,6 +11,7 @@ python:
- "3.7-dev"
install:
- sudo apt-get install libzbar0 libzbar-dev libpoppler-cpp-dev
- pip install pipenv
- pipenv install --dev
# MKDOCS
@ -21,14 +22,14 @@ script:
- pid=$!
- sleep 5
- pipenv run nosetests --with-coverage --cover-package=misp_modules
- kill -s INT $pid
- kill -s KILL $pid
- pushd ~/
- pipenv run coverage run -m --parallel-mode --source=misp_modules misp_modules.__init__ -s -l 127.0.0.1 &
- pid=$!
- popd
- sleep 5
- pipenv run nosetests --with-coverage --cover-package=misp_modules
- kill -s INT $pid
- kill -s KILL $pid
- pipenv run flake8 --ignore=E501,W503 misp_modules
# MKDOCS
- make ci_generate_docs

20
Pipfile
View File

@ -25,12 +25,13 @@ pytesseract = "*"
pygeoip = "*"
beautifulsoup4 = "*"
oauth2 = "*"
yara-python = ">=3.8.0"
yara-python = "==3.8.1"
sigmatools = "*"
stix2-patterns = "*"
maclookup = "*"
vulners = "*"
blockchain = "*"
reportlab = "*"
pyintel471 = {editable = true,git = "https://github.com/MISP/PyIntel471.git"}
shodan = "*"
Pillow = "*"
@ -40,6 +41,21 @@ domaintools_api = "*"
misp-modules = {editable = true,path = "."}
pybgpranking = {editable = true,git = "https://github.com/D4-project/BGP-Ranking.git/",subdirectory = "client"}
pyipasnhistory = {editable = true,git = "https://github.com/D4-project/IPASN-History.git/",subdirectory = "client"}
backscatter = "*"
pyzbar = "*"
opencv-python = "*"
np = "*"
ODTReader = {editable = true,git = "https://github.com/cartertemm/ODTReader.git/"}
python-pptx = "*"
python-docx = "*"
ezodf = "*"
pandas = "*"
pandas_ods_reader = "*"
pdftotext = "*"
lxml = "*"
xlrd = "*"
idna-ssl = {markers = "python_version < '3.7'"}
jbxapi = "*"
[requires]
python_version = "3.6"
python_version = "3"

682
Pipfile.lock generated
View File

@ -1,11 +1,11 @@
{
"_meta": {
"hash": {
"sha256": "f501a84bdd41ca21a2af020278ce030985cccd5f2f5683cd075797be4523587d"
"sha256": "3b1ae107ffee673cfabae67742774ee8ebdc3b82313608b529c2c4cf4a41ddc9"
},
"pipfile-spec": 6,
"requires": {
"python_version": "3.6"
"python_version": "3"
},
"sources": [
{
@ -59,19 +59,27 @@
},
"attrs": {
"hashes": [
"sha256:10cbf6e27dbce8c30807caf056c8eb50917e0eaafe86347671b57254006c3e69",
"sha256:ca4be454458f9dec299268d472aaa5a11f67a4ff70093396e1ceae9c76cf4bbb"
"sha256:69c0dbf2ed392de1cb5ec704444b08a5ef81680a61cb899dc08127123af36a79",
"sha256:f0b870f674851ecbfbbbd364d6b5cbdff9dcedbc7f3f5e18a6891057f21fe399"
],
"version": "==18.2.0"
"version": "==19.1.0"
},
"backscatter": {
"hashes": [
"sha256:7a0d1aa3661635de81e2a09b15d53e35cbe399a111cc58a70925f80e6874abd3",
"sha256:afb0efcf5d2551dac953ec4c38fb710b274b8e811775650e02c1ef42cafb14c8"
],
"index": "pypi",
"version": "==0.2.4"
},
"beautifulsoup4": {
"hashes": [
"sha256:034740f6cb549b4e932ae1ab975581e6103ac8f942200a0e9759065984391858",
"sha256:945065979fb8529dd2f37dbb58f00b661bdbcbebf954f93b32fdf5263ef35348",
"sha256:ba6d5c59906a85ac23dadfe5c88deaf3e179ef565f4898671253e50a78680718"
"sha256:05668158c7b85b791c5abde53e50265e16f98ad601c402ba44d70f96c4159612",
"sha256:25288c9e176f354bf277c0a10aa96c782a6a18a17122dba2e8cec4a97e03343b",
"sha256:f040590be10520f2ea4c2ae8c3dae441c7cfff5308ec9d58a0ec0c1b8f81d469"
],
"index": "pypi",
"version": "==4.7.1"
"version": "==4.8.0"
},
"blockchain": {
"hashes": [
@ -82,10 +90,10 @@
},
"certifi": {
"hashes": [
"sha256:47f9c83ef4c0c621eaef743f133f09fa8a74a9b75f037e8624f83bd1b6626cb7",
"sha256:993f830721089fef441cdfeb4b2c8c9df86f0c63239f06bd025a76a7daddb033"
"sha256:046832c04d4e752f37383b628bc601a7ea7211496b4638f6514d0e5b9acc4939",
"sha256:945e3ba63a0b9f577b1395204e13c3a231f9bc0223888be653286534e5873695"
],
"version": "==2018.11.29"
"version": "==2019.6.16"
},
"chardet": {
"hashes": [
@ -103,10 +111,10 @@
},
"click-plugins": {
"hashes": [
"sha256:b1ee1ccc9421c73007fe290680d97984eb6eaf5f4512b7620c6aa46031d6cb6b",
"sha256:dfed74b5063546a137de99baaaf742b4de4337ad2b3e1df5ec7c8a256adc0847"
"sha256:46ab999744a9d831159c3411bb0c79346d94a444df9a3a3742e9ed63645f264b",
"sha256:5d262006d3222f5057fd81e1623d4443e41dcda5dc815c06b442aa3c02889fc8"
],
"version": "==1.0.4"
"version": "==1.1.1"
},
"colorama": {
"hashes": [
@ -115,6 +123,13 @@
],
"version": "==0.4.1"
},
"deprecated": {
"hashes": [
"sha256:a515c4cf75061552e0284d123c3066fbbe398952c87333a92b8fc3dd8e4f9cc1",
"sha256:b07b414c8aac88f60c1d837d21def7e83ba711052e03b3cbaff27972567a8f8d"
],
"version": "==1.2.6"
},
"dnspython": {
"hashes": [
"sha256:36c5e8e38d4369a08b6780b7f27d790a292b2b08eea01607865bf0936c558e01",
@ -142,6 +157,13 @@
],
"version": "==0.9"
},
"ezodf": {
"hashes": [
"sha256:000da534f689c6d55297a08f9e2ed7eada9810d194d31d164388162fb391122d"
],
"index": "pypi",
"version": "==0.3.2"
},
"future": {
"hashes": [
"sha256:67045236dcfd6816dc439556d009594abf643e5eb48992e36beac09c2ca659b8"
@ -150,9 +172,10 @@
},
"httplib2": {
"hashes": [
"sha256:f61fb838a94ce3b349aa32c92fd8430f7e3511afdb18bf9640d647e30c90a6d6"
"sha256:158fbd0ffbba536829d664bf3f32c4f45df41f8f791663665162dfaf21ffd075",
"sha256:d1146939d270f1f1eb8cbf8f5aa72ff37d897faccca448582bb1e180aeb4c6b2"
],
"version": "==0.12.0"
"version": "==0.13.0"
},
"idna": {
"hashes": [
@ -165,6 +188,7 @@
"hashes": [
"sha256:a933e3bb13da54383f9e8f35dc4f9cb9eb9b3b78c6b36f311254d6d0d92c6c7c"
],
"index": "pypi",
"markers": "python_version < '3.7'",
"version": "==1.1.0"
},
@ -175,12 +199,49 @@
],
"version": "==0.6.0"
},
"jbxapi": {
"hashes": [
"sha256:b06d7dc99af51eff657b1bb5d96489dda6af6164fae934d9de8b00795a4bd5fd"
],
"index": "pypi",
"version": "==3.2.0"
},
"jsonschema": {
"hashes": [
"sha256:000e68abd33c972a5248544925a0cae7d1125f9bf6c58280d37546b946769a08",
"sha256:6ff5f3180870836cae40f06fa10419f557208175f13ad7bc26caa77beb1f6e02"
"sha256:0c0a81564f181de3212efa2d17de1910f8732fa1b71c42266d983cd74304e20d",
"sha256:a5f6559964a3851f59040d3b961de5e68e70971afb88ba519d27e6a039efff1a"
],
"version": "==2.6.0"
"version": "==3.0.1"
},
"lxml": {
"hashes": [
"sha256:06c7616601430aa140a69f97e3116308fffe0848f543b639a5ec2e8920ae72fd",
"sha256:177202792f9842374a8077735c69c41a4282183f7851443d2beb8ee310720819",
"sha256:19317ad721ceb9e39847d11131903931e2794e447d4751ebb0d9236f1b349ff2",
"sha256:36d206e62f3e5dbaafd4ec692b67157e271f5da7fd925fda8515da675eace50d",
"sha256:387115b066c797c85f9861a9613abf50046a15aac16759bc92d04f94acfad082",
"sha256:3ce1c49d4b4a7bc75fb12acb3a6247bb7a91fe420542e6d671ba9187d12a12c2",
"sha256:4d2a5a7d6b0dbb8c37dab66a8ce09a8761409c044017721c21718659fa3365a1",
"sha256:58d0a1b33364d1253a88d18df6c0b2676a1746d27c969dc9e32d143a3701dda5",
"sha256:62a651c618b846b88fdcae0533ec23f185bb322d6c1845733f3123e8980c1d1b",
"sha256:69ff21064e7debc9b1b1e2eee8c2d686d042d4257186d70b338206a80c5bc5ea",
"sha256:7060453eba9ba59d821625c6af6a266bd68277dce6577f754d1eb9116c094266",
"sha256:7d26b36a9c4bce53b9cfe42e67849ae3c5c23558bc08363e53ffd6d94f4ff4d2",
"sha256:83b427ad2bfa0b9705e02a83d8d607d2c2f01889eb138168e462a3a052c42368",
"sha256:923d03c84534078386cf50193057aae98fa94cace8ea7580b74754493fda73ad",
"sha256:b773715609649a1a180025213f67ffdeb5a4878c784293ada300ee95a1f3257b",
"sha256:baff149c174e9108d4a2fee192c496711be85534eab63adb122f93e70aa35431",
"sha256:bca9d118b1014b4c2d19319b10a3ebed508ff649396ce1855e1c96528d9b2fa9",
"sha256:ce580c28845581535dc6000fc7c35fdadf8bea7ccb57d6321b044508e9ba0685",
"sha256:d34923a569e70224d88e6682490e24c842907ba2c948c5fd26185413cbe0cd96",
"sha256:dd9f0e531a049d8b35ec5e6c68a37f1ba6ec3a591415e6804cbdf652793d15d7",
"sha256:ecb805cbfe9102f3fd3d2ef16dfe5ae9e2d7a7dfbba92f4ff1e16ac9784dbfb0",
"sha256:ede9aad2197a0202caff35d417b671f5f91a3631477441076082a17c94edd846",
"sha256:ef2d1fc370400e0aa755aab0b20cf4f1d0e934e7fd5244f3dd4869078e4942b9",
"sha256:f2fec194a49bfaef42a548ee657362af5c7a640da757f6f452a35da7dd9f923c"
],
"index": "pypi",
"version": "==4.3.4"
},
"maclookup": {
"hashes": [
@ -228,6 +289,41 @@
],
"version": "==4.5.2"
},
"np": {
"hashes": [
"sha256:781265283f3823663ad8fb48741aae62abcf4c78bc19f908f8aa7c1d3eb132f8"
],
"index": "pypi",
"version": "==1.0.2"
},
"numpy": {
"hashes": [
"sha256:0778076e764e146d3078b17c24c4d89e0ecd4ac5401beff8e1c87879043a0633",
"sha256:141c7102f20abe6cf0d54c4ced8d565b86df4d3077ba2343b61a6db996cefec7",
"sha256:14270a1ee8917d11e7753fb54fc7ffd1934f4d529235beec0b275e2ccf00333b",
"sha256:27e11c7a8ec9d5838bc59f809bfa86efc8a4fd02e58960fa9c49d998e14332d5",
"sha256:2a04dda79606f3d2f760384c38ccd3d5b9bb79d4c8126b67aff5eb09a253763e",
"sha256:3c26010c1b51e1224a3ca6b8df807de6e95128b0908c7e34f190e7775455b0ca",
"sha256:52c40f1a4262c896420c6ea1c6fda62cf67070e3947e3307f5562bd783a90336",
"sha256:6e4f8d9e8aa79321657079b9ac03f3cf3fd067bf31c1cca4f56d49543f4356a5",
"sha256:7242be12a58fec245ee9734e625964b97cf7e3f2f7d016603f9e56660ce479c7",
"sha256:7dc253b542bfd4b4eb88d9dbae4ca079e7bf2e2afd819ee18891a43db66c60c7",
"sha256:94f5bd885f67bbb25c82d80184abbf7ce4f6c3c3a41fbaa4182f034bba803e69",
"sha256:a89e188daa119ffa0d03ce5123dee3f8ffd5115c896c2a9d4f0dbb3d8b95bfa3",
"sha256:ad3399da9b0ca36e2f24de72f67ab2854a62e623274607e37e0ce5f5d5fa9166",
"sha256:b0348be89275fd1d4c44ffa39530c41a21062f52299b1e3ee7d1c61f060044b8",
"sha256:b5554368e4ede1856121b0dfa35ce71768102e4aa55e526cb8de7f374ff78722",
"sha256:cbddc56b2502d3f87fda4f98d948eb5b11f36ff3902e17cb6cc44727f2200525",
"sha256:d79f18f41751725c56eceab2a886f021d70fd70a6188fd386e29a045945ffc10",
"sha256:dc2ca26a19ab32dc475dbad9dfe723d3a64c835f4c23f625c2b6566ca32b9f29",
"sha256:dd9bcd4f294eb0633bb33d1a74febdd2b9018b8b8ed325f861fffcd2c7660bb8",
"sha256:e8baab1bc7c9152715844f1faca6744f2416929de10d7639ed49555a85549f52",
"sha256:ec31fe12668af687b99acf1567399632a7c47b0e17cfb9ae47c098644ef36797",
"sha256:f12b4f7e2d8f9da3141564e6737d79016fe5336cc92de6814eba579744f65b0a",
"sha256:f58ac38d5ca045a377b3b377c84df8175ab992c970a53332fa8ac2373df44ff7"
],
"version": "==1.16.4"
},
"oauth2": {
"hashes": [
"sha256:15b5c42301f46dd63113f1214b0d81a8b16254f65a86d3c32a1b52297f3266e6",
@ -236,67 +332,138 @@
"index": "pypi",
"version": "==1.9.0.post1"
},
"passivetotal": {
"odtreader": {
"editable": true,
"git": "https://github.com/cartertemm/ODTReader.git/",
"ref": "49d6938693f6faa3ff09998f86dba551ae3a996b"
},
"opencv-python": {
"hashes": [
"sha256:d745a6519ec04e3a354682978ebf07778bf7602beac30307cbad075ff1a4418d"
"sha256:1703a296a96d3d46615e5053f224867977accb4240bcaa0fcabcb0768bf5ac13",
"sha256:1777ce7535ee7a1995cae168a107a1320e9df13648b930e72a1a2c2eccd64cda",
"sha256:1e5520482fb18fbd64d079e7f17ac0018f195fd75f6360a53bb82d7903106b50",
"sha256:25522dcf2529614750a71112a6659759080b4bdc2323f19d47f4d895960fd796",
"sha256:2af5f2842ad44c65ae2647377e0ff198719e1a1cfc9c6a19bc0c525c035d4bd8",
"sha256:31ec48d7eca13fc25c287dea7cecab453976e372cad8f50d55c054a247efda21",
"sha256:47cf48ff5dbd554e9f58cc9e98cf0b5de3f6a971172612bffa06bc5fb79ce872",
"sha256:494f98366bb5d6c2ac7e50e6617139f353704fd97a6d12ec9d392e72817d5cb0",
"sha256:4a9845870739e640e3350a8d98d511c92c087fe3d66090e83be7bf94e0ac64f7",
"sha256:4ac29cc0847d948a6636899014e84e165c30cc8779d6218394d44363462a01ce",
"sha256:5857ace03b7854221abf8072462d306c2c2ce4e366190b21d90ee8ee8aaf5bb4",
"sha256:5b4a23d99d5a2874767034466f5a8fd37b9f93ac14955a01b1a208983c76b9ad",
"sha256:734d87a5021c037064beb62133e135e66c7128e401a63b8b842b809ae2093749",
"sha256:78005c1c5d15ef4e32e0f485557bd15b5b6d87f49c19db7fe3e9246a61ebe7e4",
"sha256:81ae2283225c5c52fc3d72debd4241c30ccff2bb922578bf7867f9851cce3acb",
"sha256:88dbf900f297fdae0f62b899d6a784d8868ec2135854c5f8a9abbad00a6f0c5b",
"sha256:8c98ea7b8d327a31cd6028782a06147d0e0329ae8e829e881fb5d02f7ed8aec9",
"sha256:937d4686fef6967921145290f5b50c01c00c5b5d3542a6519e8a85cd88448723",
"sha256:a057958c0e362b3c4f03b9af1cbdb6d5af035fd22ecd7fd794eba8fdeb049eb8",
"sha256:c41eab31fa2c641226c6187caa391a688d064c99f078d604574f1912296b771f",
"sha256:cf4f7e62d1f80d1fa85a1693a3500def5cde54b2b75212b3609e552e4c25acfb",
"sha256:d90d60143e18334330c149f293071c9f2f3c79c896f33dc4ec65099e58baaaa7",
"sha256:db3106b7ca86999a7bd1f2fcc93e49314e5e6e451356774e421a69428df5020b",
"sha256:dbaf264db56f4771dfac6624f438bc4dc670aa94f61a6138848fcab7e9e77380",
"sha256:e65206c4cf651dc9cf0829962fae8bec986767c9f123d6a1ad17f9356bf7257e",
"sha256:eac94ddc78c58e891cff7180274317dad2938a4ddfc6ced1c04846c7f50e77e9",
"sha256:f2e828711f044a965509c862b3a59b3181e9c56c145a950cb53d43fec54e66d2"
],
"index": "pypi",
"version": "==1.0.30"
"version": "==4.1.0.25"
},
"pandas": {
"hashes": [
"sha256:074a032f99bb55d178b93bd98999c971542f19317829af08c99504febd9e9b8b",
"sha256:20f1728182b49575c2f6f681b3e2af5fac9e84abdf29488e76d569a7969b362e",
"sha256:2745ba6e16c34d13d765c3657bb64fa20a0e2daf503e6216a36ed61770066179",
"sha256:32c44e5b628c48ba17703f734d59f369d4cdcb4239ef26047d6c8a8bfda29a6b",
"sha256:3b9f7dcee6744d9dcdd53bce19b91d20b4311bf904303fa00ef58e7df398e901",
"sha256:544f2033250980fb6f069ce4a960e5f64d99b8165d01dc39afd0b244eeeef7d7",
"sha256:58f9ef68975b9f00ba96755d5702afdf039dea9acef6a0cfd8ddcde32918a79c",
"sha256:9023972a92073a495eba1380824b197ad1737550fe1c4ef8322e65fe58662888",
"sha256:914341ad2d5b1ea522798efa4016430b66107d05781dbfe7cf05eba8f37df995",
"sha256:9d151bfb0e751e2c987f931c57792871c8d7ff292bcdfcaa7233012c367940ee",
"sha256:b932b127da810fef57d427260dde1ad54542c136c44b227a1e367551bb1a684b",
"sha256:cfb862aa37f4dd5be0730731fdb8185ac935aba8b51bf3bd035658111c9ee1c9",
"sha256:de7ecb4b120e98b91e8a2a21f186571266a8d1faa31d92421e979c7ca67d8e5c",
"sha256:df7e1933a0b83920769611c5d6b9a1bf301e3fa6a544641c6678c67621fe9843"
],
"index": "pypi",
"version": "==0.25.0"
},
"pandas-ods-reader": {
"hashes": [
"sha256:d2d6e4f9cd2850da32808bbc68d433a337911058387992026d3987ead1f4a7c8",
"sha256:d4d6781cc46e782e265b48681416f636e7659343dec948c6fccc4236af6fa1e6"
],
"index": "pypi",
"version": "==0.0.7"
},
"passivetotal": {
"hashes": [
"sha256:2944974d380a41f19f8fbb3d7cbfc8285479eb81092940b57bf0346d66706a05",
"sha256:a0cbea84b0bd6e9f3694ddeb447472b3d6f09e28940a7a0388456b8cf6a8e478",
"sha256:e35bf2cbccb385795a67d66f180d14ce9136cf1611b1c3da8a1055a1aced6264"
],
"index": "pypi",
"version": "==1.0.31"
},
"pdftotext": {
"hashes": [
"sha256:e3ad11efe0aa22cbfc46aa1296b2ea5a52ad208b778288311f2801adef178ccb"
],
"index": "pypi",
"version": "==2.1.1"
},
"pillow": {
"hashes": [
"sha256:051de330a06c99d6f84bcf582960487835bcae3fc99365185dc2d4f65a390c0e",
"sha256:0ae5289948c5e0a16574750021bd8be921c27d4e3527800dc9c2c1d2abc81bf7",
"sha256:0b1efce03619cdbf8bcc61cfae81fcda59249a469f31c6735ea59badd4a6f58a",
"sha256:163136e09bd1d6c6c6026b0a662976e86c58b932b964f255ff384ecc8c3cefa3",
"sha256:18e912a6ccddf28defa196bd2021fe33600cbe5da1aa2f2e2c6df15f720b73d1",
"sha256:24ec3dea52339a610d34401d2d53d0fb3c7fd08e34b20c95d2ad3973193591f1",
"sha256:267f8e4c0a1d7e36e97c6a604f5b03ef58e2b81c1becb4fccecddcb37e063cc7",
"sha256:3273a28734175feebbe4d0a4cde04d4ed20f620b9b506d26f44379d3c72304e1",
"sha256:4c678e23006798fc8b6f4cef2eaad267d53ff4c1779bd1af8725cc11b72a63f3",
"sha256:4d4bc2e6bb6861103ea4655d6b6f67af8e5336e7216e20fff3e18ffa95d7a055",
"sha256:505738076350a337c1740a31646e1de09a164c62c07db3b996abdc0f9d2e50cf",
"sha256:5233664eadfa342c639b9b9977190d64ad7aca4edc51a966394d7e08e7f38a9f",
"sha256:5d95cb9f6cced2628f3e4de7e795e98b2659dfcc7176ab4a01a8b48c2c2f488f",
"sha256:7eda4c737637af74bac4b23aa82ea6fbb19002552be85f0b89bc27e3a762d239",
"sha256:801ddaa69659b36abf4694fed5aa9f61d1ecf2daaa6c92541bbbbb775d97b9fe",
"sha256:825aa6d222ce2c2b90d34a0ea31914e141a85edefc07e17342f1d2fdf121c07c",
"sha256:9c215442ff8249d41ff58700e91ef61d74f47dfd431a50253e1a1ca9436b0697",
"sha256:a3d90022f2202bbb14da991f26ca7a30b7e4c62bf0f8bf9825603b22d7e87494",
"sha256:a631fd36a9823638fe700d9225f9698fb59d049c942d322d4c09544dc2115356",
"sha256:a6523a23a205be0fe664b6b8747a5c86d55da960d9586db039eec9f5c269c0e6",
"sha256:a756ecf9f4b9b3ed49a680a649af45a8767ad038de39e6c030919c2f443eb000",
"sha256:b117287a5bdc81f1bac891187275ec7e829e961b8032c9e5ff38b70fd036c78f",
"sha256:ba04f57d1715ca5ff74bb7f8a818bf929a204b3b3c2c2826d1e1cc3b1c13398c",
"sha256:cd878195166723f30865e05d87cbaf9421614501a4bd48792c5ed28f90fd36ca",
"sha256:cee815cc62d136e96cf76771b9d3eb58e0777ec18ea50de5cfcede8a7c429aa8",
"sha256:d1722b7aa4b40cf93ac3c80d3edd48bf93b9208241d166a14ad8e7a20ee1d4f3",
"sha256:d7c1c06246b05529f9984435fc4fa5a545ea26606e7f450bdbe00c153f5aeaad",
"sha256:e9c8066249c040efdda84793a2a669076f92a301ceabe69202446abb4c5c5ef9",
"sha256:f227d7e574d050ff3996049e086e1f18c7bd2d067ef24131e50a1d3fe5831fbc",
"sha256:fc9a12aad714af36cf3ad0275a96a733526571e52710319855628f476dcb144e"
"sha256:0804f77cb1e9b6dbd37601cee11283bba39a8d44b9ddb053400c58e0c0d7d9de",
"sha256:0ab7c5b5d04691bcbd570658667dd1e21ca311c62dcfd315ad2255b1cd37f64f",
"sha256:0b3e6cf3ea1f8cecd625f1420b931c83ce74f00c29a0ff1ce4385f99900ac7c4",
"sha256:365c06a45712cd723ec16fa4ceb32ce46ad201eb7bbf6d3c16b063c72b61a3ed",
"sha256:38301fbc0af865baa4752ddae1bb3cbb24b3d8f221bf2850aad96b243306fa03",
"sha256:3aef1af1a91798536bbab35d70d35750bd2884f0832c88aeb2499aa2d1ed4992",
"sha256:3fe0ab49537d9330c9bba7f16a5f8b02da615b5c809cdf7124f356a0f182eccd",
"sha256:45a619d5c1915957449264c81c008934452e3fd3604e36809212300b2a4dab68",
"sha256:49f90f147883a0c3778fd29d3eb169d56416f25758d0f66775db9184debc8010",
"sha256:571b5a758baf1cb6a04233fb23d6cf1ca60b31f9f641b1700bfaab1194020555",
"sha256:5ac381e8b1259925287ccc5a87d9cf6322a2dc88ae28a97fe3e196385288413f",
"sha256:6153db744a743c0c8c91b8e3b9d40e0b13a5d31dbf8a12748c6d9bfd3ddc01ad",
"sha256:6fd63afd14a16f5d6b408f623cc2142917a1f92855f0df997e09a49f0341be8a",
"sha256:70acbcaba2a638923c2d337e0edea210505708d7859b87c2bd81e8f9902ae826",
"sha256:70b1594d56ed32d56ed21a7fbb2a5c6fd7446cdb7b21e749c9791eac3a64d9e4",
"sha256:76638865c83b1bb33bcac2a61ce4d13c17dba2204969dedb9ab60ef62bede686",
"sha256:7b2ec162c87fc496aa568258ac88631a2ce0acfe681a9af40842fc55deaedc99",
"sha256:7cee2cef07c8d76894ebefc54e4bb707dfc7f258ad155bd61d87f6cd487a70ff",
"sha256:7d16d4498f8b374fc625c4037742fbdd7f9ac383fd50b06f4df00c81ef60e829",
"sha256:b50bc1780681b127e28f0075dfb81d6135c3a293e0c1d0211133c75e2179b6c0",
"sha256:bd0582f831ad5bcad6ca001deba4568573a4675437db17c4031939156ff339fa",
"sha256:cfd40d8a4b59f7567620410f966bb1f32dc555b2b19f82a91b147fac296f645c",
"sha256:e3ae410089de680e8f84c68b755b42bc42c0ceb8c03dbea88a5099747091d38e",
"sha256:e9046e559c299b395b39ac7dbf16005308821c2f24a63cae2ab173bd6aa11616",
"sha256:ef6be704ae2bc8ad0ebc5cb850ee9139493b0fc4e81abcc240fb392a63ebc808",
"sha256:f8dc19d92896558f9c4317ee365729ead9d7bbcf2052a9a19a3ef17abbb8ac5b"
],
"index": "pypi",
"version": "==5.4.1"
"version": "==6.1.0"
},
"psutil": {
"hashes": [
"sha256:04d2071100aaad59f9bcbb801be2125d53b2e03b1517d9fed90b45eea51d297e",
"sha256:1aba93430050270750d046a179c5f3d6e1f5f8b96c20399ba38c596b28fc4d37",
"sha256:3ac48568f5b85fee44cd8002a15a7733deca056a191d313dbf24c11519c0c4a8",
"sha256:96f3fdb4ef7467854d46ad5a7e28eb4c6dc6d455d751ddf9640cd6d52bdb03d7",
"sha256:b755be689d6fc8ebc401e1d5ce5bac867e35788f10229e166338484eead51b12",
"sha256:c8ee08ad1b716911c86f12dc753eb1879006224fd51509f077987bb6493be615",
"sha256:d0c4230d60376aee0757d934020b14899f6020cd70ef8d2cb4f228b6ffc43e8f",
"sha256:d23f7025bac9b3e38adc6bd032cdaac648ac0074d18e36950a04af35458342e8",
"sha256:f0fcb7d3006dd4d9ccf3ccd0595d44c6abbfd433ec31b6ca177300ee3f19e54e"
"sha256:028a1ec3c6197eadd11e7b46e8cc2f0720dc18ac6d7aabdb8e8c0d6c9704f000",
"sha256:503e4b20fa9d3342bcf58191bbc20a4a5ef79ca7df8972e6197cc14c5513e73d",
"sha256:863a85c1c0a5103a12c05a35e59d336e1d665747e531256e061213e2e90f63f3",
"sha256:954f782608bfef9ae9f78e660e065bd8ffcfaea780f9f2c8a133bb7cb9e826d7",
"sha256:b6e08f965a305cd84c2d07409bc16fbef4417d67b70c53b299116c5b895e3f45",
"sha256:bc96d437dfbb8865fc8828cf363450001cb04056bbdcdd6fc152c436c8a74c61",
"sha256:cf49178021075d47c61c03c0229ac0c60d5e2830f8cab19e2d88e579b18cdb76",
"sha256:d5350cb66690915d60f8b233180f1e49938756fb2d501c93c44f8fb5b970cc63",
"sha256:eba238cf1989dfff7d483c029acb0ac4fcbfc15de295d682901f0e2497e6781a"
],
"version": "==5.5.0"
"version": "==5.6.3"
},
"pybgpranking": {
"editable": true,
"git": "https://github.com/D4-project/BGP-Ranking.git/",
"ref": "7e698f87366e6f99b4d0d11852737db28e3ddc62",
"ref": "331bdf499c4dc19c3404e85ce0dc1ff161d35250",
"subdirectory": "client"
},
"pydnstrails": {
@ -327,32 +494,33 @@
"pyipasnhistory": {
"editable": true,
"git": "https://github.com/D4-project/IPASN-History.git/",
"ref": "e846cd36fe1ed6b22f60890bba89f84e61b62e59",
"ref": "32b3bb13967527a4a42eb56f226bf03a04da3cc8",
"subdirectory": "client"
},
"pymisp": {
"editable": true,
"git": "https://github.com/MISP/PyMISP.git",
"ref": "2c877f2aec11b7f5d2f23dfc5ce7398b2ce33b48"
"ref": "b5226a959c72e5b414a3ce297d3865bbb9fd0da2"
},
"pyonyphe": {
"editable": true,
"git": "https://github.com/sebdraven/pyonyphe",
"ref": "66329baeee7cab844f2203c047c2551828eaf14d"
"ref": "cbb0168d5cb28a9f71f7ab3773164a7039ccdb12"
},
"pyparsing": {
"hashes": [
"sha256:66c9268862641abcac4a96ba74506e594c884e3f57690a696d21ad8210ed667a",
"sha256:f6c5ef0d7480ad048c054c37632c67fca55299990fff127850181659eea33fc3"
"sha256:1873c03321fc118f4e9746baf201ff990ceb915f433f23b395f5580d1840cb2a",
"sha256:9b6323ef4ab914af344ba97510e966d64ba91055d6b9afa6b30799340e89cc03"
],
"version": "==2.3.1"
"version": "==2.4.0"
},
"pypdns": {
"hashes": [
"sha256:0356360156dd26d2cf27a415a10ff2bd1ff1d2eb3b2dd51b35553d60b87fd328"
"sha256:349ab1033e34a60fa0c4626b3432f5202c174656955fdf330986380c9a97cf3e",
"sha256:c609678d47255a240c1e3f29a757355f610a8394ec22f21a07853360ebee6f20"
],
"index": "pypi",
"version": "==1.3"
"version": "==1.4.1"
},
"pypssl": {
"hashes": [
@ -361,35 +529,71 @@
"index": "pypi",
"version": "==2.1"
},
"pyrsistent": {
"hashes": [
"sha256:50cffebc87ca91b9d4be2dcc2e479272bcb466b5a0487b6c271f7ddea6917e14"
],
"version": "==0.15.3"
},
"pytesseract": {
"hashes": [
"sha256:11c20321595b6e2e904b594633edf1a717212b13bac7512986a2d807b8849770"
"sha256:46363b300d6890d24782852e020c06e96344529fead98f3b9b8506c82c37db6f"
],
"index": "pypi",
"version": "==0.2.6"
"version": "==0.2.7"
},
"python-dateutil": {
"hashes": [
"sha256:063df5763652e21de43de7d9e00ccf239f953a832941e37be541614732cdfc93",
"sha256:88f9287c0174266bb0d8cedd395cfba9c58e87e5ad86b2ce58859bc11be3cf02"
"sha256:7e6584c74aeed623791615e26efd690f29817a27c73085b78e4bad02493df2fb",
"sha256:c89805f6f4d64db21ed966fda138f8a5ed7a4fdbc1a8ee329ce1b74e3c74da9e"
],
"version": "==2.7.5"
"version": "==2.8.0"
},
"python-docx": {
"hashes": [
"sha256:bc76ecac6b2d00ce6442a69d03a6f35c71cd72293cd8405a7472dfe317920024"
],
"index": "pypi",
"version": "==0.8.10"
},
"python-pptx": {
"hashes": [
"sha256:a857d69e52d7e8a8fb32fca8182fdd4a3c68c689de8d4e4460e9b4a95efa7bc4"
],
"index": "pypi",
"version": "==0.6.18"
},
"pytz": {
"hashes": [
"sha256:303879e36b721603cc54604edcac9d20401bdbe31e1e4fdee5b9f98d5d31dfda",
"sha256:d747dd3d23d77ef44c6a3526e274af6efeb0a6f1afd5a69ba4d5be4098c8e141"
],
"version": "==2019.1"
},
"pyyaml": {
"hashes": [
"sha256:3d7da3009c0f3e783b2c873687652d83b1bbfd5c88e9813fb7e5b03c0dd3108b",
"sha256:3ef3092145e9b70e3ddd2c7ad59bdd0252a94dfe3949721633e41344de00a6bf",
"sha256:40c71b8e076d0550b2e6380bada1f1cd1017b882f7e16f09a65be98e017f211a",
"sha256:558dd60b890ba8fd982e05941927a3911dc409a63dcb8b634feaa0cda69330d3",
"sha256:a7c28b45d9f99102fa092bb213aa12e0aaf9a6a1f5e395d36166639c1f96c3a1",
"sha256:aa7dd4a6a427aed7df6fb7f08a580d68d9b118d90310374716ae90b710280af1",
"sha256:bc558586e6045763782014934bfaf39d48b8ae85a2713117d16c39864085c613",
"sha256:d46d7982b62e0729ad0175a9bc7e10a566fc07b224d2c79fafb5e032727eaa04",
"sha256:d5eef459e30b09f5a098b9cea68bebfeb268697f78d647bd255a085371ac7f3f",
"sha256:e01d3203230e1786cd91ccfdc8f8454c8069c91bee3962ad93b87a4b2860f537",
"sha256:e170a9e6fcfd19021dd29845af83bb79236068bf5fd4df3327c1be18182b2531"
"sha256:57acc1d8533cbe51f6662a55434f0dbecfa2b9eaf115bede8f6fd00115a0c0d3",
"sha256:588c94b3d16b76cfed8e0be54932e5729cc185caffaa5a451e7ad2f7ed8b4043",
"sha256:68c8dd247f29f9a0d09375c9c6b8fdc64b60810ebf07ba4cdd64ceee3a58c7b7",
"sha256:70d9818f1c9cd5c48bb87804f2efc8692f1023dac7f1a1a5c61d454043c1d265",
"sha256:86a93cccd50f8c125286e637328ff4eef108400dd7089b46a7be3445eecfa391",
"sha256:a0f329125a926876f647c9fa0ef32801587a12328b4a3c741270464e3e4fa778",
"sha256:a3c252ab0fa1bb0d5a3f6449a4826732f3eb6c0270925548cac342bc9b22c225",
"sha256:b4bb4d3f5e232425e25dda21c070ce05168a786ac9eda43768ab7f3ac2770955",
"sha256:cd0618c5ba5bda5f4039b9398bb7fb6a317bb8298218c3de25c47c4740e4b95e",
"sha256:ceacb9e5f8474dcf45b940578591c7f3d960e82f926c707788a570b51ba59190",
"sha256:fe6a88094b64132c4bb3b631412e90032e8cfe9745a58370462240b8cb7553cd"
],
"version": "==3.13"
"version": "==5.1.1"
},
"pyzbar": {
"hashes": [
"sha256:0e204b904e093e5e75aa85e0203bb0e02888105732a509b51f31cff400f34265",
"sha256:496249b546be70ec98c0ff0ad9151e73daaffff129266df86150a15dcd8dac4c",
"sha256:7d6c01d2c0a352fa994aa91b5540d1caeaeaac466656eb41468ca5df33be9f2e"
],
"index": "pypi",
"version": "==0.1.8"
},
"rdflib": {
"hashes": [
@ -400,39 +604,73 @@
},
"redis": {
"hashes": [
"sha256:74c892041cba46078ae1ef845241548baa3bd3634f9a6f0f952f006eb1619c71",
"sha256:7ba8612bbfd966dea8c62322543fed0095da2834dbd5a7c124afbc617a156aa7"
"sha256:6946b5dca72e86103edc8033019cc3814c031232d339d5f4533b02ea85685175",
"sha256:8ca418d2ddca1b1a850afa1680a7d2fd1f3322739271de4b704e0d4668449273"
],
"version": "==3.1.0"
"version": "==3.2.1"
},
"reportlab": {
"hashes": [
"sha256:065bca611829da371df97cec255239a2972119afbab57528022df8b41881a3f6",
"sha256:329843edd93293a96b99b2e9c226066a9ed27f0f881b4933536577e1dab898cf",
"sha256:393140710488b7ffda2762a08f63671dcccdbccfed0e4c8e8ec77e5a355080a1",
"sha256:3c778843f50981a1569539120f0cfa2be0ca7a80e4c61bdfc88a74c323b90b00",
"sha256:44ab0741f40899936e7cc85b0a19614a483da4b476102ac58d1ac20ef6da9fc3",
"sha256:4582272135bd2f355a616b4ac08310947d88b0d3e4f474be16175d89fa200c0d",
"sha256:47612270365e21581178ebbb91edabf9b3c6b4519baf2052d3f4cbe302e3ea76",
"sha256:4f8c5e65fcfa111be309228efca92ba17f329d3dbf3bbe055094fe907ab5d4c8",
"sha256:4ff4942cb1ca1f70a890fd35c7e1d0657d08dbdf6bdb5bc2c0dd3e30a6301cf7",
"sha256:5b109b347ae391963ef846e41c4c65c2bc99e81f1d4eeff687635b73ee952bf5",
"sha256:5cbd56e8dea652f73f728578cb3dbc57bd100f308012fe90596085520d2cb25a",
"sha256:5dddc51b5848a2d0a6fe47e96496220a305e7d796d4a6973cc984ab1d8160ff7",
"sha256:6c81ee26753fa09062d8404f6340eefb02849608b619e3843e0d17a7cda8798f",
"sha256:706ffb184c4cdeabcaef3b9eaba86cbf7684467c32d308ed908917fc679f86c8",
"sha256:794499adc5ad419e064523f13b0782ee2860180e79c8cd02379c4c957e1f0abb",
"sha256:8b7fcc98b0aed3e3e4f134f4d5a498bb9c068fdce6c6b2a9f103d3a339efd8d1",
"sha256:8bc0fe11be68207866902ee96eec6645d574d82fd6abd93c8bcdcd57ac1b4040",
"sha256:92f01e16fe65e51ffa2fe0e37da697c8b8f5d892605c05394c883a866a11efc1",
"sha256:a162484b22c52ab701b74f8c35b2a14f9ecf9694f2ab149fb38f377069743e69",
"sha256:a30b42d6c5ffe1ce7c677328a47386f861c3bb9057bf4de5eb0f97fe17e9b3ba",
"sha256:a7a63d35c59af1d134ec43bab75070af86e59c412289198de3788765627a611c",
"sha256:aee6aa362cbaf9abc406944064a887a69f6f5606fa54abaecf98a78459d1d954",
"sha256:ba537b091614f3839716fb7b418e157216e213a0eab3fe7db2dfbf198fb61224",
"sha256:be8f70ec622b98ef830af5591ab4c0b062a67507a19ca43327da5ff350435b43",
"sha256:c380bcb032736d45bd9a90f4208547a679b7fe2327fc1187a73a2d9b58988f1d",
"sha256:cd2fdcd1e31113878d5c5c9ae17a34368a13e1c9e12d586b66b77ff806371e23",
"sha256:f59d772b504035b1468544a11269ee27648ddb2fae1efddd45ce050da2527813",
"sha256:ff1570bf8ad010c408f72822248ad2276185d473ab9a64c70ad2ec4427dda052"
],
"index": "pypi",
"version": "==3.5.23"
},
"requests": {
"hashes": [
"sha256:502a824f31acdacb3a35b6690b5fbf0bc41d63a24a45c4004352b0242707598e",
"sha256:7bf2a778576d825600030a110f3c0e3e8edc51dfaafe1c146e39a2027784957b"
"sha256:11e007a8a2aa0323f5a921e9e6a2d7e4e67d9877e85773fba9ba6419025cbeb4",
"sha256:9cf5292fcd0f598c671cfc1e0d7d1a7f13bb8085e9a590f48c010551dc6c4b31"
],
"index": "pypi",
"version": "==2.21.0"
"version": "==2.22.0"
},
"requests-cache": {
"hashes": [
"sha256:e9270030becc739b0a7f7f834234c73a878b2d794122bf76f40055a22419eb67",
"sha256:fe561ca119879bbcfb51f03a35e35b425e18f338248e59fd5cf2166c77f457a2"
"sha256:6822f788c5ee248995c4bfbd725de2002ad710182ba26a666e85b64981866060",
"sha256:73a7211870f7d67af5fd81cad2f67cfe1cd3eb4ee6a85155e07613968cc72dfc"
],
"version": "==0.4.13"
"version": "==0.5.0"
},
"shodan": {
"hashes": [
"sha256:c40abb6ff2fd66bdee9f773746fb961eefdfaa8e720a07cb12fb70def136268d"
"sha256:13953527d0a1a86d2346631143066533a6f804551a77e40284d1dc53ce28bd30"
],
"index": "pypi",
"version": "==1.10.4"
"version": "==1.14.0"
},
"sigmatools": {
"hashes": [
"sha256:98c9897f27e7c99f398bff537bb6b0259599177d955f8b60a22db1b246f9cb0b"
"sha256:f28838a26f8a0be066da38dd65b70e3241d109037029bb69069079e2fa3dfdbc"
],
"index": "pypi",
"version": "==0.7.1"
"version": "==0.11"
},
"six": {
"hashes": [
@ -443,19 +681,19 @@
},
"soupsieve": {
"hashes": [
"sha256:466910df7561796a60748826781ebe9a888f7a1668a636ae86783f44d10aae73",
"sha256:87db12ae79194f0ff9808d2b1641c4f031ae39ffa3cab6b907ea7c1e5e5ed445"
"sha256:72b5f1aea9101cf720a36bb2327ede866fd6f1a07b1e87c92a1cc18113cbc946",
"sha256:e4e9c053d59795e440163733a7fec6c5972210e1790c507e4c7b051d6c5259de"
],
"version": "==1.7.3"
"version": "==1.9.2"
},
"sparqlwrapper": {
"hashes": [
"sha256:2a95fdede2833be660b81092934c4a0054ff85f2693098556762a2759ea486f1",
"sha256:7f4c8d38ea1bfcffbc358c9a05de35a3fd7152cc3e8ea57963ee7a0a242f7a5e",
"sha256:acf6d60f0a3684cb673653b07871acb0c350a974b891f20f8ac94926ff9eb2ff"
"sha256:14ec551f0d60b4a496ffcc31f15337e844c085b8ead8cbe9a7178748a6de3794",
"sha256:21928e7a97f565e772cdeeb0abad428960f4307e3a13dbdd8f6d3da8a6a506c9",
"sha256:abc3e7eadcad32fa69a85c003853e2f6f73bda6cc999853838f401a5a1ea1109"
],
"index": "pypi",
"version": "==1.8.2"
"version": "==1.8.4"
},
"stix2-patterns": {
"hashes": [
@ -464,17 +702,23 @@
"index": "pypi",
"version": "==1.1.0"
},
"tabulate": {
"hashes": [
"sha256:8af07a39377cee1103a5c8b3330a421c2d99b9141e9cc5ddd2e3263fea416943"
],
"version": "==0.8.3"
},
"tornado": {
"hashes": [
"sha256:0662d28b1ca9f67108c7e3b77afabfb9c7e87bde174fbda78186ecedc2499a9d",
"sha256:4e5158d97583502a7e2739951553cbd88a72076f152b4b11b64b9a10c4c49409",
"sha256:732e836008c708de2e89a31cb2fa6c0e5a70cb60492bee6f1ea1047500feaf7f",
"sha256:8154ec22c450df4e06b35f131adc4f2f3a12ec85981a203301d310abf580500f",
"sha256:8e9d728c4579682e837c92fdd98036bd5cdefa1da2aaf6acf26947e6dd0c01c5",
"sha256:d4b3e5329f572f055b587efc57d29bd051589fb5a43ec8898c77a47ec2fa2bbb",
"sha256:e5f2585afccbff22390cddac29849df463b252b711aa2ce7c5f3f342a5b3b444"
"sha256:349884248c36801afa19e342a77cc4458caca694b0eda633f5878e458a44cb2c",
"sha256:398e0d35e086ba38a0427c3b37f4337327231942e731edaa6e9fd1865bbd6f60",
"sha256:4e73ef678b1a859f0cb29e1d895526a20ea64b5ffd510a2307b5998c7df24281",
"sha256:559bce3d31484b665259f50cd94c5c28b961b09315ccd838f284687245f416e5",
"sha256:abbe53a39734ef4aba061fca54e30c6b4639d3e1f59653f0da37a0003de148c7",
"sha256:c845db36ba616912074c5b1ee897f8e0124df269468f25e4fe21fe72f6edd7a9",
"sha256:c9399267c926a4e7c418baa5cbe91c7d1cf362d505a1ef898fde44a07c9dd8a5"
],
"version": "==5.1.1"
"version": "==6.0.3"
},
"url-normalize": {
"hashes": [
@ -492,40 +736,54 @@
},
"urllib3": {
"hashes": [
"sha256:61bf29cada3fc2fbefad4fdf059ea4bd1b4a86d2b6d15e1c7c0b582b9752fe39",
"sha256:de9529817c93f27c8ccbfead6985011db27bd0ddfcdb2d86f3f663385c6a9c22"
"sha256:b246607a25ac80bedac05c6f282e3cdaf3afb65420fd024ac94435cabe6e18d1",
"sha256:dbe59173209418ae49d485b87d1681aefa36252ee85884c31346debd19463232"
],
"version": "==1.24.1"
"version": "==1.25.3"
},
"uwhois": {
"editable": true,
"git": "https://github.com/Rafiot/uwhoisd.git",
"ref": "f6f035e52213c8abc20f2084d28cfffb399457cb",
"ref": "411572840eba4c72dc321c549b36a54ed5cea9de",
"subdirectory": "client"
},
"vulners": {
"hashes": [
"sha256:5f05404041cfaa8e5367bf884fc9ee319ebf34bedc495d7f84c433fa121cdb49",
"sha256:919b24df64ea55b6a8ba13e2a0530578f8a4be6a9cee257bf2214046e81c6f35",
"sha256:d45ecb13f5111947056a2dcc071b3e3fd45f6ad654eda06526245bba3850325e"
"sha256:146ef130f215b50cdff790b06b4886c7edb325c075e9fce4bf1d3ab8d64a10d0",
"sha256:53406a86126159eaee9575fa667c99459bfdf9dd8c06bd0ce73fbe536b305e30",
"sha256:a258ccdbaee586207bc80d3590f0315ff151cfe16ea54f2e1629a6018fd9f2a3"
],
"index": "pypi",
"version": "==1.4.0"
"version": "==1.5.0"
},
"wand": {
"hashes": [
"sha256:3e59e4bda9ef9d643d90e881cc950c8eee1508ec2cde1c150a1cbd5a12c1c007",
"sha256:52763dbf65d00cf98d7bc910b49329eea15896249c5555d47e169f2b6efbe166"
"sha256:1d3808e5d7a722096866b1eaa1743f29eb663289e140c5306d6291e1d581fed5",
"sha256:c97029751f595d96ae0042aec0e26ff114e403e060ae2481124abbcca0c65ce2"
],
"index": "pypi",
"version": "==0.5.0"
"version": "==0.5.5"
},
"wrapt": {
"hashes": [
"sha256:565a021fd19419476b9362b05eeaa094178de64f8361e44468f9e9d7843901e1"
],
"version": "==1.11.2"
},
"xlrd": {
"hashes": [
"sha256:546eb36cee8db40c3eaa46c351e67ffee6eeb5fa2650b71bc4c758a29a1b29b2",
"sha256:e551fb498759fa3a5384a94ccd4c3c02eb7c00ea424426e212ac0c57be9dfbde"
],
"index": "pypi",
"version": "==1.2.0"
},
"xlsxwriter": {
"hashes": [
"sha256:7cc07619760641b67112dbe0df938399d4d915d9b9924bb58eb5c17384d29cc6",
"sha256:ae22658a0fc5b9e875fa97c213d1ffd617d86dc49bf08be99ebdac814db7bf36"
"sha256:5ec6aa71f6ae4b6298376d8b6a56ca9cdcb8b80323a444212226447aed4fa10f",
"sha256:ec51d99c0cc5d95ec8d8e9c8de7c8fbbf461988bec01a8c86b5155a6716b0a5a"
],
"version": "==1.1.2"
"version": "==1.1.8"
},
"yara-python": {
"hashes": [
@ -571,17 +829,17 @@
},
"attrs": {
"hashes": [
"sha256:10cbf6e27dbce8c30807caf056c8eb50917e0eaafe86347671b57254006c3e69",
"sha256:ca4be454458f9dec299268d472aaa5a11f67a4ff70093396e1ceae9c76cf4bbb"
"sha256:69c0dbf2ed392de1cb5ec704444b08a5ef81680a61cb899dc08127123af36a79",
"sha256:f0b870f674851ecbfbbbd364d6b5cbdff9dcedbc7f3f5e18a6891057f21fe399"
],
"version": "==18.2.0"
"version": "==19.1.0"
},
"certifi": {
"hashes": [
"sha256:47f9c83ef4c0c621eaef743f133f09fa8a74a9b75f037e8624f83bd1b6626cb7",
"sha256:993f830721089fef441cdfeb4b2c8c9df86f0c63239f06bd025a76a7daddb033"
"sha256:046832c04d4e752f37383b628bc601a7ea7211496b4638f6514d0e5b9acc4939",
"sha256:945e3ba63a0b9f577b1395204e13c3a231f9bc0223888be653286534e5873695"
],
"version": "==2018.11.29"
"version": "==2019.6.16"
},
"chardet": {
"hashes": [
@ -600,39 +858,39 @@
},
"coverage": {
"hashes": [
"sha256:09e47c529ff77bf042ecfe858fb55c3e3eb97aac2c87f0349ab5a7efd6b3939f",
"sha256:0a1f9b0eb3aa15c990c328535655847b3420231af299386cfe5efc98f9c250fe",
"sha256:0cc941b37b8c2ececfed341444a456912e740ecf515d560de58b9a76562d966d",
"sha256:10e8af18d1315de936d67775d3a814cc81d0747a1a0312d84e27ae5610e313b0",
"sha256:1b4276550b86caa60606bd3572b52769860a81a70754a54acc8ba789ce74d607",
"sha256:1e8a2627c48266c7b813975335cfdea58c706fe36f607c97d9392e61502dc79d",
"sha256:2b224052bfd801beb7478b03e8a66f3f25ea56ea488922e98903914ac9ac930b",
"sha256:447c450a093766744ab53bf1e7063ec82866f27bcb4f4c907da25ad293bba7e3",
"sha256:46101fc20c6f6568561cdd15a54018bb42980954b79aa46da8ae6f008066a30e",
"sha256:4710dc676bb4b779c4361b54eb308bc84d64a2fa3d78e5f7228921eccce5d815",
"sha256:510986f9a280cd05189b42eee2b69fecdf5bf9651d4cd315ea21d24a964a3c36",
"sha256:5535dda5739257effef56e49a1c51c71f1d37a6e5607bb25a5eee507c59580d1",
"sha256:5a7524042014642b39b1fcae85fb37556c200e64ec90824ae9ecf7b667ccfc14",
"sha256:5f55028169ef85e1fa8e4b8b1b91c0b3b0fa3297c4fb22990d46ff01d22c2d6c",
"sha256:6694d5573e7790a0e8d3d177d7a416ca5f5c150742ee703f3c18df76260de794",
"sha256:6831e1ac20ac52634da606b658b0b2712d26984999c9d93f0c6e59fe62ca741b",
"sha256:77f0d9fa5e10d03aa4528436e33423bfa3718b86c646615f04616294c935f840",
"sha256:828ad813c7cdc2e71dcf141912c685bfe4b548c0e6d9540db6418b807c345ddd",
"sha256:85a06c61598b14b015d4df233d249cd5abfa61084ef5b9f64a48e997fd829a82",
"sha256:8cb4febad0f0b26c6f62e1628f2053954ad2c555d67660f28dfb1b0496711952",
"sha256:a5c58664b23b248b16b96253880b2868fb34358911400a7ba39d7f6399935389",
"sha256:aaa0f296e503cda4bc07566f592cd7a28779d433f3a23c48082af425d6d5a78f",
"sha256:ab235d9fe64833f12d1334d29b558aacedfbca2356dfb9691f2d0d38a8a7bfb4",
"sha256:b3b0c8f660fae65eac74fbf003f3103769b90012ae7a460863010539bb7a80da",
"sha256:bab8e6d510d2ea0f1d14f12642e3f35cefa47a9b2e4c7cea1852b52bc9c49647",
"sha256:c45297bbdbc8bb79b02cf41417d63352b70bcb76f1bbb1ee7d47b3e89e42f95d",
"sha256:d19bca47c8a01b92640c614a9147b081a1974f69168ecd494687c827109e8f42",
"sha256:d64b4340a0c488a9e79b66ec9f9d77d02b99b772c8b8afd46c1294c1d39ca478",
"sha256:da969da069a82bbb5300b59161d8d7c8d423bc4ccd3b410a9b4d8932aeefc14b",
"sha256:ed02c7539705696ecb7dc9d476d861f3904a8d2b7e894bd418994920935d36bb",
"sha256:ee5b8abc35b549012e03a7b1e86c09491457dba6c94112a2482b18589cc2bdb9"
"sha256:3684fabf6b87a369017756b551cef29e505cb155ddb892a7a29277b978da88b9",
"sha256:39e088da9b284f1bd17c750ac672103779f7954ce6125fd4382134ac8d152d74",
"sha256:3c205bc11cc4fcc57b761c2da73b9b72a59f8d5ca89979afb0c1c6f9e53c7390",
"sha256:465ce53a8c0f3a7950dfb836438442f833cf6663d407f37d8c52fe7b6e56d7e8",
"sha256:48020e343fc40f72a442c8a1334284620f81295256a6b6ca6d8aa1350c763bbe",
"sha256:5296fc86ab612ec12394565c500b412a43b328b3907c0d14358950d06fd83baf",
"sha256:5f61bed2f7d9b6a9ab935150a6b23d7f84b8055524e7be7715b6513f3328138e",
"sha256:68a43a9f9f83693ce0414d17e019daee7ab3f7113a70c79a3dd4c2f704e4d741",
"sha256:6b8033d47fe22506856fe450470ccb1d8ba1ffb8463494a15cfc96392a288c09",
"sha256:7ad7536066b28863e5835e8cfeaa794b7fe352d99a8cded9f43d1161be8e9fbd",
"sha256:7bacb89ccf4bedb30b277e96e4cc68cd1369ca6841bde7b005191b54d3dd1034",
"sha256:839dc7c36501254e14331bcb98b27002aa415e4af7ea039d9009409b9d2d5420",
"sha256:8f9a95b66969cdea53ec992ecea5406c5bd99c9221f539bca1e8406b200ae98c",
"sha256:932c03d2d565f75961ba1d3cec41ddde00e162c5b46d03f7423edcb807734eab",
"sha256:988529edadc49039d205e0aa6ce049c5ccda4acb2d6c3c5c550c17e8c02c05ba",
"sha256:998d7e73548fe395eeb294495a04d38942edb66d1fa61eb70418871bc621227e",
"sha256:9de60893fb447d1e797f6bf08fdf0dbcda0c1e34c1b06c92bd3a363c0ea8c609",
"sha256:9e80d45d0c7fcee54e22771db7f1b0b126fb4a6c0a2e5afa72f66827207ff2f2",
"sha256:a545a3dfe5082dc8e8c3eb7f8a2cf4f2870902ff1860bd99b6198cfd1f9d1f49",
"sha256:a5d8f29e5ec661143621a8f4de51adfb300d7a476224156a39a392254f70687b",
"sha256:aca06bfba4759bbdb09bf52ebb15ae20268ee1f6747417837926fae990ebc41d",
"sha256:bb23b7a6fd666e551a3094ab896a57809e010059540ad20acbeec03a154224ce",
"sha256:bfd1d0ae7e292105f29d7deaa9d8f2916ed8553ab9d5f39ec65bcf5deadff3f9",
"sha256:c62ca0a38958f541a73cf86acdab020c2091631c137bd359c4f5bddde7b75fd4",
"sha256:c709d8bda72cf4cd348ccec2a4881f2c5848fd72903c185f363d361b2737f773",
"sha256:c968a6aa7e0b56ecbd28531ddf439c2ec103610d3e2bf3b75b813304f8cb7723",
"sha256:df785d8cb80539d0b55fd47183264b7002077859028dfe3070cf6359bf8b2d9c",
"sha256:f406628ca51e0ae90ae76ea8398677a921b36f0bd71aab2099dfed08abd0322f",
"sha256:f46087bbd95ebae244a0eda01a618aff11ec7a069b15a3ef8f6b520db523dcf1",
"sha256:f8019c5279eb32360ca03e9fac40a12667715546eed5c5eb59eb381f2f501260",
"sha256:fc5f4d209733750afd2714e9109816a29500718b32dd9a5db01c0cb3a019b96a"
],
"version": "==4.5.2"
"version": "==4.5.3"
},
"entrypoints": {
"hashes": [
@ -643,11 +901,11 @@
},
"flake8": {
"hashes": [
"sha256:09b9bb539920776da542e67a570a5df96ff933c9a08b62cfae920bcc789e4383",
"sha256:e0f8cd519cfc0072c0ee31add5def09d2b3ef6040b34dc426445c3af9b02163c"
"sha256:19241c1cbc971b9962473e4438a2ca19749a7dd002dd1a946eaba171b4114548",
"sha256:8e9dfa3cecb2400b3738a42c54c3043e821682b9c840b0448c0503f781130696"
],
"index": "pypi",
"version": "==3.7.4"
"version": "==3.7.8"
},
"idna": {
"hashes": [
@ -656,6 +914,13 @@
],
"version": "==2.8"
},
"importlib-metadata": {
"hashes": [
"sha256:6dfd58dfe281e8d240937776065dd3624ad5469c835248219bd16cf2e12dbeb7",
"sha256:cb6ee23b46173539939964df59d3d72c3e0c1b5d54b84f1d8a7e912fe43612db"
],
"version": "==0.18"
},
"mccabe": {
"hashes": [
"sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42",
@ -665,11 +930,10 @@
},
"more-itertools": {
"hashes": [
"sha256:38a936c0a6d98a38bcc2d03fdaaedaba9f412879461dd2ceff8d37564d6522e4",
"sha256:c0a5785b1109a6bd7fac76d6837fd1feca158e54e521ccd2ae8bfe393cc9d4fc",
"sha256:fe7a7cae1ccb57d33952113ff4fa1bc5f879963600ed74918f1236e212ee50b9"
"sha256:409cd48d4db7052af495b09dec721011634af3753ae1ef92d2b32f73a745f832",
"sha256:92b8c4b06dac4f0611c0729b2f2ede52b2e1bac1ab48f089c7ddc12e26bb60c4"
],
"version": "==5.0.0"
"version": "==7.2.0"
},
"nose": {
"hashes": [
@ -680,19 +944,26 @@
"index": "pypi",
"version": "==1.3.7"
},
"packaging": {
"hashes": [
"sha256:0c98a5d0be38ed775798ece1b9727178c4469d9c3b4ada66e8e6b7849f8732af",
"sha256:9e1cbf8c12b1f1ce0bb5344b8d7ecf66a6f8a6e91bcb0c84593ed6d3ab5c4ab3"
],
"version": "==19.0"
},
"pluggy": {
"hashes": [
"sha256:8ddc32f03971bfdf900a81961a48ccf2fb677cf7715108f85295c67405798616",
"sha256:980710797ff6a041e9a73a5787804f848996ecaa6f8a1b1e08224a5894f2074a"
"sha256:0825a152ac059776623854c1543d65a4ad408eb3d33ee114dff91e57ec6ae6fc",
"sha256:b9817417e95936bf75d85d3f8767f7df6cdde751fc40aed3bb3074cbcb77757c"
],
"version": "==0.8.1"
"version": "==0.12.0"
},
"py": {
"hashes": [
"sha256:bf92637198836372b520efcba9e020c330123be8ce527e535d185ed4b6f45694",
"sha256:e76826342cefe3c3d5f7e8ee4316b80d1dd8a300781612ddbc765c17ba25a6c6"
"sha256:64f65755aee5b381cea27766a3a147c3f15b9b6b9ac88676de66ba2ae36793fa",
"sha256:dc639b046a6e2cff5bbe40194ad65936d6ba360b52b3c3fe1d08a82dd50b5e53"
],
"version": "==1.7.0"
"version": "==1.8.0"
},
"pycodestyle": {
"hashes": [
@ -703,26 +974,33 @@
},
"pyflakes": {
"hashes": [
"sha256:5e8c00e30c464c99e0b501dc160b13a14af7f27d4dffb529c556e30a159e231d",
"sha256:f277f9ca3e55de669fba45b7393a1449009cff5a37d1af10ebb76c52765269cd"
"sha256:17dbeb2e3f4d772725c777fabc446d5634d1038f234e77343108ce445ea69ce0",
"sha256:d976835886f8c5b31d47970ed689944a0262b5f3afa00a5a7b4dc81e5449f8a2"
],
"version": "==2.1.0"
"version": "==2.1.1"
},
"pyparsing": {
"hashes": [
"sha256:1873c03321fc118f4e9746baf201ff990ceb915f433f23b395f5580d1840cb2a",
"sha256:9b6323ef4ab914af344ba97510e966d64ba91055d6b9afa6b30799340e89cc03"
],
"version": "==2.4.0"
},
"pytest": {
"hashes": [
"sha256:65aeaa77ae87c7fc95de56285282546cfa9c886dc8e5dc78313db1c25e21bc07",
"sha256:6ac6d467d9f053e95aaacd79f831dbecfe730f419c6c7022cb316b365cd9199d"
"sha256:6ef6d06de77ce2961156013e9dff62f1b2688aa04d0dc244299fe7d67e09370d",
"sha256:a736fed91c12681a7b34617c8fcefe39ea04599ca72c608751c31d89579a3f77"
],
"index": "pypi",
"version": "==4.2.0"
"version": "==5.0.1"
},
"requests": {
"hashes": [
"sha256:502a824f31acdacb3a35b6690b5fbf0bc41d63a24a45c4004352b0242707598e",
"sha256:7bf2a778576d825600030a110f3c0e3e8edc51dfaafe1c146e39a2027784957b"
"sha256:11e007a8a2aa0323f5a921e9e6a2d7e4e67d9877e85773fba9ba6419025cbeb4",
"sha256:9cf5292fcd0f598c671cfc1e0d7d1a7f13bb8085e9a590f48c010551dc6c4b31"
],
"index": "pypi",
"version": "==2.21.0"
"version": "==2.22.0"
},
"six": {
"hashes": [
@ -733,10 +1011,24 @@
},
"urllib3": {
"hashes": [
"sha256:61bf29cada3fc2fbefad4fdf059ea4bd1b4a86d2b6d15e1c7c0b582b9752fe39",
"sha256:de9529817c93f27c8ccbfead6985011db27bd0ddfcdb2d86f3f663385c6a9c22"
"sha256:b246607a25ac80bedac05c6f282e3cdaf3afb65420fd024ac94435cabe6e18d1",
"sha256:dbe59173209418ae49d485b87d1681aefa36252ee85884c31346debd19463232"
],
"version": "==1.24.1"
"version": "==1.25.3"
},
"wcwidth": {
"hashes": [
"sha256:3df37372226d6e63e1b1e1eda15c594bca98a22d33a23832a90998faa96bc65e",
"sha256:f4ebe71925af7b40a864553f761ed559b43544f8f71746c2d756c7fe788ade7c"
],
"version": "==0.1.7"
},
"zipp": {
"hashes": [
"sha256:4970c3758f4e89a7857a973b1e2a5d75bcdc47794442f2e2dd4fe8e0466e809a",
"sha256:8a5712cfd3bb4248015eb3b0b3c54a5f6ee3f2425963ef2a0125b8bc40aafaec"
],
"version": "==0.5.2"
}
}
}

View File

@ -5,6 +5,8 @@
[![codecov](https://codecov.io/gh/MISP/misp-modules/branch/master/graph/badge.svg)](https://codecov.io/gh/MISP/misp-modules)
[![FOSSA Status](https://app.fossa.io/api/projects/git%2Bgithub.com%2F8ear%2Fmisp-modules.svg?type=shield)](https://app.fossa.io/projects/git%2Bgithub.com%2F8ear%2Fmisp-modules?ref=badge_shield)
## About
MISP modules are autonomous modules that can be used for expansion and other services in [MISP](https://github.com/MISP/MISP).
The modules are written in Python 3 following a simple API interface. The objective is to ease the extensions of MISP functionalities
@ -14,12 +16,11 @@ MISP modules support is included in MISP starting from version `2.4.28`.
For more information: [Extending MISP with Python modules](https://www.circl.lu/assets/files/misp-training/switch2016/2-misp-modules.pdf) slides from MISP training.
# Documentation
## Documentation
The new documentation can be found [here](https://misp.github.io/misp-modules).
# License
[![FOSSA Status](https://app.fossa.io/api/projects/git%2Bgithub.com%2F8ear%2Fmisp-modules.svg?type=large)](https://app.fossa.io/projects/git%2Bgithub.com%2F8ear%2Fmisp-modules?ref=badge_large)
## License
[![FOSSA Status](https://app.fossa.io/api/projects/git%2Bgithub.com%2F8ear%2Fmisp-modules.svg?type=large)](https://app.fossa.io/projects/git%2Bgithub.com%2F8ear%2Fmisp-modules?ref=badge_large)

View File

@ -1,63 +1,82 @@
-i https://pypi.org/simple
-e .
-e git+https://github.com/D4-project/BGP-Ranking.git/@7e698f87366e6f99b4d0d11852737db28e3ddc62#egg=pybgpranking&subdirectory=client
-e git+https://github.com/D4-project/IPASN-History.git/@e846cd36fe1ed6b22f60890bba89f84e61b62e59#egg=pyipasnhistory&subdirectory=client
-e git+https://github.com/D4-project/BGP-Ranking.git/@429cea9c0787876820984a2df4e982449a84c10e#egg=pybgpranking&subdirectory=client
-e git+https://github.com/D4-project/IPASN-History.git/@47cd0f2658ab172fce42126ff3a1dbcddfb0b5fb#egg=pyipasnhistory&subdirectory=client
-e git+https://github.com/MISP/PyIntel471.git@0df8d51f1c1425de66714b3a5a45edb69b8cc2fc#egg=pyintel471
-e git+https://github.com/MISP/PyMISP.git@2c877f2aec11b7f5d2f23dfc5ce7398b2ce33b48#egg=pymisp
-e git+https://github.com/Rafiot/uwhoisd.git@f6f035e52213c8abc20f2084d28cfffb399457cb#egg=uwhois&subdirectory=client
-e git+https://github.com/MISP/PyMISP.git@583fb6592495ea358aad47a8a1ec92d43c13348a#egg=pymisp
-e git+https://github.com/Rafiot/uwhoisd.git@411572840eba4c72dc321c549b36a54ed5cea9de#egg=uwhois&subdirectory=client
-e git+https://github.com/cartertemm/ODTReader.git/@49d6938693f6faa3ff09998f86dba551ae3a996b#egg=odtreader
-e git+https://github.com/sebdraven/pydnstrails@48c1f740025c51289f43a24863d1845ff12fd21a#egg=pydnstrails
-e git+https://github.com/sebdraven/pyonyphe@66329baeee7cab844f2203c047c2551828eaf14d#egg=pyonyphe
-e git+https://github.com/sebdraven/pyonyphe@cbb0168d5cb28a9f71f7ab3773164a7039ccdb12#egg=pyonyphe
aiohttp==3.4.4
antlr4-python3-runtime==4.7.2 ; python_version >= '3'
async-timeout==3.0.1
attrs==18.2.0
attrs==19.1.0
backscatter==0.2.4
beautifulsoup4==4.7.1
blockchain==1.4.4
certifi==2018.11.29
certifi==2019.3.9
chardet==3.0.4
click-plugins==1.0.4
click-plugins==1.1.1
click==7.0
colorama==0.4.1
dnspython==1.16.0
domaintools-api==0.3.3
enum-compat==0.0.2
ez-setup==0.9
ezodf==0.3.2
future==0.17.1
httplib2==0.12.0
httplib2==0.12.3
idna-ssl==1.1.0 ; python_version < '3.7'
idna==2.8
isodate==0.6.0
jsonschema==2.6.0
jbxapi==3.1.3
jsonschema==3.0.1
lxml==4.3.3
maclookup==1.0.3
multidict==4.5.2
np==1.0.2
numpy==1.16.3
oauth2==1.9.0.post1
opencv-python==4.1.0.25
pandas-ods-reader==0.0.6
pandas==0.24.2
passivetotal==1.0.30
pillow==5.4.1
psutil==5.5.0
pdftotext==2.1.1
pillow==6.0.0
psutil==5.6.2
pyeupi==1.0
pygeoip==0.3.2
pyparsing==2.3.1
pypdns==1.3
pyparsing==2.4.0
pypdns==1.4.1
pypssl==2.1
pyrsistent==0.15.2
pytesseract==0.2.6
python-dateutil==2.7.5
pyyaml==3.13
python-dateutil==2.8.0
python-docx==0.8.10
python-pptx==0.6.18
pytz==2019.1
pyyaml==5.1
pyzbar==0.1.8
rdflib==4.2.2
redis==3.1.0
requests-cache==0.4.13
requests==2.21.0
shodan==1.10.4
sigmatools==0.7.1
redis==3.2.1
reportlab==3.5.21
requests-cache==0.5.0
requests==2.22.0
shodan==1.13.0
sigmatools==0.10
six==1.12.0
soupsieve==1.7.3
sparqlwrapper==1.8.2
soupsieve==1.9.1
sparqlwrapper==1.8.4
stix2-patterns==1.1.0
tornado==5.1.1
tabulate==0.8.3
tornado==6.0.2
url-normalize==1.4.1
urlarchiver==0.2
urllib3==1.24.1
vulners==1.4.0
wand==0.5.0
xlsxwriter==1.1.2
urllib3==1.25.3
vulners==1.5.0
wand==0.5.3
xlrd==1.2.0
xlsxwriter==1.1.8
yara-python==3.8.1
yarl==1.3.0

View File

@ -1 +0,0 @@
documentation.md

1602
doc/README.md Normal file

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,9 @@
{
"description": "Query backscatter.io (https://backscatter.io/).",
"requirements": ["backscatter python library"],
"features": "The module takes a source or destination IP address as input and displays the information known by backscatter.io.",
"logo": "logos/backscatter_io.png",
"references": ["https://pypi.org/project/backscatter/"],
"input": "IP addresses.",
"output": "Text containing a history of the IP addresses, especially concerning scanning activity, based on backscatter.io information."
}

View File

@ -0,0 +1,9 @@
{
"description": "An expansion hover module to query a special dns blacklist to check if a bitcoin address has been abused.",
"requirements": ["dnspython3: dns python library"],
"features": "The module queries a dns blacklist directly with the bitcoin address and get a response if the address has been abused.",
"logo": "logos/bitcoin.png",
"input": "btc address attribute.",
"output": "Text to indicate if the BTC address has been abused.",
"references": ["https://btcblack.it/"]
}

View File

@ -0,0 +1,9 @@
{
"description": "An expansion module to submit files and URLs to Cuckoo Sandbox.",
"logo": "logos/cuckoo.png",
"requirements": ["Access to a Cuckoo Sandbox API and an API key if the API requires it. (api_url and api_key)"],
"input": "A malware-sample or attachment for files. A url or domain for URLs.",
"output": "A text field containing 'Cuckoo task id: <id>'",
"references": ["https://cuckoosandbox.org/", "https://cuckoo.sh/docs/"],
"features": "The module takes a malware-sample, attachment, url or domain and submits it to Cuckoo Sandbox.\n The returned task id can be used to retrieve results when the analysis completed."
}

View File

@ -0,0 +1,9 @@
{
"description": "Module to extract freetext from a .docx document.",
"logo": "logos/docx.png",
"requirements": ["docx python library"],
"input": "Attachment attribute containing a .docx document.",
"output": "Text and freetext parsed from the document.",
"references": [],
"features": "The module reads the text contained in a .docx document. The result is passed to the freetext import parser so IoCs can be extracted out of it."
}

View File

@ -0,0 +1,9 @@
{
"description": "Module to access GreyNoise.io API",
"logo": "logos/greynoise.png",
"requirements": [],
"input": "An IP address.",
"output": "Additional information about the IP fetched from Greynoise API.",
"references": ["https://greynoise.io/", "https://github.com/GreyNoise-Intelligence/api.greynoise.io"],
"features": "The module takes an IP address as input and queries Greynoise for some additional information about it. The result is returned as text."
}

9
doc/expansion/hibp.json Normal file
View File

@ -0,0 +1,9 @@
{
"description": "Module to access haveibeenpwned.com API.",
"logo": "logos/hibp.png",
"requirements": [],
"input": "An email address",
"output": "Additional information about the email address.",
"references": ["https://haveibeenpwned.com/"],
"features": "The module takes an email address as input and queries haveibeenpwned.com API to find additional information about it. This additional information actually tells if any account using the email address has already been compromised in a data breach."
}

View File

@ -0,0 +1,9 @@
{
"description": "Query Joe Sandbox API with a submission url to get the json report and extract its data that is parsed and converted into MISP attributes and objects.\n\nThis url can, for instance, come from the result of the [joesandbox_submit expansion module](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/expansion/joesandbox_submit.py).",
"logo": "logos/joesandbox.png",
"requirements": ["jbxapi: Joe Sandbox API python3 library"],
"input": "Link of a Joe Sandbox sample or url submission.",
"output": "MISP attributes & objects parsed from the analysis report.",
"references": ["https://www.joesecurity.org", "https://www.joesandbox.com/"],
"features": "Module using the new format of modules able to return attributes and objects.\n\nThe module returns the same results as the import module [joe_import](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/import_mod/joe_import.py) taking directly the json report as input.\n\nEven if the introspection will allow all kinds of links to call this module, obviously only the ones presenting a sample or url submission in the Joe Sandbox API will return results.\n\nTo make it work you will need to fill the 'apikey' configuration with your Joe Sandbox API key and provide a valid link as input."
}

View File

@ -0,0 +1,9 @@
{
"description": "A module to submit files or URLs to Joe Sandbox for an advanced analysis, and return the link of the submission.",
"logo": "logos/joesandbox.png",
"requirements": ["jbxapi: Joe Sandbox API python3 library"],
"input": "Sample, url (or domain) to submit to Joe Sandbox for an advanced analysis.",
"output": "Link of the data in input submitted to Joe Sandbox.",
"references": ["https://www.joesecurity.org", "https://www.joesandbox.com/"],
"features": "The module requires a Joe Sandbox API key to submit files or URL, and returns the link of the submitted analysis.\n\nIt is then possible, when the analysis is completed, to query the Joe Sandbox API to get the data related to the analysis, using the [joesandbox_query module](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/expansion/joesandbox_query.py) directly on this submission link."
}

View File

@ -0,0 +1,9 @@
{
"description": "Module to access Macvendors API.",
"logo": "logos/macvendors.png",
"requirements": [],
"input": "A MAC address.",
"output": "Additional information about the MAC address.",
"references": ["https://macvendors.com/", "https://macvendors.com/api"],
"features": "The module takes a MAC address as input and queries macvendors.com for some information about it. The API returns the name of the vendor related to the address."
}

View File

@ -0,0 +1,8 @@
{
"description": "Module to process some optical character recognition on pictures.",
"requirements": ["cv2: The OpenCV python library."],
"input": "A picture attachment.",
"output": "Text and freetext fetched from the input picture.",
"references": [],
"features": "The module takes an attachment attribute as input and processes some optical character recognition on it. The text found is then passed to the Freetext importer to extract potential IoCs."
}

View File

@ -0,0 +1,10 @@
{
"description": "Module to extract freetext from a .ods document.",
"logo": "logos/ods.png",
"requirements": ["ezodf: Python package to create/manipulate OpenDocumentFormat files.",
"pandas_ods_reader: Python library to read in ODS files."],
"input": "Attachment attribute containing a .ods document.",
"output": "Text and freetext parsed from the document.",
"references": [],
"features": "The module reads the text contained in a .ods document. The result is passed to the freetext import parser so IoCs can be extracted out of it."
}

View File

@ -0,0 +1,9 @@
{
"description": "Module to extract freetext from a .odt document.",
"logo": "logos/odt.png",
"requirements": ["ODT reader python library."],
"input": "Attachment attribute containing a .odt document.",
"output": "Text and freetext parsed from the document.",
"references": [],
"features": "The module reads the text contained in a .odt document. The result is passed to the freetext import parser so IoCs can be extracted out of it."
}

View File

@ -0,0 +1,9 @@
{
"description": "Module to extract freetext from a PDF document.",
"logo": "logos/pdf.jpg",
"requirements": ["pdftotext: Python library to extract text from PDF."],
"input": "Attachment attribute containing a PDF document.",
"output": "Text and freetext parsed from the document.",
"references": [],
"features": "The module reads the text contained in a PDF document. The result is passed to the freetext import parser so IoCs can be extracted out of it."
}

View File

@ -0,0 +1,9 @@
{
"description": "Module to extract freetext from a .pptx document.",
"logo": "logos/pptx.png",
"requirements": ["pptx: Python library to read PowerPoint files."],
"input": "Attachment attribute containing a .pptx document.",
"output": "Text and freetext parsed from the document.",
"references": [],
"features": "The module reads the text contained in a .pptx document. The result is passed to the freetext import parser so IoCs can be extracted out of it."
}

View File

@ -0,0 +1,9 @@
{
"description": "Module to decode QR codes.",
"requirements": ["cv2: The OpenCV python library.",
"pyzbar: Python library to read QR codes."],
"input": "A QR code stored as attachment attribute.",
"output": "The URL or bitcoin address the QR code is pointing to.",
"references": [],
"features": "The module reads the QR code and returns the related address, which can be a URL or a bitcoin address."
}

View File

@ -0,0 +1,9 @@
{
"description": "Query of the URLhaus API to get additional information about the input attribute.",
"logo": "logos/urlhaus.png",
"requirements": [],
"input": "A domain, hostname, url, ip, md5 or sha256 attribute.",
"output": "MISP attributes & objects fetched from the result of the URLhaus API query.",
"references": ["https://urlhaus.abuse.ch/"],
"features": "Module using the new format of modules able to return attributes and objects.\n\nThe module takes one of the attribute types specified as input, and queries the URLhaus API with it. If any result is returned by the API, attributes and objects are created accordingly."
}

View File

@ -1,9 +1,9 @@
{
"description": "Module to get information from virustotal.",
"description": "Module to get advanced information from virustotal.",
"logo": "logos/virustotal.png",
"requirements": ["An access to the VirusTotal API (apikey)"],
"requirements": ["An access to the VirusTotal API (apikey), with a high request rate limit."],
"input": "A domain, hash (md5, sha1, sha256 or sha512), hostname or IP address attribute.",
"output": "MISP attributes mapped from the result of the query on VirusTotal API.",
"references": ["https://www.virustotal.com/"],
"features": "This module takes a MISP attribute as input and queries the VirusTotal API with it, in order to get additional data on the input attribute.\n\nMultiple recursive requests on the API can then be processed on some attributes found in the first request. A limit can be set to restrict the number of values to query again, and at the same time the number of request submitted to the API.\n\nThis limit is important because the default user VirusTotal apikey only allows to process a certain nunmber of queries per minute. As a consequence it is recommended to have a larger number of requests or a private apikey.\n\nData is then mapped into MISP attributes."
"output": "MISP attributes and objects resulting from the parsing of the VirusTotal report concerning the input attribute.",
"references": ["https://www.virustotal.com/", "https://developers.virustotal.com/reference"],
"features": "New format of modules able to return attributes and objects.\n\nA module to take a MISP attribute as input and query the VirusTotal API to get additional data about it.\n\nCompared to the [standard VirusTotal expansion module](https://github.com/MISP/misp-modules/blob/master/misp_modules/modules/expansion/virustotal_public.py), this module is made for advanced parsing of VirusTotal report, with a recursive analysis of the elements found after the first request.\n\nThus, it requires a higher request rate limit to avoid the API to return a 204 error (Request rate limit exceeded), and the data parsed from the different requests are returned as MISP attributes and objects, with the corresponding relations between each one of them."
}

View File

@ -0,0 +1,9 @@
{
"description": "Module to get information from VirusTotal.",
"logo": "logos/virustotal.png",
"requirements": ["An access to the VirusTotal API (apikey)"],
"input": "A domain, hostname, ip, url or hash (md5, sha1, sha256 or sha512) attribute.",
"output": "MISP attributes and objects resulting from the parsing of the VirusTotal report concerning the input attribute.",
"references": ["https://www.virustotal.com", "https://developers.virustotal.com/reference"],
"features": "New format of modules able to return attributes and objects.\n\nA module to take a MISP attribute as input and query the VirusTotal API to get additional data about it.\n\nCompared to the [more advanced VirusTotal expansion module](https://github.com/MISP/misp-modules/blob/master/misp_modules/modules/expansion/virustotal.py), this module is made for VirusTotal users who have a low request rate limit.\n\nThus, it only queries the API once and returns the results that are parsed into MISP attributes and objects."
}

View File

@ -0,0 +1,9 @@
{
"description": "Module to extract freetext from a .xlsx document.",
"logo": "logos/xlsx.png",
"requirements": ["pandas: Python library to perform data analysis, time series and statistics."],
"input": "Attachment attribute containing a .xlsx document.",
"output": "Text and freetext parsed from the document.",
"references": [],
"features": "The module reads the text contained in a .xlsx document. The result is passed to the freetext import parser so IoCs can be extracted out of it."
}

View File

@ -0,0 +1,9 @@
{
"description": "Module to export malicious network activity attributes to Cisco fireSIGHT manager block rules.",
"logo": "logos/cisco.png",
"requirements": ["Firesight manager console credentials"],
"input": "Network activity attributes (IPs, URLs).",
"output": "Cisco fireSIGHT manager block rules.",
"references": [],
"features": "The module goes through the attributes to find all the network activity ones in order to create block rules for the Cisco fireSIGHT manager."
}

View File

@ -1,7 +1,7 @@
{
"description": "Simple export of a MISP event to PDF.",
"requirements": ["PyMISP", "asciidoctor"],
"features": "The module takes care of the PDF file building, and work with any MISP Event. Except the requirement of asciidoctor, used to create the file, there is no special feature concerning the Event.",
"requirements": ["PyMISP", "reportlab"],
"features": "The module takes care of the PDF file building, and work with any MISP Event. Except the requirement of reportlab, used to create the file, there is no special feature concerning the Event. Some parameters can be given through the config dict. 'MISP_base_url_for_dynamic_link' is your MISP URL, to attach an hyperlink to your event on your MISP instance from the PDF. Keep it clear to avoid hyperlinks in the generated pdf.\n 'MISP_name_for_metadata' is your CERT or MISP instance name. Used as text in the PDF' metadata\n 'Activate_textual_description' is a boolean (True or void) to activate the textual description/header abstract of an event\n 'Activate_galaxy_description' is a boolean (True or void) to activate the description of event related galaxies.\n 'Activate_related_events' is a boolean (True or void) to activate the description of related event. Be aware this might leak information on confidential events linked to the current event !\n 'Activate_internationalization_fonts' is a boolean (True or void) to activate Noto fonts instead of default fonts (Helvetica). This allows the support of CJK alphabet. Be sure to have followed the procedure to download Noto fonts (~70Mo) in the right place (/tools/pdf_fonts/Noto_TTF), to allow PyMisp to find and use them during PDF generation.\n 'Custom_fonts_path' is a text (path or void) to the TTF file of your choice, to create the PDF with it. Be aware the PDF won't support bold/italic/special style anymore with this option ",
"references": ["https://acrobat.adobe.com/us/en/acrobat/about-adobe-pdf.html"],
"input": "MISP Event",
"output": "MISP Event in a PDF file."

View File

@ -30,7 +30,7 @@ def generate_doc(root_path):
value = ', '.join(value) if isinstance(value, list) else '{}'.format(value.replace('\n', '\n>'))
markdown.append('- **{}**:\n>{}\n'.format(field, value))
markdown.append('\n-----\n')
with open('documentation.md', 'w') as w:
with open('README.md', 'w') as w:
w.write(''.join(markdown))
def generate_docs_for_mkdocs(root_path):

View File

@ -0,0 +1,9 @@
{
"description": "A module to import data from a Joe Sandbox analysis json report.",
"logo": "logos/joesandbox.png",
"requirements": [],
"input": "Json report of a Joe Sandbox analysis.",
"output": "MISP attributes & objects parsed from the analysis report.",
"references": ["https://www.joesecurity.org", "https://www.joesandbox.com/"],
"features": "Module using the new format of modules able to return attributes and objects.\n\nThe module returns the same results as the expansion module [joesandbox_query](https://github.com/MISP/misp-modules/tree/master/misp_modules/modules/expansion/joesandbox_query.py) using the submission link of the analysis to get the json report.\n\n"
}

Binary file not shown.

After

Width:  |  Height:  |  Size: 25 KiB

BIN
doc/logos/cisco.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 35 KiB

BIN
doc/logos/docx.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 8.4 KiB

BIN
doc/logos/greynoise.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 112 KiB

BIN
doc/logos/hibp.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 20 KiB

BIN
doc/logos/joesandbox.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 9.6 KiB

BIN
doc/logos/macvendors.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 4.9 KiB

BIN
doc/logos/ods.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 9.9 KiB

BIN
doc/logos/odt.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 13 KiB

BIN
doc/logos/pdf.jpg Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 7.8 KiB

BIN
doc/logos/pptx.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 12 KiB

BIN
doc/logos/urlhaus.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 47 KiB

BIN
doc/logos/xlsx.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 9.8 KiB

View File

@ -0,0 +1,14 @@
# Systemd unit running misp-modules as a system-wide service, bound to
# localhost so only the local MISP instance can reach it.
[Unit]
Description=System-wide instance of the MISP Modules
After=network.target

[Service]
# Run as the web-server user so the service shares ownership with MISP.
User=www-data
Group=www-data
WorkingDirectory=/usr/local/src/misp-modules
# Use the MISP virtualenv so the pip-installed misp-modules entry point is found.
Environment="PATH=/var/www/MISP/venv/bin"
# -s: system install (package mode); -l: listen address.
ExecStart=/var/www/MISP/venv/bin/misp-modules -l 127.0.0.1 -s

[Install]
WantedBy=multi-user.target

View File

@ -55,7 +55,7 @@ log = logging.getLogger('misp-modules')
def handle_signal(sig, frame):
IOLoop.instance().add_callback(IOLoop.instance().stop)
IOLoop.instance().add_callback_from_signal(IOLoop.instance().stop)
def init_logger(level=False):
@ -210,37 +210,59 @@ class QueryModule(tornado.web.RequestHandler):
self.finish()
def _launch_from_current_dir():
log.info('Launch MISP modules server from current directory.')
os.chdir(os.path.dirname(__file__))
modulesdir = 'modules'
helpersdir = 'helpers'
load_helpers(helpersdir=helpersdir)
return load_modules(modulesdir)
def main():
global mhandlers
global loaded_modules
signal.signal(signal.SIGINT, handle_signal)
signal.signal(signal.SIGTERM, handle_signal)
argParser = argparse.ArgumentParser(description='misp-modules server')
argParser = argparse.ArgumentParser(description='misp-modules server', formatter_class=argparse.RawTextHelpFormatter)
argParser.add_argument('-t', default=False, action='store_true', help='Test mode')
argParser.add_argument('-s', default=False, action='store_true', help='Run a system install (package installed via pip)')
argParser.add_argument('-d', default=False, action='store_true', help='Enable debugging')
argParser.add_argument('-p', default=6666, help='misp-modules TCP port (default 6666)')
argParser.add_argument('-l', default='localhost', help='misp-modules listen address (default localhost)')
argParser.add_argument('-m', default=[], action='append', help='Register a custom module')
argParser.add_argument('--devel', default=False, action='store_true', help='''Start in development mode, enable debug, start only the module(s) listed in -m.\nExample: -m misp_modules.modules.expansion.bgpranking''')
args = argParser.parse_args()
port = args.p
listen = args.l
log = init_logger(level=args.d)
if args.s:
log.info('Launch MISP modules server from package.')
load_package_helpers()
mhandlers, loaded_modules = load_package_modules()
if args.devel:
log = init_logger(level=True)
log.info('Launch MISP modules server in developement mode. Enable debug, load a list of modules is -m is used.')
if args.m:
mhandlers = {}
modules = []
for module in args.m:
splitted = module.split(".")
modulename = splitted[-1]
moduletype = splitted[2]
mhandlers[modulename] = importlib.import_module(module)
mhandlers['type:' + modulename] = moduletype
modules.append(modulename)
log.info('MISP modules {0} imported'.format(modulename))
else:
mhandlers, loaded_modules = _launch_from_current_dir()
else:
log.info('Launch MISP modules server from current directory.')
os.chdir(os.path.dirname(__file__))
modulesdir = 'modules'
helpersdir = 'helpers'
load_helpers(helpersdir=helpersdir)
mhandlers, loaded_modules = load_modules(modulesdir)
log = init_logger(level=args.d)
if args.s:
log.info('Launch MISP modules server from package.')
load_package_helpers()
mhandlers, loaded_modules = load_package_modules()
else:
mhandlers, loaded_modules = _launch_from_current_dir()
for module in args.m:
mispmod = importlib.import_module(module)
mispmod.register(mhandlers, loaded_modules)
for module in args.m:
mispmod = importlib.import_module(module)
mispmod.register(mhandlers, loaded_modules)
service = [(r'/modules', ListModules), (r'/query', QueryModule)]
@ -266,8 +288,11 @@ def main():
if args.t:
log.info('MISP modules started in test-mode, quitting immediately.')
sys.exit()
IOLoop.instance().start()
IOLoop.instance().stop()
try:
IOLoop.instance().start()
finally:
IOLoop.instance().stop()
return 0

View File

@ -0,0 +1 @@
all = ['joe_parser']

View File

@ -0,0 +1,423 @@
# -*- coding: utf-8 -*-
from collections import defaultdict
from datetime import datetime
from pymisp import MISPAttribute, MISPEvent, MISPObject
import json
# Mapping tables translating Joe Sandbox json report fields into MISP
# attribute types and object relations. Keys are the field names found in
# the report; values are either a JoeParser method name (arch_type_mapping)
# or (MISP attribute type, object relation) pairs. Keys starting with '@'
# are xml-attribute-style fields of the report.
arch_type_mapping = {'ANDROID': 'parse_apk', 'LINUX': 'parse_elf', 'WINDOWS': 'parse_pe'}
domain_object_mapping = {'@ip': ('ip-dst', 'ip'), '@name': ('domain', 'domain')}
dropped_file_mapping = {'@entropy': ('float', 'entropy'),
                        '@file': ('filename', 'filename'),
                        '@size': ('size-in-bytes', 'size-in-bytes'),
                        '@type': ('mime-type', 'mimetype')}
# Hash algorithm names as reported by Joe Sandbox -> MISP hash attribute types.
dropped_hash_mapping = {'MD5': 'md5', 'SHA': 'sha1', 'SHA-256': 'sha256', 'SHA-512': 'sha512'}
elf_object_mapping = {'epaddr': 'entrypoint-address', 'machine': 'arch', 'osabi': 'os_abi'}
# Single-letter ELF section flags -> human-readable flag names.
elf_section_flags_mapping = {'A': 'ALLOC', 'I': 'INFO_LINK', 'M': 'MERGE',
                             'S': 'STRINGS', 'T': 'TLS', 'W': 'WRITE',
                             'X': 'EXECINSTR'}
file_object_fields = ['filename', 'md5', 'sha1', 'sha256', 'sha512', 'ssdeep']
file_object_mapping = {'entropy': ('float', 'entropy'),
                       'filesize': ('size-in-bytes', 'size-in-bytes'),
                       'filetype': ('mime-type', 'mimetype')}
# File activity kinds -> relationship types on the created references.
file_references_mapping = {'fileCreated': 'creates', 'fileDeleted': 'deletes',
                           'fileMoved': 'moves', 'fileRead': 'reads', 'fileWritten': 'writes'}
network_behavior_fields = ('srcip', 'dstip', 'srcport', 'dstport')
network_connection_object_mapping = {'srcip': ('ip-src', 'ip-src'), 'dstip': ('ip-dst', 'ip-dst'),
                                     'srcport': ('port', 'src-port'), 'dstport': ('port', 'dst-port')}
pe_object_fields = {'entrypoint': ('text', 'entrypoint-address'),
                    'imphash': ('imphash', 'imphash')}
pe_object_mapping = {'CompanyName': 'company-name', 'FileDescription': 'file-description',
                     'FileVersion': 'file-version', 'InternalName': 'internal-filename',
                     'LegalCopyright': 'legal-copyright', 'OriginalFilename': 'original-filename',
                     'ProductName': 'product-filename', 'ProductVersion': 'product-version',
                     'Translation': 'lang-id'}
pe_section_object_mapping = {'characteristics': ('text', 'characteristic'),
                             'entropy': ('float', 'entropy'),
                             'name': ('text', 'name'), 'rawaddr': ('hex', 'offset'),
                             'rawsize': ('size-in-bytes', 'size-in-bytes'),
                             'virtaddr': ('hex', 'virtual_address'),
                             'virtsize': ('size-in-bytes', 'virtual_size')}
process_object_fields = {'cmdline': 'command-line', 'name': 'name',
                         'parentpid': 'parent-pid', 'pid': 'pid',
                         'path': 'current-directory'}
# Protocol name -> OSI layer number, used to build 'layerN-protocol' relations.
protocols = {'tcp': 4, 'udp': 4, 'icmp': 3,
             'http': 7, 'https': 7, 'ftp': 7}
registry_references_mapping = {'keyValueCreated': 'creates', 'keyValueModified': 'modifies'}
regkey_object_mapping = {'name': ('text', 'name'), 'newdata': ('text', 'data'),
                         'path': ('regkey', 'key')}
signerinfo_object_mapping = {'sigissuer': ('text', 'issuer'),
                             'version': ('text', 'version')}
class JoeParser():
    """Parse a Joe Sandbox json analysis report into MISP attributes/objects.

    Typical usage: instantiate, call `parse_data(report)` with the decoded
    analysis part of the json report, then `finalize_results()`; the
    MISP-standard payload is then available in `self.results`.
    """

    def __init__(self):
        self.misp_event = MISPEvent()
        # source object uuid -> list of reference dicts, attached at the end
        # by build_references() (objects may not exist yet when collected).
        self.references = defaultdict(list)
        # attribute type -> value -> set of (source uuid, relationship) pairs,
        # used to deduplicate attributes across processes.
        self.attributes = defaultdict(lambda: defaultdict(set))
        # (target id, process path) -> uuid of the matching process object.
        self.process_references = {}

    def parse_data(self, data):
        """Run the full parsing pipeline over the report dict."""
        self.data = data
        if self.analysis_type() == "file":
            self.parse_fileinfo()
        else:
            self.parse_url_analysis()
        self.parse_system_behavior()
        self.parse_network_behavior()
        self.parse_screenshot()
        self.parse_network_interactions()
        self.parse_dropped_files()
        if self.attributes:
            self.handle_attributes()
        self.parse_mitre_attack()

    def build_references(self):
        """Attach all queued references to their source MISP objects."""
        for misp_object in self.misp_event.objects:
            object_uuid = misp_object.uuid
            if object_uuid in self.references:
                for reference in self.references[object_uuid]:
                    misp_object.add_reference(**reference)

    def handle_attributes(self):
        """Create the deduplicated attributes and queue their references."""
        for attribute_type, attribute in self.attributes.items():
            for attribute_value, references in attribute.items():
                attribute_uuid = self.create_attribute(attribute_type, attribute_value)
                for reference in references:
                    source_uuid, relationship = reference
                    self.references[source_uuid].append(dict(referenced_uuid=attribute_uuid,
                                                             relationship_type=relationship))

    def parse_dropped_files(self):
        """Build file objects for dropped files, linked to the dropping process."""
        droppedinfo = self.data['droppedinfo']
        if droppedinfo:
            for droppedfile in droppedinfo['hash']:
                file_object = MISPObject('file')
                for key, mapping in dropped_file_mapping.items():
                    attribute_type, object_relation = mapping
                    file_object.add_attribute(object_relation, **{'type': attribute_type, 'value': droppedfile[key]})
                if droppedfile['@malicious'] == 'true':
                    file_object.add_attribute('state', **{'type': 'text', 'value': 'Malicious'})
                for h in droppedfile['value']:
                    hash_type = dropped_hash_mapping[h['@algo']]
                    file_object.add_attribute(hash_type, **{'type': hash_type, 'value': h['$']})
                self.misp_event.add_object(**file_object)
                self.references[self.process_references[(int(droppedfile['@targetid']), droppedfile['@process'])]].append({
                    'referenced_uuid': file_object.uuid,
                    'relationship_type': 'drops'
                })

    def parse_mitre_attack(self):
        """Tag the event with the MITRE ATT&CK techniques listed in the report."""
        mitreattack = self.data['mitreattack']
        if mitreattack:
            for tactic in mitreattack['tactic']:
                if tactic.get('technique'):
                    for technique in tactic['technique']:
                        self.misp_event.add_tag('misp-galaxy:mitre-attack-pattern="{} - {}"'.format(technique['name'], technique['id']))

    def parse_network_behavior(self):
        """Aggregate captured packets into network-connection objects."""
        network = self.data['behavior']['network']
        # (srcip, dstip, srcport, dstport) -> protocol -> set of timestamps
        connections = defaultdict(lambda: defaultdict(set))
        for protocol, layer in protocols.items():
            if network.get(protocol):
                for packet in network[protocol]['packet']:
                    timestamp = datetime.strptime(self.parse_timestamp(packet['timestamp']), '%b %d, %Y %H:%M:%S.%f')
                    connections[tuple(packet[field] for field in network_behavior_fields)][protocol].add(timestamp)
        for connection, data in connections.items():
            attributes = self.prefetch_attributes_data(connection)
            # One object per connection when each OSI layer appears at most
            # once; otherwise one object per protocol seen on the connection.
            if len(data.keys()) == len(set(protocols[protocol] for protocol in data.keys())):
                network_connection_object = MISPObject('network-connection')
                for object_relation, attribute in attributes.items():
                    network_connection_object.add_attribute(object_relation, **attribute)
                network_connection_object.add_attribute('first-packet-seen',
                                                        **{'type': 'datetime', 'value': min(tuple(min(timestamp) for timestamp in data.values()))})
                for protocol in data.keys():
                    network_connection_object.add_attribute('layer{}-protocol'.format(protocols[protocol]), **{'type': 'text', 'value': protocol})
                self.misp_event.add_object(**network_connection_object)
                self.references[self.analysisinfo_uuid].append(dict(referenced_uuid=network_connection_object.uuid,
                                                                    relationship_type='initiates'))
            else:
                for protocol, timestamps in data.items():
                    network_connection_object = MISPObject('network-connection')
                    for object_relation, attribute in attributes.items():
                        network_connection_object.add_attribute(object_relation, **attribute)
                    network_connection_object.add_attribute('first-packet-seen', **{'type': 'datetime', 'value': min(timestamps)})
                    network_connection_object.add_attribute('layer{}-protocol'.format(protocols[protocol]), **{'type': 'text', 'value': protocol})
                    self.misp_event.add_object(**network_connection_object)
                    self.references[self.analysisinfo_uuid].append(dict(referenced_uuid=network_connection_object.uuid,
                                                                        relationship_type='initiates'))

    def parse_screenshot(self):
        """Attach the 'interesting' screenshot as an attachment attribute."""
        screenshotdata = self.data['behavior']['screenshotdata']
        if screenshotdata:
            screenshotdata = screenshotdata['interesting']['$']
            attribute = {'type': 'attachment', 'value': 'screenshot.jpg',
                         'data': screenshotdata, 'disable_correlation': True}
            self.misp_event.add_attribute(**attribute)

    def parse_system_behavior(self):
        """Build process objects and dispatch their file/registry activities."""
        system = self.data['behavior']['system']
        if system.get('processes'):
            process_activities = {'fileactivities': self.parse_fileactivities,
                                  'registryactivities': self.parse_registryactivities}
            for process in system['processes']['process']:
                general = process['general']
                process_object = MISPObject('process')
                for feature, relation in process_object_fields.items():
                    process_object.add_attribute(relation, **{'type': 'text', 'value': general[feature]})
                start_time = datetime.strptime('{} {}'.format(general['date'], general['time']), '%d/%m/%Y %H:%M:%S')
                process_object.add_attribute('start-time', **{'type': 'datetime', 'value': start_time})
                self.misp_event.add_object(**process_object)
                for field, to_call in process_activities.items():
                    if process.get(field):
                        to_call(process_object.uuid, process[field])
                self.references[self.analysisinfo_uuid].append(dict(referenced_uuid=process_object.uuid,
                                                                    relationship_type='calls'))
                self.process_references[(general['targetid'], general['path'])] = process_object.uuid

    def parse_fileactivities(self, process_uuid, fileactivities):
        """Queue filename attributes for the file operations of one process."""
        for feature, files in fileactivities.items():
            # ignore unknown features
            if feature not in file_references_mapping:
                continue
            if files:
                for call in files['call']:
                    self.attributes['filename'][call['path']].add((process_uuid, file_references_mapping[feature]))

    def analysis_type(self):
        """Return 'file' or 'url' depending on the analysis target."""
        generalinfo = self.data['generalinfo']
        if generalinfo['target']['sample']:
            return "file"
        elif generalinfo['target']['url']:
            return "url"
        else:
            raise Exception("Unknown analysis type")

    def parse_url_analysis(self):
        """Create the url object describing the analyzed URL."""
        generalinfo = self.data["generalinfo"]
        url_object = MISPObject("url")
        # Network/process references hang off this object for url analyses.
        self.analysisinfo_uuid = url_object.uuid
        url_object.add_attribute("url", generalinfo["target"]["url"])
        self.misp_event.add_object(**url_object)

    def parse_fileinfo(self):
        """Create the sample's file object and dispatch per-architecture parsing."""
        fileinfo = self.data['fileinfo']
        file_object = MISPObject('file')
        # Network/process references hang off this object for file analyses.
        self.analysisinfo_uuid = file_object.uuid
        for field in file_object_fields:
            file_object.add_attribute(field, **{'type': field, 'value': fileinfo[field]})
        for field, mapping in file_object_mapping.items():
            attribute_type, object_relation = mapping
            file_object.add_attribute(object_relation, **{'type': attribute_type, 'value': fileinfo[field]})
        arch = self.data['generalinfo']['arch']
        if arch in arch_type_mapping:
            to_call = arch_type_mapping[arch]
            # parse_apk / parse_elf / parse_pe add file_object themselves.
            getattr(self, to_call)(fileinfo, file_object)
        else:
            self.misp_event.add_object(**file_object)

    def parse_apk(self, fileinfo, file_object):
        """Parse Android apk info into android-permission objects."""
        apkinfo = fileinfo['apk']
        self.misp_event.add_object(**file_object)
        # Group permission leaf names by their shared package prefix.
        permission_lists = defaultdict(list)
        for permission in apkinfo['requiredpermissions']['permission']:
            permission = permission['@name'].split('.')
            permission_lists[' '.join(permission[:-1])].append(permission[-1])
        attribute_type = 'text'
        for comment, permissions in permission_lists.items():
            permission_object = MISPObject('android-permission')
            permission_object.add_attribute('comment', **dict(type=attribute_type, value=comment))
            for permission in permissions:
                permission_object.add_attribute('permission', **dict(type=attribute_type, value=permission))
            self.misp_event.add_object(**permission_object)
            self.references[file_object.uuid].append(dict(referenced_uuid=permission_object.uuid,
                                                          relationship_type='grants'))

    def parse_elf(self, fileinfo, file_object):
        """Parse ELF headers and sections into elf / elf-section objects."""
        elfinfo = fileinfo['elf']
        self.misp_event.add_object(**file_object)
        attribute_type = 'text'
        relationship = 'includes'
        size = 'size-in-bytes'
        for fileinfo in elfinfo['file']:
            elf_object = MISPObject('elf')
            self.references[file_object.uuid].append(dict(referenced_uuid=elf_object.uuid,
                                                          relationship_type=relationship))
            elf = fileinfo['main'][0]['header'][0]
            if elf.get('type'):
                # Haven't seen anything but EXEC yet in the files I tested
                attribute_value = "EXECUTABLE" if elf['type'] == "EXEC (Executable file)" else elf['type']
                elf_object.add_attribute('type', **dict(type=attribute_type, value=attribute_value))
            for feature, relation in elf_object_mapping.items():
                if elf.get(feature):
                    elf_object.add_attribute(relation, **dict(type=attribute_type, value=elf[feature]))
            sections_number = len(fileinfo['sections']['section'])
            elf_object.add_attribute('number-sections', **{'type': 'counter', 'value': sections_number})
            self.misp_event.add_object(**elf_object)
            for section in fileinfo['sections']['section']:
                section_object = MISPObject('elf-section')
                for feature in ('name', 'type'):
                    if section.get(feature):
                        section_object.add_attribute(feature, **dict(type=attribute_type, value=section[feature]))
                if section.get('size'):
                    # Section size is reported as a hex string.
                    section_object.add_attribute(size, **dict(type=size, value=int(section['size'], 16)))
                for flag in section['flagsdesc']:
                    try:
                        attribute_value = elf_section_flags_mapping[flag]
                        section_object.add_attribute('flag', **dict(type=attribute_type, value=attribute_value))
                    except KeyError:
                        print(f'Unknown elf section flag: {flag}')
                        continue
                self.misp_event.add_object(**section_object)
                self.references[elf_object.uuid].append(dict(referenced_uuid=section_object.uuid,
                                                             relationship_type=relationship))

    def parse_pe(self, fileinfo, file_object):
        """Parse PE info into pe / pe-section / authenticode-signerinfo objects."""
        try:
            peinfo = fileinfo['pe']
        except KeyError:
            # No PE data: just keep the plain file object.
            self.misp_event.add_object(**file_object)
            return
        pe_object = MISPObject('pe')
        relationship = 'includes'
        file_object.add_reference(pe_object.uuid, relationship)
        self.misp_event.add_object(**file_object)
        for field, mapping in pe_object_fields.items():
            attribute_type, object_relation = mapping
            pe_object.add_attribute(object_relation, **{'type': attribute_type, 'value': peinfo[field]})
        # Compilation timestamp is a hex epoch followed by a human-readable part.
        pe_object.add_attribute('compilation-timestamp', **{'type': 'datetime', 'value': int(peinfo['timestamp'].split()[0], 16)})
        program_name = fileinfo['filename']
        if peinfo['versions']:
            for feature in peinfo['versions']['version']:
                name = feature['name']
                if name == 'InternalName':
                    program_name = feature['value']
                if name in pe_object_mapping:
                    pe_object.add_attribute(pe_object_mapping[name], **{'type': 'text', 'value': feature['value']})
        sections_number = len(peinfo['sections']['section'])
        pe_object.add_attribute('number-sections', **{'type': 'counter', 'value': sections_number})
        signatureinfo = peinfo['signature']
        if signatureinfo['signed']:
            signerinfo_object = MISPObject('authenticode-signerinfo')
            pe_object.add_reference(signerinfo_object.uuid, 'signed-by')
            self.misp_event.add_object(**pe_object)
            signerinfo_object.add_attribute('program-name', **{'type': 'text', 'value': program_name})
            for feature, mapping in signerinfo_object_mapping.items():
                attribute_type, object_relation = mapping
                signerinfo_object.add_attribute(object_relation, **{'type': attribute_type, 'value': signatureinfo[feature]})
            self.misp_event.add_object(**signerinfo_object)
        else:
            self.misp_event.add_object(**pe_object)
        for section in peinfo['sections']['section']:
            section_object = self.parse_pe_section(section)
            self.references[pe_object.uuid].append(dict(referenced_uuid=section_object.uuid,
                                                        relationship_type=relationship))
            self.misp_event.add_object(**section_object)

    def parse_pe_section(self, section):
        """Return a pe-section object built from one section entry."""
        section_object = MISPObject('pe-section')
        for feature, mapping in pe_section_object_mapping.items():
            if section.get(feature):
                attribute_type, object_relation = mapping
                section_object.add_attribute(object_relation, **{'type': attribute_type, 'value': section[feature]})
        return section_object

    def parse_network_interactions(self):
        """Parse contacted domains, IPs and URLs, linking them to processes."""
        domaininfo = self.data['domaininfo']
        if domaininfo:
            for domain in domaininfo['domain']:
                if domain['@ip'] != 'unknown':
                    # Resolved domain: build a domain-ip object.
                    domain_object = MISPObject('domain-ip')
                    for key, mapping in domain_object_mapping.items():
                        attribute_type, object_relation = mapping
                        domain_object.add_attribute(object_relation,
                                                    **{'type': attribute_type, 'value': domain[key]})
                    self.misp_event.add_object(**domain_object)
                    reference = dict(referenced_uuid=domain_object.uuid, relationship_type='contacts')
                    self.add_process_reference(domain['@targetid'], domain['@currentpath'], reference)
                else:
                    # Unresolved domain: plain domain attribute.
                    attribute = MISPAttribute()
                    attribute.from_dict(**{'type': 'domain', 'value': domain['@name']})
                    self.misp_event.add_attribute(**attribute)
                    reference = dict(referenced_uuid=attribute.uuid, relationship_type='contacts')
                    self.add_process_reference(domain['@targetid'], domain['@currentpath'], reference)
        ipinfo = self.data['ipinfo']
        if ipinfo:
            for ip in ipinfo['ip']:
                attribute = MISPAttribute()
                attribute.from_dict(**{'type': 'ip-dst', 'value': ip['@ip']})
                self.misp_event.add_attribute(**attribute)
                reference = dict(referenced_uuid=attribute.uuid, relationship_type='contacts')
                self.add_process_reference(ip['@targetid'], ip['@currentpath'], reference)
        urlinfo = self.data['urlinfo']
        if urlinfo:
            for url in urlinfo['url']:
                target_id = int(url['@targetid'])
                current_path = url['@currentpath']
                attribute = MISPAttribute()
                attribute_dict = {'type': 'url', 'value': url['@name']}
                if target_id != -1 and current_path != 'unknown':
                    self.references[self.process_references[(target_id, current_path)]].append({
                        'referenced_uuid': attribute.uuid,
                        'relationship_type': 'contacts'
                    })
                else:
                    attribute_dict['comment'] = 'From Memory - Enriched via the joe_import module'
                attribute.from_dict(**attribute_dict)
                self.misp_event.add_attribute(**attribute)

    def parse_registryactivities(self, process_uuid, registryactivities):
        """Parse registry operations into regkey attributes / registry-key objects."""
        if registryactivities['keyCreated']:
            for call in registryactivities['keyCreated']['call']:
                self.attributes['regkey'][call['path']].add((process_uuid, 'creates'))
        for feature, relationship in registry_references_mapping.items():
            if registryactivities[feature]:
                for call in registryactivities[feature]['call']:
                    registry_key = MISPObject('registry-key')
                    for field, mapping in regkey_object_mapping.items():
                        attribute_type, object_relation = mapping
                        registry_key.add_attribute(object_relation, **{'type': attribute_type, 'value': call[field]})
                    registry_key.add_attribute('data-type', **{'type': 'text', 'value': 'REG_{}'.format(call['type'].upper())})
                    self.misp_event.add_object(**registry_key)
                    self.references[process_uuid].append(dict(referenced_uuid=registry_key.uuid,
                                                              relationship_type=relationship))

    def add_process_reference(self, target, currentpath, reference):
        """Queue a reference on the matching process object, if any."""
        try:
            self.references[self.process_references[(int(target), currentpath)]].append(reference)
        except KeyError:
            # No matching process: fall back to the main file/url object.
            self.references[self.analysisinfo_uuid].append(reference)

    def create_attribute(self, attribute_type, attribute_value):
        """Add a plain attribute to the event and return its uuid."""
        attribute = MISPAttribute()
        attribute.from_dict(**{'type': attribute_type, 'value': attribute_value})
        self.misp_event.add_attribute(**attribute)
        return attribute.uuid

    def finalize_results(self):
        """Resolve queued references and export the event as a results dict."""
        if self.references:
            self.build_references()
        event = json.loads(self.misp_event.to_json())['Event']
        self.results = {key: event[key] for key in ('Attribute', 'Object', 'Tag') if (key in event and event[key])}

    @staticmethod
    def parse_timestamp(timestamp):
        """Normalize a packet timestamp so strptime can parse its microseconds."""
        timestamp = timestamp.split(':')
        # Drop trailing text after the seconds and cap precision at 6 digits.
        timestamp[-1] = str(round(float(timestamp[-1].split(' ')[0]), 6))
        return ':'.join(timestamp)

    @staticmethod
    def prefetch_attributes_data(connection):
        """Map a (srcip, dstip, srcport, dstport) tuple to attribute dicts."""
        attributes = {}
        for field, value in zip(network_behavior_fields, connection):
            attribute_type, object_relation = network_connection_object_mapping[field]
            attributes[object_relation] = {'type': attribute_type, 'value': value}
        return attributes

View File

@ -1,11 +1,17 @@
from . import _vmray # noqa
import os
import sys
sys.path.append('{}/lib'.format('/'.join((os.path.realpath(__file__)).split('/')[:-3])))
__all__ = ['vmray_submit', 'bgpranking', 'circl_passivedns', 'circl_passivessl',
'countrycode', 'cve', 'dns', 'btc_steroids', 'domaintools', 'eupi',
__all__ = ['cuckoo_submit', 'vmray_submit', 'bgpranking', 'circl_passivedns', 'circl_passivessl',
'countrycode', 'cve', 'cve_advanced', 'dns', 'btc_steroids', 'domaintools', 'eupi',
'farsight_passivedns', 'ipasn', 'passivetotal', 'sourcecache', 'virustotal',
'whois', 'shodan', 'reversedns', 'geoip_country', 'wiki', 'iprep',
'threatminer', 'otx', 'threatcrowd', 'vulndb', 'crowdstrike_falcon',
'yara_syntax_validator', 'hashdd', 'onyphe', 'onyphe_full', 'rbl',
'xforceexchange', 'sigma_syntax_validator', 'stix2_pattern_syntax_validator',
'sigma_queries', 'dbl_spamhaus', 'vulners', 'yara_query', 'macaddress_io',
'intel471']
'intel471', 'backscatter_io', 'btc_scam_check', 'hibp', 'greynoise', 'macvendors',
'qrcode', 'ocr-enrich', 'pdf-enrich', 'docx-enrich', 'xlsx-enrich', 'pptx-enrich',
'ods-enrich', 'odt-enrich', 'joesandbox_submit', 'joesandbox_query', 'urlhaus',
'virustotal_public']

View File

@ -0,0 +1,74 @@
# -*- coding: utf-8 -*-
"""Backscatter.io Module."""
import json
try:
from backscatter import Backscatter
except ImportError:
print("Backscatter.io library not installed.")
misperrors = {'error': 'Error'}
mispattributes = {'input': ['ip-src', 'ip-dst'], 'output': ['freetext']}
moduleinfo = {'version': '1', 'author': 'brandon@backscatter.io',
              'description': 'Backscatter.io module to bring mass-scanning observations into MISP.',
              'module-type': ['expansion', 'hover']}
moduleconfig = ['api_key']

# Playbook describing which Backscatter.io services apply to which inputs.
query_playbook = [
    {'inputs': ['ip-src', 'ip-dst'],
     'services': ['observations', 'enrichment'],
     'name': 'generic'}
]


def check_query(request):
    """Check the incoming request for a valid configuration.

    Returns a profile dict with 'success': True, the config and the value to
    query, or {'success': False} with misperrors['error'] set accordingly.
    """
    output = {'success': False}
    config = request.get('config', None)
    if not config:
        misperrors['error'] = "Configuration is missing from the request."
        return output
    for item in moduleconfig:
        if config.get(item, None):
            continue
        misperrors['error'] = "Backscatter.io authentication is missing."
        return output
    # Invalid only when NEITHER ip-src NOR ip-dst is present. The previous
    # condition (`not get('ip-src') and get('ip-dst')`) wrongly rejected
    # requests that carried only a valid ip-dst attribute.
    if not request.get('ip-src') and not request.get('ip-dst'):
        misperrors['error'] = "Unsupported attributes type."
        return output
    profile = {'success': True, 'config': config, 'playbook': 'generic'}
    if 'ip-src' in request:
        profile.update({'value': request.get('ip-src')})
    else:
        profile.update({'value': request.get('ip-dst')})
    return profile
def handler(q=False):
    """Query Backscatter.io observations for the IP carried by the request.

    Returns a freetext result dict on success, or misperrors on any failure
    (invalid request, API error, or unexpected exception).
    """
    if not q:
        return q
    checks = check_query(json.loads(q))
    if not checks['success']:
        return misperrors
    try:
        client = Backscatter(checks['config']['api_key'])
        response = client.get_observations(query=checks['value'], query_type='ip')
        if not response['success']:
            misperrors['error'] = '%s: %s' % (response['error'], response['message'])
            return misperrors
        output = {'results': [{'types': mispattributes['output'], 'values': [str(response)]}]}
    except Exception as e:
        misperrors['error'] = str(e)
        return misperrors
    return output
def introspection():
    """Return the module's supported input/output attribute types."""
    return mispattributes
def version():
    """Return the module metadata, including its config options."""
    moduleinfo['config'] = moduleconfig
    return moduleinfo

View File

@ -0,0 +1,44 @@
import json
import sys
# Build a DNS resolver with short timeouts: the module answers synchronous
# hover requests, so a slow DNSBL must not block the UI.
try:
    from dns.resolver import Resolver, NXDOMAIN
    from dns.name import LabelTooLong
    resolver = Resolver()
    resolver.timeout = 1
    resolver.lifetime = 1
except ImportError:
    # Typo fix in the error message: "in missing" -> "is missing".
    sys.exit("dnspython3 is missing. use 'pip install dnspython3' to install it.")
# MISP module metadata and configuration.
misperrors = {'error': 'Error'}
mispattributes = {'input': ['btc'], 'output': ['text']}
moduleinfo = {'version': '0.1', 'author': 'Christian Studer',
              'description': 'Checks if a BTC address has been abused.',
              'module-type': ['hover']}
moduleconfig = []
# DNSBL zone: a listed BTC address resolves as <address>.bl.btcblack.it.
url = 'bl.btcblack.it'
def handler(q=False):
    """Look up a BTC address on the bl.btcblack.it DNSBL.

    Returns a text result: the TXT record content when the address is
    listed, or an explanatory message otherwise.
    """
    if q is False:
        return False
    request = json.loads(q)
    # Robustness fix: a request without a 'btc' attribute used to raise
    # KeyError; report a proper module error instead.
    btc = request.get('btc')
    if not btc:
        misperrors['error'] = 'Unsupported attributes type'
        return misperrors
    query = f"{btc}.{url}"
    try:
        # TXT records come back quoted; strip the surrounding quotes.
        result = ' - '.join([str(r) for r in resolver.query(query, 'TXT')])[1:-1]
    except NXDOMAIN:
        result = f"{btc} is not known as a scam address."
    except LabelTooLong:
        result = f"{btc} is probably not a valid BTC address."
    return {'results': [{'types': mispattributes['output'], 'values': result}]}
def introspection():
    """Return the module's supported input/output attribute types."""
    return mispattributes
def version():
    """Return the module metadata, including its config options."""
    moduleinfo['config'] = moduleconfig
    return moduleinfo

View File

@ -1,4 +1,3 @@
import sys
import json
import requests
import time
@ -91,6 +90,7 @@ def mprint(input):
def handler(q=False):
global result_text
global conversion_rates
result_text = ""
# start_time = time.time()
# now = time.time()
if q is False:
@ -105,7 +105,6 @@ def handler(q=False):
btc = request['btc']
else:
return False
mprint("\nAddress:\t" + btc)
try:
req = requests.get(blockchain_all.format(btc, "&limit=50"))
@ -113,8 +112,18 @@ def handler(q=False):
except Exception:
# print(e)
print(req.text)
result_text = ""
sys.exit(1)
result_text = "Not a valid BTC address"
r = {
'results': [
{
'types': ['text'],
'values':[
str(result_text)
]
}
]
}
return r
n_tx = jreq['n_tx']
balance = float(jreq['final_balance'] / 100000000)

View File

@ -0,0 +1,153 @@
import base64
import io
import json
import logging
import requests
import sys
import urllib.parse
import zipfile
from requests.exceptions import RequestException
log = logging.getLogger("cuckoo_submit")
log.setLevel(logging.DEBUG)
sh = logging.StreamHandler(sys.stdout)
sh.setLevel(logging.DEBUG)
fmt = logging.Formatter(
"%(asctime)s - %(name)s - %(levelname)s - %(message)s"
)
sh.setFormatter(fmt)
log.addHandler(sh)
moduleinfo = {
"version": "0.1", "author": "Evert Kors",
"description": "Submit files and URLs to Cuckoo Sandbox",
"module-type": ["expansion", "hover"]
}
misperrors = {"error": "Error"}
moduleconfig = ["api_url", "api_key"]
mispattributes = {
"input": ["attachment", "malware-sample", "url", "domain"],
"output": ["text"]
}
class APIKeyError(RequestException):
    """Raised if the Cuckoo API returns a 401. This means no or an invalid
    bearer token was supplied."""
    # Subclasses RequestException so callers can also catch it generically.
    pass
class CuckooAPI(object):
    """Minimal client for the Cuckoo Sandbox REST API (file/URL submission)."""

    def __init__(self, api_url, api_key=""):
        """Store the API key and normalize the URL to include a scheme."""
        self.api_key = api_key
        # Default to HTTPS when no scheme was supplied.
        if not api_url.startswith("http"):
            api_url = "https://{}".format(api_url)
        self.api_url = api_url

    def _post_api(self, endpoint, files=None, data=None):
        """POST to an API endpoint; return the parsed JSON or None on failure.

        Raises APIKeyError when the API answers HTTP 401.
        """
        # Copy instead of mutating the caller's dict. This also fixes the
        # mutable-default-argument bug of the previous `data={}` signature,
        # where the shared default dict was mutated on every call.
        data = dict(data) if data else {}
        data["owner"] = "MISP"
        try:
            response = requests.post(
                urllib.parse.urljoin(self.api_url, endpoint),
                files=files, data=data,
                headers={"Authorization": "Bearer {}".format(self.api_key)}
            )
        except RequestException as e:
            log.error("Failed to submit sample to Cuckoo Sandbox. %s", e)
            return None
        if response.status_code == 401:
            raise APIKeyError("Invalid or no Cuckoo Sandbox API key provided")
        if response.status_code != 200:
            log.error("Invalid Cuckoo API response")
            return None
        return response.json()

    def create_task(self, filename, fp):
        """Submit a file; return the task id, or False on failure."""
        response = self._post_api(
            "/tasks/create/file", files={"file": (filename, fp)}
        )
        if not response:
            return False
        return response["task_id"]

    def create_url(self, url):
        """Submit a URL; return the task id, or False on failure."""
        response = self._post_api(
            "/tasks/create/url", data={"url": url}
        )
        if not response:
            return False
        return response["task_id"]
def handler(q=False):
    """Submit the attribute in ``q`` to Cuckoo Sandbox; return a text result
    holding the created task id, or a misperrors dict on failure."""
    if q is False:
        return False
    request = json.loads(q)
    # The API key may legitimately be empty (auth can be disabled in the
    # Cuckoo API settings), but the URL is mandatory.
    config = request["config"]
    base_url = config.get("api_url")
    token = config.get("api_key", "")
    if not base_url:
        misperrors["error"] = "No Cuckoo API URL provided"
        return misperrors
    target_url = request.get("url") or request.get("domain")
    sample_name = None
    payload = request.get("data")
    if payload:
        payload = base64.b64decode(payload)
        if "malware-sample" in request:
            # malware-sample values look like "<filename>|<md5>"; the payload
            # itself is a password-protected ("infected") zip container.
            sample_name = request.get("malware-sample").split("|", 1)[0]
            with zipfile.ZipFile(io.BytesIO(payload)) as zipf:
                payload = zipf.read(zipf.namelist()[0], pwd=b"infected")
        elif "attachment" in request:
            sample_name = request.get("attachment")
    api = CuckooAPI(api_url=base_url, api_key=token)
    task_id = None
    try:
        if target_url:
            log.debug("Submitting URL to Cuckoo Sandbox %s", base_url)
            task_id = api.create_url(target_url)
        elif payload and sample_name:
            log.debug("Submitting file to Cuckoo Sandbox %s", base_url)
            task_id = api.create_task(
                filename=sample_name, fp=io.BytesIO(payload)
            )
    except APIKeyError as e:
        misperrors["error"] = "Failed to submit to Cuckoo: {}".format(e)
        return misperrors
    if not task_id:
        misperrors["error"] = "File or URL submission failed"
        return misperrors
    return {
        "results": [
            {"types": "text", "values": "Cuckoo task id: {}".format(task_id)}
        ]
    }
def introspection():
    # Advertise the attribute types this module accepts and produces.
    return mispattributes
def version():
    # Report module metadata, including the expected config fields.
    moduleinfo["config"] = moduleconfig
    return moduleinfo

View File

@ -0,0 +1,75 @@
from pymisp import MISPEvent, MISPObject
import json
import requests
misperrors = {'error': 'Error'}
mispattributes = {'input': ['vulnerability'], 'format': 'misp_standard'}
moduleinfo = {'version': '1', 'author': 'Christian Studer',
'description': 'An expansion module to enrich a CVE attribute with the vulnerability information.',
'module-type': ['expansion', 'hover']}
moduleconfig = []
cveapi_url = 'https://cve.circl.lu/api/cve/'
class VulnerabilityParser():
    """Builds a MISP event containing a 'vulnerability' object from a
    cve.circl.lu API response."""
    def __init__(self, vulnerability):
        self.vulnerability = vulnerability
        self.misp_event = MISPEvent()
        # Maps API fields to (MISP attribute type, object relation).
        self.vulnerability_mapping = {
            'id': ('text', 'id'), 'summary': ('text', 'summary'),
            'vulnerable_configuration_cpe_2_2': ('text', 'vulnerable_configuration'),
            'Modified': ('datetime', 'modified'), 'Published': ('datetime', 'published'),
            'references': ('link', 'references'), 'cvss': ('float', 'cvss-score')}
    def get_result(self):
        # Serialise the event and keep only the non-empty parts MISP expects.
        event = json.loads(self.misp_event.to_json())['Event']
        results = {key: event[key] for key in ('Attribute', 'Object') if (key in event and event[key])}
        return {'results': results}
    def parse_vulnerability_information(self):
        vulnerability_object = MISPObject('vulnerability')
        # Single-valued fields.
        for feature in ('id', 'summary', 'Modified', 'cvss'):
            value = self.vulnerability.get(feature)
            if value:
                attribute_type, relation = self.vulnerability_mapping[feature]
                vulnerability_object.add_attribute(relation, **{'type': attribute_type, 'value': value})
        # A publication date implies the 'Published' state.
        if 'Published' in self.vulnerability:
            vulnerability_object.add_attribute('published', **{'type': 'datetime', 'value': self.vulnerability['Published']})
            vulnerability_object.add_attribute('state', **{'type': 'text', 'value': 'Published'})
        # Multi-valued fields: one attribute per value.
        for feature in ('references', 'vulnerable_configuration_cpe_2_2'):
            if feature in self.vulnerability:
                attribute_type, relation = self.vulnerability_mapping[feature]
                for value in self.vulnerability[feature]:
                    vulnerability_object.add_attribute(relation, **{'type': attribute_type, 'value': value})
        self.misp_event.add_object(**vulnerability_object)
def handler(q=False):
    """Query cve.circl.lu for the requested vulnerability id and return the
    parsed data in MISP standard format, or a misperrors dict on failure."""
    if q is False:
        return False
    request = json.loads(q)
    attribute = request.get('attribute')
    if attribute.get('type') != 'vulnerability':
        misperrors['error'] = 'Vulnerability id missing.'
        return misperrors
    r = requests.get("{}{}".format(cveapi_url, attribute['value']))
    if r.status_code == 200:
        vulnerability = r.json()
        if not vulnerability:
            misperrors['error'] = 'Non existing CVE'
            # Bug fix: return the error dict, not the bare message string,
            # so MISP can report the failure properly.
            return misperrors
    else:
        misperrors['error'] = 'cve.circl.lu API not accessible'
        # Bug fix: same as above — return the dict, not the string.
        return misperrors
    parser = VulnerabilityParser(vulnerability)
    parser.parse_vulnerability_information()
    return parser.get_result()
def introspection():
    # Advertise the attribute types this module accepts and produces.
    return mispattributes
def version():
    # Report module metadata, including the expected config fields.
    moduleinfo['config'] = moduleconfig
    return moduleinfo

View File

@ -0,0 +1,61 @@
import json
import binascii
import np
import docx
import io
misperrors = {'error': 'Error'}
mispattributes = {'input': ['attachment'],
'output': ['freetext', 'text']}
moduleinfo = {'version': '0.1', 'author': 'Sascha Rommelfangen',
'description': '.docx to freetext-import IOC extractor',
'module-type': ['expansion']}
moduleconfig = []
def handler(q=False):
    """Extract all text from an attached .docx file (paragraphs and table
    cells) and return it as 'freetext' and 'text' results."""
    if q is False:
        return False
    request = json.loads(q)
    filename = request['attachment']
    try:
        docx_array = np.frombuffer(binascii.a2b_base64(request['data']), np.uint8)
    except Exception as e:
        print(e)
        err = "Couldn't fetch attachment (JSON 'data' is empty). Are you using the 'Query enrichment' action?"
        misperrors['error'] = err
        print(err)
        return misperrors
    doc_file = io.BytesIO(docx_array)
    try:
        document = docx.Document(doc_file)
        fragments = []
        # Body paragraphs first, then every paragraph in every table cell.
        for paragraph in document.paragraphs:
            print(paragraph.text)
            fragments.append(paragraph.text)
        for table in document.tables:
            for row in table.rows:
                for cell in row.cells:
                    for paragraph in cell.paragraphs:
                        print(paragraph.text)
                        fragments.append(paragraph.text)
        doc_content = "".join("\n" + fragment for fragment in fragments)
        print(doc_content)
        comment = ".docx-to-text from file " + filename
        return {'results': [{'types': ['freetext'], 'values': doc_content, 'comment': comment},
                            {'types': ['text'], 'values': doc_content, 'comment': comment}]}
    except Exception as e:
        print(e)
        err = "Couldn't analyze file as .docx. Error was: " + str(e)
        misperrors['error'] = err
        return misperrors
def introspection():
    # Advertise the attribute types this module accepts and produces.
    return mispattributes
def version():
    # Report module metadata, including the expected config fields.
    moduleinfo['config'] = moduleconfig
    return moduleinfo

View File

@ -0,0 +1,43 @@
import requests
import json
misperrors = {'error': 'Error'}
mispattributes = {'input': ['ip-dst', 'ip-src'], 'output': ['text']}
moduleinfo = {'version': '0.1', 'author': 'Aurélien Schwab <aurelien.schwab+dev@gmail.com>', 'description': 'Module to access GreyNoise.io API.', 'module-type': ['hover']}
moduleconfig = ['user-agent'] # TODO take this into account in the code
greynoise_api_url = 'http://api.greynoise.io:8888/v1/query/ip'
default_user_agent = 'MISP-Module'
def handler(q=False):
    """Look up an IP address on the GreyNoise API; return the raw JSON record,
    'No data' when unknown, or a misperrors dict on API failure."""
    if q is False:
        return False
    request = json.loads(q)
    # Accept either ip-dst or ip-src; first match wins.
    for input_type in mispattributes['input']:
        if input_type in request:
            ip = request[input_type]
            break
    else:
        misperrors['error'] = "Unsupported attributes type"
        return misperrors
    data = {'ip': ip}
    r = requests.post(greynoise_api_url, data=data, headers={'user-agent': default_user_agent})  # Real request
    if r.status_code == 200:  # OK (record found)
        response = json.loads(r.text)
        if response:
            return {'results': [{'types': mispattributes['output'], 'values': response}]}
        # NOTE(review): a 200 with an empty body falls through and returns
        # None, as in the original — confirm this is the intended behavior.
    elif r.status_code == 404:  # Not found (not an error)
        return {'results': [{'types': mispattributes['output'], 'values': 'No data'}]}
    else:  # Real error
        misperrors['error'] = 'GreyNoise API not accessible (HTTP ' + str(r.status_code) + ')'
        # Bug fix: return the error dict, not the bare message string, so
        # MISP can report the failure properly.
        return misperrors
def introspection():
    # Advertise the attribute types this module accepts and produces.
    return mispattributes
def version():
    # Report module metadata, including the expected config fields.
    moduleinfo['config'] = moduleconfig
    return moduleinfo

View File

@ -0,0 +1,43 @@
import requests
import json
misperrors = {'error': 'Error'}
mispattributes = {'input': ['email-dst', 'email-src'], 'output': ['text']} # All mails as input
moduleinfo = {'version': '0.1', 'author': 'Aurélien Schwab', 'description': 'Module to access haveibeenpwned.com API.', 'module-type': ['hover']}
moduleconfig = ['user-agent'] # TODO take this into account in the code
haveibeenpwned_api_url = 'https://api.haveibeenpwned.com/api/v2/breachedaccount/'
default_user_agent = 'MISP-Module' # User agent (must be set, requiered by API))
def handler(q=False):
    """Check an email address against the haveibeenpwned.com breach API;
    return the breach list, 'OK (Not Found)', or a misperrors dict."""
    if q is False:
        return False
    request = json.loads(q)
    # Accept either email-dst or email-src; first match wins.
    for input_type in mispattributes['input']:
        if input_type in request:
            email = request[input_type]
            break
    else:
        misperrors['error'] = "Unsupported attributes type"
        return misperrors
    r = requests.get(haveibeenpwned_api_url + email, headers={'user-agent': default_user_agent})  # Real request
    if r.status_code == 200:  # OK (record found)
        breaches = json.loads(r.text)
        if breaches:
            return {'results': [{'types': mispattributes['output'], 'values': breaches}]}
        # NOTE(review): a 200 with an empty body falls through and returns
        # None, as in the original — confirm this is the intended behavior.
    elif r.status_code == 404:  # Not found (not an error)
        return {'results': [{'types': mispattributes['output'], 'values': 'OK (Not Found)'}]}
    else:  # Real error
        misperrors['error'] = 'haveibeenpwned.com API not accessible (HTTP ' + str(r.status_code) + ')'
        # Bug fix: return the error dict, not the bare message string, so
        # MISP can report the failure properly.
        return misperrors
def introspection():
    # Advertise the attribute types this module accepts and produces.
    return mispattributes
def version():
    # Report module metadata, including the expected config fields.
    moduleinfo['config'] = moduleconfig
    return moduleinfo

View File

@ -0,0 +1,58 @@
# -*- coding: utf-8 -*-
import jbxapi
import json
from joe_parser import JoeParser
misperrors = {'error': 'Error'}
mispattributes = {'input': ['link'], 'format': 'misp_standard'}
moduleinfo = {'version': '0.1', 'author': 'Christian Studer',
'description': 'Query Joe Sandbox API with a report URL to get the parsed data.',
'module-type': ['expansion']}
moduleconfig = ['apiurl', 'apikey']
def handler(q=False):
    """Fetch the most relevant finished Joe Sandbox analysis for a submission
    URL and return the parsed MISP-standard results."""
    if q is False:
        return False
    request = json.loads(q)
    config = request['config']
    apiurl = config.get('apiurl') or 'https://jbxcloud.joesecurity.org/api'
    apikey = config.get('apikey')
    if not apikey:
        return {'error': 'No API key provided'}
    url = request['attribute']['value']
    if "/submissions/" not in url:
        return {'error': "The URL does not point to a Joe Sandbox analysis."}
    # The URL has the format https://example.net/submissions/12345
    submission_id = url.rsplit('/', 1)[-1]
    joe = jbxapi.JoeSandbox(apiurl=apiurl, apikey=apikey, user_agent='MISP joesandbox_query')
    try:
        joe_info = joe.submission_info(submission_id)
    except jbxapi.ApiError as e:
        return {'error': str(e)}
    if joe_info["status"] != "finished":
        return {'error': "The analysis has not finished yet."}
    most_relevant = joe_info['most_relevant_analysis']
    if most_relevant is None:
        return {'error': "No analysis belongs to this submission."}
    parser = JoeParser()
    raw_report = joe.analysis_download(most_relevant['webid'], 'jsonfixed')[1]
    parser.parse_data(json.loads(raw_report)['analysis'])
    parser.finalize_results()
    return {'results': parser.results}
def introspection():
    # Advertise the attribute types this module accepts and produces.
    return mispattributes
def version():
    # Report module metadata, including the expected config fields.
    moduleinfo['config'] = moduleconfig
    return moduleinfo

View File

@ -0,0 +1,140 @@
import jbxapi
import base64
import io
import json
import logging
import sys
import zipfile
import re
from urllib.parse import urljoin
log = logging.getLogger(__name__)
log.setLevel(logging.DEBUG)
sh = logging.StreamHandler(sys.stdout)
sh.setLevel(logging.DEBUG)
fmt = logging.Formatter(
"%(asctime)s - %(name)s - %(levelname)s - %(message)s"
)
sh.setFormatter(fmt)
log.addHandler(sh)
moduleinfo = {
"version": "1.0",
"author": "Joe Security LLC",
"description": "Submit files and URLs to Joe Sandbox",
"module-type": ["expansion", "hover"]
}
moduleconfig = [
"apiurl",
"apikey",
"accept-tac",
"report-cache",
"systems",
]
mispattributes = {
"input": ["attachment", "malware-sample", "url", "domain"],
"output": ["link"],
}
def handler(q=False):
    """Submit a URL/domain or a file attribute to Joe Sandbox and return a
    link to the resulting analysis, or an error dict."""
    if q is False:
        return False
    request = json.loads(q)
    apiurl = request["config"].get("apiurl") or "https://jbxcloud.joesecurity.org/api"
    apikey = request["config"].get("apikey")
    # systems: optional list of analysis machines, separated by whitespace,
    # commas or semicolons.
    systems = request["config"].get("systems") or ""
    systems = [s.strip() for s in re.split(r"[\s,;]", systems) if s.strip()]
    try:
        accept_tac = _parse_bool(request["config"].get("accept-tac"), "accept-tac")
        report_cache = _parse_bool(request["config"].get("report-cache"), "report-cache")
    except _ParseError as e:
        return {"error": str(e)}
    params = {
        "report-cache": report_cache,
        "systems": systems,
    }
    if not apikey:
        return {"error": "No API key provided"}
    joe = jbxapi.JoeSandbox(apiurl=apiurl, apikey=apikey, user_agent="MISP joesandbox_submit", accept_tac=accept_tac)
    try:
        is_url_submission = "url" in request or "domain" in request
        if is_url_submission:
            url = request.get("url") or request.get("domain")
            log.info("Submitting URL: %s", url)
            result = joe.submit_url(url, params=params)
        else:
            if "malware-sample" in request:
                # malware-sample values look like "<filename>|<md5>" and the
                # data is wrapped in a password-protected zip.
                filename = request.get("malware-sample").split("|", 1)[0]
                data = _decode_malware(request["data"], True)
            elif "attachment" in request:
                filename = request["attachment"]
                data = _decode_malware(request["data"], False)
            else:
                # Bug fix: the original fell through with `filename`/`data`
                # undefined, raising NameError on unexpected input.
                return {"error": "No URL or file found in the request."}
            data_fp = io.BytesIO(data)
            log.info("Submitting sample: %s", filename)
            result = joe.submit_sample((filename, data_fp), params=params)
        assert "submission_id" in result
    except jbxapi.JoeException as e:
        return {"error": str(e)}
    # The apiurl ends in /api, so ../submissions resolves to the web UI path.
    link_to_analysis = urljoin(apiurl, "../submissions/{}".format(result["submission_id"]))
    return {
        "results": [{
            "types": "link",
            "categories": "External analysis",
            "values": link_to_analysis,
        }]
    }
def introspection():
    # Advertise the attribute types this module accepts and produces.
    return mispattributes
def version():
    # Report module metadata, including the expected config fields.
    moduleinfo["config"] = moduleconfig
    return moduleinfo
def _decode_malware(data, is_encrypted):
data = base64.b64decode(data)
if is_encrypted:
with zipfile.ZipFile(io.BytesIO(data)) as zipf:
data = zipf.read(zipf.namelist()[0], pwd=b"infected")
return data
class _ParseError(Exception):
pass
def _parse_bool(value, name="bool"):
if value is None or value == "":
return None
if value == "true":
return True
if value == "false":
return False
raise _ParseError("Cannot parse {}. Must be 'true' or 'false'".format(name))

View File

@ -0,0 +1,42 @@
import requests
import json
misperrors = {'error': 'Error'}
mispattributes = {'input': ['mac-address'], 'output': ['text']}
moduleinfo = {'version': '0.1', 'author': 'Aurélien Schwab', 'description': 'Module to access Macvendors API.', 'module-type': ['hover']}
moduleconfig = ['user-agent'] # TODO take this into account in the code
macvendors_api_url = 'https://api.macvendors.com/'
default_user_agent = 'MISP-Module'
def handler(q=False):
    """Resolve a MAC address vendor via api.macvendors.com; return the vendor
    name, 'Not found', or a misperrors dict on API failure."""
    if q is False:
        return False
    request = json.loads(q)
    # Only mac-address is accepted; first match wins.
    for input_type in mispattributes['input']:
        if input_type in request:
            mac = request[input_type]
            break
    else:
        misperrors['error'] = "Unsupported attributes type"
        return misperrors
    r = requests.get(macvendors_api_url + mac, headers={'user-agent': default_user_agent})  # Real request
    if r.status_code == 200:  # OK (record found)
        response = r.text
        if response:
            return {'results': [{'types': mispattributes['output'], 'values': response}]}
        # NOTE(review): a 200 with an empty body falls through and returns
        # None, as in the original — confirm this is the intended behavior.
    elif r.status_code == 404:  # Not found (not an error)
        return {'results': [{'types': mispattributes['output'], 'values': 'Not found'}]}
    else:  # Real error
        misperrors['error'] = 'MacVendors API not accessible (HTTP ' + str(r.status_code) + ')'
        # Bug fix: return the error dict, not the bare message string, so
        # MISP can report the failure properly.
        return misperrors
def introspection():
    # Advertise the attribute types this module accepts and produces.
    return mispattributes
def version():
    # Report module metadata, including the expected config fields.
    moduleinfo['config'] = moduleconfig
    return moduleinfo

View File

@ -0,0 +1,50 @@
import json
import binascii
import cv2
import np
import pytesseract
misperrors = {'error': 'Error'}
mispattributes = {'input': ['attachment'],
'output': ['freetext', 'text']}
moduleinfo = {'version': '0.1', 'author': 'Sascha Rommelfangen',
'description': 'OCR decoder',
'module-type': ['expansion']}
moduleconfig = []
def handler(q=False):
    """Run OCR (Tesseract) on an attached image and return the decoded text
    as 'freetext' and 'text' results."""
    if q is False:
        return False
    q = json.loads(q)
    filename = q['attachment']
    try:
        img_array = np.frombuffer(binascii.a2b_base64(q['data']), np.uint8)
    except Exception as e:
        print(e)
        err = "Couldn't fetch attachment (JSON 'data' is empty). Are you using the 'Query enrichment' action?"
        misperrors['error'] = err
        print(err)
        return misperrors
    # Decode the raw bytes into an OpenCV BGR image. (Dropped the original's
    # dead `image = img_array` assignment that was immediately overwritten.)
    image = cv2.imdecode(img_array, cv2.IMREAD_COLOR)
    try:
        decoded = pytesseract.image_to_string(image)
        # Bug fix: the second comment previously said "ORC" instead of "OCR".
        return {'results': [{'types': ['freetext'], 'values': decoded, 'comment': "OCR from file " + filename},
                            {'types': ['text'], 'values': decoded, 'comment': "OCR from file " + filename}]}
    except Exception as e:
        print(e)
        err = "Couldn't analyze file type. Only images are supported right now."
        misperrors['error'] = err
        return misperrors
def introspection():
    # Advertise the attribute types this module accepts and produces.
    return mispattributes
def version():
    # Report module metadata, including the expected config fields.
    moduleinfo['config'] = moduleconfig
    return moduleinfo

View File

@ -0,0 +1,56 @@
import json
import binascii
import np
import ezodf
import pandas_ods_reader
import io
misperrors = {'error': 'Error'}
mispattributes = {'input': ['attachment'],
'output': ['freetext', 'text']}
moduleinfo = {'version': '0.1', 'author': 'Sascha Rommelfangen',
'description': '.ods to freetext-import IOC extractor',
'module-type': ['expansion']}
moduleconfig = []
def handler(q=False):
    """Extract text from every sheet of an attached .ods spreadsheet and
    return it as 'freetext' and 'text' results."""
    if q is False:
        return False
    request = json.loads(q)
    filename = request['attachment']
    try:
        ods_array = np.frombuffer(binascii.a2b_base64(request['data']), np.uint8)
    except Exception as e:
        print(e)
        err = "Couldn't fetch attachment (JSON 'data' is empty). Are you using the 'Query enrichment' action?"
        misperrors['error'] = err
        print(err)
        return misperrors
    ods_file = io.BytesIO(ods_array)
    # Open once with ezodf just to count the sheets.
    doc = ezodf.opendoc(ods_file)
    sheet_count = len(doc.sheets)
    ods_content = ""
    try:
        for sheet_index in range(sheet_count):
            sheet = pandas_ods_reader.read_ods(ods_file, sheet_index, headers=False)
            ods_content = ods_content + "\n" + sheet.to_string(max_rows=None)
        print(ods_content)
        comment = ".ods-to-text from file " + filename
        return {'results': [{'types': ['freetext'], 'values': ods_content, 'comment': comment},
                            {'types': ['text'], 'values': ods_content, 'comment': comment}]}
    except Exception as e:
        print(e)
        err = "Couldn't analyze file as .ods. Error was: " + str(e)
        misperrors['error'] = err
        return misperrors
def introspection():
    # Advertise the attribute types this module accepts and produces.
    return mispattributes
def version():
    # Report module metadata, including the expected config fields.
    moduleinfo['config'] = moduleconfig
    return moduleinfo

View File

@ -0,0 +1,51 @@
import json
import binascii
import np
from ODTReader.odtreader import odtToText
import io
misperrors = {'error': 'Error'}
mispattributes = {'input': ['attachment'],
'output': ['freetext', 'text']}
moduleinfo = {'version': '0.1', 'author': 'Sascha Rommelfangen',
'description': '.odt to freetext-import IOC extractor',
'module-type': ['expansion']}
moduleconfig = []
def handler(q=False):
    """Extract text from an attached .odt document via ODTReader and return
    it as 'freetext' and 'text' results."""
    if q is False:
        return False
    request = json.loads(q)
    filename = request['attachment']
    try:
        odt_array = np.frombuffer(binascii.a2b_base64(request['data']), np.uint8)
    except Exception as e:
        print(e)
        err = "Couldn't fetch attachment (JSON 'data' is empty). Are you using the 'Query enrichment' action?"
        misperrors['error'] = err
        print(err)
        return misperrors
    odt_file = io.BytesIO(odt_array)
    try:
        odt_content = odtToText(odt_file)
        print(odt_content)
        comment = ".odt-to-text from file " + filename
        return {'results': [{'types': ['freetext'], 'values': odt_content, 'comment': comment},
                            {'types': ['text'], 'values': odt_content, 'comment': comment}]}
    except Exception as e:
        print(e)
        err = "Couldn't analyze file as .odt. Error was: " + str(e)
        misperrors['error'] = err
        return misperrors
def introspection():
    # Advertise the attribute types this module accepts and produces.
    return mispattributes
def version():
    # Report module metadata, including the expected config fields.
    moduleinfo['config'] = moduleconfig
    return moduleinfo

View File

@ -0,0 +1,48 @@
import json
import binascii
import np
import pdftotext
import io
misperrors = {'error': 'Error'}
mispattributes = {'input': ['attachment'],
'output': ['freetext', 'text']}
moduleinfo = {'version': '0.1', 'author': 'Sascha Rommelfangen',
'description': 'PDF to freetext-import IOC extractor',
'module-type': ['expansion']}
moduleconfig = []
def handler(q=False):
    """Extract text from an attached PDF via pdftotext and return it as a
    'freetext' result."""
    if q is False:
        return False
    request = json.loads(q)
    filename = request['attachment']
    try:
        pdf_array = np.frombuffer(binascii.a2b_base64(request['data']), np.uint8)
    except Exception as e:
        print(e)
        err = "Couldn't fetch attachment (JSON 'data' is empty). Are you using the 'Query enrichment' action?"
        misperrors['error'] = err
        print(err)
        return misperrors
    pdf_file = io.BytesIO(pdf_array)
    try:
        # One string per page, joined with blank lines.
        pdf_content = "\n\n".join(pdftotext.PDF(pdf_file))
        comment = "PDF-to-text from file " + filename
        return {'results': [{'types': ['freetext'], 'values': pdf_content, 'comment': comment}]}
    except Exception as e:
        print(e)
        err = "Couldn't analyze file as PDF. Error was: " + str(e)
        misperrors['error'] = err
        return misperrors
def introspection():
    # Advertise the attribute types this module accepts and produces.
    return mispattributes
def version():
    # Report module metadata, including the expected config fields.
    moduleinfo['config'] = moduleconfig
    return moduleinfo

View File

@ -0,0 +1,55 @@
import json
import binascii
import np
from pptx import Presentation
import io
misperrors = {'error': 'Error'}
mispattributes = {'input': ['attachment'],
'output': ['freetext', 'text']}
moduleinfo = {'version': '0.1', 'author': 'Sascha Rommelfangen',
'description': '.pptx to freetext-import IOC extractor',
'module-type': ['expansion']}
moduleconfig = []
def handler(q=False):
    """Extract text from every shape on every slide of an attached .pptx and
    return it as 'freetext' and 'text' results."""
    if q is False:
        return False
    request = json.loads(q)
    filename = request['attachment']
    try:
        pptx_array = np.frombuffer(binascii.a2b_base64(request['data']), np.uint8)
    except Exception as e:
        print(e)
        err = "Couldn't fetch attachment (JSON 'data' is empty). Are you using the 'Query enrichment' action?"
        misperrors['error'] = err
        print(err)
        return misperrors
    ppt_content = ""
    ppt_file = io.BytesIO(pptx_array)
    try:
        presentation = Presentation(ppt_file)
        for slide in presentation.slides:
            for shape in slide.shapes:
                # Only shapes with a text frame expose a .text attribute.
                if hasattr(shape, "text"):
                    print(shape.text)
                    ppt_content = ppt_content + "\n" + shape.text
        comment = ".pptx-to-text from file " + filename
        return {'results': [{'types': ['freetext'], 'values': ppt_content, 'comment': comment},
                            {'types': ['text'], 'values': ppt_content, 'comment': comment}]}
    except Exception as e:
        print(e)
        err = "Couldn't analyze file as .pptx. Error was: " + str(e)
        misperrors['error'] = err
        return misperrors
def introspection():
    # Advertise the attribute types this module accepts and produces.
    return mispattributes
def version():
    # Report module metadata, including the expected config fields.
    moduleinfo['config'] = moduleconfig
    return moduleinfo

View File

@ -0,0 +1,89 @@
import json
from pyzbar import pyzbar
import cv2
import re
import binascii
import np
misperrors = {'error': 'Error'}
mispattributes = {'input': ['attachment'],
'output': ['url', 'btc']}
moduleinfo = {'version': '0.1', 'author': 'Sascha Rommelfangen',
'description': 'QR code decoder',
'module-type': ['expansion', 'hover']}
debug = True
debug_prefix = "[DEBUG] QR Code module: "
# format example: bitcoin:1GXZ6v7FZzYBEnoRaG77SJxhu7QkvQmFuh?amount=0.15424
# format example: http://example.com
cryptocurrencies = ['bitcoin']
schemas = ['http://', 'https://', 'ftp://']
moduleconfig = []
def handler(q=False):
    """Decode QR code(s) in an attached image and return a 'btc', 'url' or
    'text' result depending on the decoded content."""
    if q is False:
        return False
    q = json.loads(q)
    filename = q['attachment']
    try:
        # Bug fix: np.fromstring is deprecated; frombuffer is the supported
        # equivalent and what the sibling enrichment modules already use.
        img_array = np.frombuffer(binascii.a2b_base64(q['data']), np.uint8)
    except Exception as e:
        err = "Couldn't fetch attachment (JSON 'data' is empty). Are you using the 'Query enrichment' action?"
        misperrors['error'] = err
        print(err)
        print(e)
        return misperrors
    image = cv2.imdecode(img_array, cv2.IMREAD_COLOR)
    if q:
        barcodes = pyzbar.decode(image)
        for item in barcodes:
            try:
                result = item.data.decode()
            except Exception as e:
                # NOTE(review): a bare `return` (None) on decode failure is
                # kept from the original — confirm this is intended.
                print(e)
                return
            if debug:
                print(debug_prefix + result)
            # Cryptocurrency URIs, e.g. bitcoin:<address>?amount=<amount>
            for currency_keyword in cryptocurrencies:
                if currency_keyword in result:
                    try:
                        currency, address, extra = re.split(r'\:|\?', result)
                    except Exception as e:
                        # Bug fix: the original swallowed the exception and
                        # then read the unbound `currency`, raising NameError.
                        print(e)
                        continue
                    if currency in cryptocurrencies:
                        try:
                            amount = re.split('=', extra)[1]
                            if debug:
                                print(debug_prefix + address)
                                print(debug_prefix + amount)
                            return {'results': [{'types': ['btc'], 'values': address, 'comment': "BTC: " + amount + " from file " + filename}]}
                        except Exception as e:
                            print(e)
                    else:
                        print(address)
            # Plain URLs; the for-else falls back to a free-text result.
            for schema in schemas:
                if schema in result:
                    try:
                        url = result
                        if debug:
                            print(debug_prefix + url)
                        return {'results': [{'types': ['url'], 'values': url, 'comment': "from QR code of file " + filename}]}
                    except Exception as e:
                        print(e)
            else:
                try:
                    return {'results': [{'types': ['text'], 'values': result, 'comment': "from QR code of file " + filename}]}
                except Exception as e:
                    print(e)
    misperrors['error'] = "Couldn't decode QR code in attachment."
    return misperrors
def introspection():
    # Advertise the attribute types this module accepts and produces.
    return mispattributes
def version():
    # Report module metadata, including the expected config fields.
    moduleinfo['config'] = moduleconfig
    return moduleinfo

View File

@ -1,10 +1,8 @@
import sys
import io
import json
try:
from sigma.parser.collection import SigmaCollectionParser
from sigma.configuration import SigmaConfiguration
from sigma.backends.base import BackendOptions
from sigma.backends.discovery import getBackend
except ImportError:
print("sigma or yaml is missing, use 'pip3 install sigmatools' to install it.")
@ -25,24 +23,20 @@ def handler(q=False):
misperrors['error'] = 'Sigma rule missing'
return misperrors
config = SigmaConfiguration()
backend_options = BackendOptions(None)
f = io.TextIOWrapper(io.BytesIO(request.get('sigma').encode()), encoding='utf-8')
parser = SigmaCollectionParser(f, config, None)
parser = SigmaCollectionParser(f, config)
targets = []
old_stdout = sys.stdout
result = io.StringIO()
sys.stdout = result
results = []
for t in sigma_targets:
backend = getBackend(t)(config, backend_options, None)
backend = getBackend(t)(config, {'rulecomment': False})
try:
parser.generate(backend)
backend.finalize()
print("#NEXT")
targets.append(t)
result = backend.finalize()
if result:
results.append(result)
targets.append(t)
except Exception:
continue
sys.stdout = old_stdout
results = result.getvalue()[:-5].split('#NEXT')
d_result = {t: r.strip() for t, r in zip(targets, results)}
return {'results': [{'types': mispattributes['output'], 'values': d_result}]}

View File

@ -0,0 +1,135 @@
from pymisp import MISPAttribute, MISPEvent, MISPObject
import json
import requests
misperrors = {'error': 'Error'}
mispattributes = {'input': ['domain', 'hostname', 'ip-src', 'ip-dst', 'md5', 'sha256', 'url'],
'output': ['url', 'filename', 'md5', 'sha256'],
'format': 'misp_standard'}
moduleinfo = {'version': '0.1', 'author': 'Christian Studer',
'description': 'Query of the URLhaus API to get additional information about some attributes.',
'module-type': ['expansion', 'hover']}
moduleconfig = []
file_keys = ('filename', 'response_size', 'response_md5', 'response_sha256')
file_relations = ('filename', 'size-in-bytes', 'md5', 'sha256')
vt_keys = ('result', 'link')
vt_types = ('text', 'link')
vt_relations = ('detection-ratio', 'permalink')
class URLhaus():
    """Base class with shared helpers for turning URLhaus API responses
    into MISP events."""
    def __init__(self):
        super(URLhaus, self).__init__()
        self.misp_event = MISPEvent()
    @staticmethod
    def _create_vt_object(virustotal):
        # Build a 'virustotal-report' object from the API's nested VT payload.
        vt_object = MISPObject('virustotal-report')
        for key, vt_type, relation in zip(vt_keys, vt_types, vt_relations):
            vt_object.add_attribute(relation, **{'type': vt_type, 'value': virustotal[key]})
        return vt_object
    def get_result(self):
        # Serialise the accumulated event; keep only the non-empty parts.
        event = json.loads(self.misp_event.to_json())['Event']
        results = {key: event[key] for key in ('Attribute', 'Object') if (key in event and event[key])}
        return {'results': results}
class HostQuery(URLhaus):
    """Query URLhaus for a host (domain/hostname/IP) and collect the malware
    URLs seen on it as MISP url attributes."""
    def __init__(self, attribute):
        super(HostQuery, self).__init__()
        self.attribute = MISPAttribute()
        self.attribute.from_dict(**attribute)
        self.url = 'https://urlhaus-api.abuse.ch/v1/host/'
    def query_api(self):
        response = requests.post(self.url, data={'host': self.attribute.value}).json()
        if 'urls' in response and response['urls']:
            for url in response['urls']:
                self.misp_event.add_attribute(type='url', value=url['url'])
class PayloadQuery(URLhaus):
    """Query URLhaus for a payload hash (md5/sha256) and build a file object
    linked to its distribution URLs and VirusTotal report."""
    def __init__(self, attribute):
        super(PayloadQuery, self).__init__()
        self.attribute = MISPAttribute()
        self.attribute.from_dict(**attribute)
        self.url = 'https://urlhaus-api.abuse.ch/v1/payload/'
    def query_api(self):
        hash_type = self.attribute.type
        file_object = MISPObject('file')
        # Reuse the existing object id when the attribute already belongs to
        # an event so the enrichment attaches to the right object.
        if self.attribute.event_id != '0':
            file_object.id = self.attribute.object_id
        response = requests.post(self.url, data={'{}_hash'.format(hash_type): self.attribute.value}).json()
        # Add the complementary hash (md5 <-> sha256) and the file size.
        other_hash_type = 'md5' if hash_type == 'sha256' else 'sha256'
        for key, relation in zip(('{}_hash'.format(other_hash_type), 'file_size'), (other_hash_type, 'size-in-bytes')):
            if response[key]:
                file_object.add_attribute(relation, **{'type': relation, 'value': response[key]})
        if response['virustotal']:
            vt_object = self._create_vt_object(response['virustotal'])
            file_object.add_reference(vt_object.uuid, 'analyzed-with')
            self.misp_event.add_object(**vt_object)
        _filename_ = 'filename'
        # One url attribute per distribution URL, referenced from the file.
        for url in response['urls']:
            attribute = MISPAttribute()
            attribute.from_dict(type='url', value=url['url'])
            self.misp_event.add_attribute(**attribute)
            file_object.add_reference(attribute.uuid, 'retrieved-from')
            if url[_filename_]:
                file_object.add_attribute(_filename_, **{'type': _filename_, 'value': url[_filename_]})
        # Only attach the file object if it actually gained content.
        if any((file_object.attributes, file_object.references)):
            self.misp_event.add_object(**file_object)
class UrlQuery(URLhaus):
    """Query URLhaus for a URL and build file objects for the payloads that
    were served from it, including their VirusTotal reports."""
    def __init__(self, attribute):
        super(UrlQuery, self).__init__()
        self.attribute = MISPAttribute()
        self.attribute.from_dict(**attribute)
        self.url = 'https://urlhaus-api.abuse.ch/v1/url/'
    @staticmethod
    def _create_file_object(payload):
        # Map the payload's non-empty fields onto a MISP file object.
        file_object = MISPObject('file')
        for key, relation in zip(file_keys, file_relations):
            if payload[key]:
                file_object.add_attribute(relation, **{'type': relation, 'value': payload[key]})
        return file_object
    def query_api(self):
        response = requests.post(self.url, data={'url': self.attribute.value}).json()
        if 'payloads' in response and response['payloads']:
            for payload in response['payloads']:
                file_object = self._create_file_object(payload)
                if payload['virustotal']:
                    vt_object = self._create_vt_object(payload['virustotal'])
                    file_object.add_reference(vt_object.uuid, 'analyzed-with')
                    self.misp_event.add_object(**vt_object)
                # Only attach the file object if it actually gained content.
                if any((file_object.attributes, file_object.references)):
                    self.misp_event.add_object(**file_object)
_misp_type_mapping = {'url': UrlQuery, 'md5': PayloadQuery, 'sha256': PayloadQuery,
'domain': HostQuery, 'hostname': HostQuery,
'ip-src': HostQuery, 'ip-dst': HostQuery}
def handler(q=False):
    """Dispatch the attribute to the URLhaus query class matching its type
    and return the parsed MISP-standard results."""
    if q is False:
        return False
    request = json.loads(q)
    attribute = request['attribute']
    query_class = _misp_type_mapping[attribute['type']]
    parser = query_class(attribute)
    parser.query_api()
    return parser.get_result()
def introspection():
    # Advertise the attribute types this module accepts and produces.
    return mispattributes
def version():
    # Report module metadata, including the expected config fields.
    moduleinfo['config'] = moduleconfig
    return moduleinfo

View File

@ -1,167 +1,208 @@
from pymisp import MISPAttribute, MISPEvent, MISPObject
import json
import requests
from requests import HTTPError
import base64
from collections import defaultdict
misperrors = {'error': 'Error'}
mispattributes = {'input': ['hostname', 'domain', "ip-src", "ip-dst", "md5", "sha1", "sha256", "sha512"],
'output': ['domain', "ip-src", "ip-dst", "text", "md5", "sha1", "sha256", "sha512", "ssdeep",
"authentihash", "filename"]}
mispattributes = {'input': ['hostname', 'domain', "ip-src", "ip-dst", "md5", "sha1", "sha256", "sha512", "url"],
'format': 'misp_standard'}
# possible module-types: 'expansion', 'hover' or both
moduleinfo = {'version': '3', 'author': 'Hannah Ward',
moduleinfo = {'version': '4', 'author': 'Hannah Ward',
'description': 'Get information from virustotal',
'module-type': ['expansion']}
# config fields that your code expects from the site admin
moduleconfig = ["apikey", "event_limit"]
comment = '{}: Enriched via VirusTotal'
hash_types = ["md5", "sha1", "sha256", "sha512"]
moduleconfig = ["apikey"]
class VirusTotalRequest(object):
def __init__(self, config):
self.apikey = config['apikey']
self.limit = int(config.get('event_limit', 5))
# TODO: Parse the report with a private API key to be able to get more advanced results from a query with 'allinfo' set to True
class VirusTotalParser(object):
def __init__(self, apikey):
self.apikey = apikey
self.base_url = "https://www.virustotal.com/vtapi/v2/{}/report"
self.results = defaultdict(set)
self.to_return = []
self.input_types_mapping = {'ip-src': self.get_ip, 'ip-dst': self.get_ip,
'domain': self.get_domain, 'hostname': self.get_domain,
'md5': self.get_hash, 'sha1': self.get_hash,
'sha256': self.get_hash, 'sha512': self.get_hash}
self.output_types_mapping = {'submission_names': 'filename', 'ssdeep': 'ssdeep',
'authentihash': 'authentihash', 'ITW_urls': 'url'}
self.misp_event = MISPEvent()
self.parsed_objects = {}
self.input_types_mapping = {'ip-src': self.parse_ip, 'ip-dst': self.parse_ip,
'domain': self.parse_domain, 'hostname': self.parse_domain,
'md5': self.parse_hash, 'sha1': self.parse_hash,
'sha256': self.parse_hash, 'sha512': self.parse_hash,
'url': self.parse_url}
def parse_request(self, q):
    """Run every supported attribute of the request through its VirusTotal
    lookup, then emit the aggregated results minus the queried values.

    Returns an error string as soon as a lookup fails, otherwise the list
    of result dicts accumulated in ``self.to_return``.
    """
    queried_values = set()
    for attribute_type, attribute_value in q.items():
        queried_values.add(attribute_value)
        try:
            error = self.input_types_mapping[attribute_type](attribute_value)
        except KeyError:
            # not an attribute type this module handles
            continue
        if error is not None:
            return error
    for key, values in self.results.items():
        fresh = values.difference(queried_values)
        if not fresh:
            continue
        if isinstance(key, tuple):
            types, comment = key
            self.to_return.append({'types': list(types), 'values': list(fresh), 'comment': comment})
        else:
            self.to_return.append({'types': key, 'values': list(fresh)})
    return self.to_return
def query_api(self, attribute):
    """Store the queried MISP attribute and route it to the matching parser."""
    self.attribute = MISPAttribute()
    self.attribute.from_dict(**attribute)
    parse = self.input_types_mapping[self.attribute.type]
    return parse(self.attribute.value, recurse=True)
def get_domain(self, domain, do_not_recurse=False):
req = requests.get(self.base_url.format('domain'), params={'domain': domain, 'apikey': self.apikey})
try:
req.raise_for_status()
def get_result(self):
    """Serialize the gathered attributes/objects into the module response."""
    event = json.loads(self.misp_event.to_json())['Event']
    results = {}
    for key in ('Attribute', 'Object'):
        if event.get(key):
            results[key] = event[key]
    return {'results': results}
################################################################################
#### Main parsing functions #### # noqa
################################################################################
def parse_domain(self, domain, recurse=False):
    """Fetch and parse a VirusTotal domain report.

    Resolutions, subdomains and sibling domains are merged into a domain-ip
    object; related samples are expanded through ``parse_hash``; the whois
    text is stored as its own object; related URLs are handled last.
    Returns an HTTP-like status code (200 on success).
    """
    response = requests.get(self.base_url.format('domain'),
                            params={'apikey': self.apikey, 'domain': domain})
    if response.status_code != 200:
        return response.status_code
    report = response.json()
    siblings = (self.parse_siblings(sibling) for sibling in report['domain_siblings'])
    uuid = self.parse_resolutions(report['resolutions'], report['subdomains'], siblings)
    relationships = {'communicating': 'communicates-with',
                     'downloaded': 'downloaded-from',
                     'referrer': 'referring'}
    for feature_type, relationship in relationships.items():
        for feature in ('undetected_{}_samples', 'detected_{}_samples'):
            for sample in report.get(feature.format(feature_type), []):
                status_code = self.parse_hash(sample['sha256'], False, uuid, relationship)
                if status_code != 200:
                    return status_code
    if report.get('whois'):
        whois_object = MISPObject('whois')
        whois_object.add_attribute('text', type='text', value=report['whois'])
        self.misp_event.add_object(**whois_object)
    return self.parse_related_urls(report, recurse, uuid)
def parse_hash(self, sample, recurse=False, uuid=None, relationship=None):
req = requests.get(self.base_url.format('file'), params={'apikey': self.apikey, 'resource': sample})
status_code = req.status_code
if req.status_code == 200:
req = req.json()
except HTTPError as e:
return str(e)
if req["response_code"] == 0:
# Nothing found
return []
if "resolutions" in req:
for res in req["resolutions"][:self.limit]:
ip_address = res["ip_address"]
self.results[(("ip-dst", "ip-src"), comment.format(domain))].add(ip_address)
# Pivot from here to find all domain info
if not do_not_recurse:
error = self.get_ip(ip_address, True)
if error is not None:
return error
self.get_more_info(req)
vt_uuid = self.parse_vt_object(req)
file_attributes = []
for hash_type in ('md5', 'sha1', 'sha256'):
if req.get(hash_type):
file_attributes.append({'type': hash_type, 'object_relation': hash_type,
'value': req[hash_type]})
if file_attributes:
file_object = MISPObject('file')
for attribute in file_attributes:
file_object.add_attribute(**attribute)
file_object.add_reference(vt_uuid, 'analyzed-with')
if uuid and relationship:
file_object.add_reference(uuid, relationship)
self.misp_event.add_object(**file_object)
return status_code
def get_hash(self, _hash):
req = requests.get(self.base_url.format('file'), params={'resource': _hash, 'apikey': self.apikey, 'allinfo': 1})
try:
req.raise_for_status()
def parse_ip(self, ip, recurse=False):
    """Fetch and parse a VirusTotal IP report (ASN data, resolutions, URLs).

    Returns an HTTP-like status code (200 on success).
    """
    response = requests.get(self.base_url.format('ip-address'),
                            params={'apikey': self.apikey, 'ip': ip})
    if response.status_code != 200:
        return response.status_code
    report = response.json()
    if report.get('asn'):
        asn_object = MISPObject('asn')
        asn_object.add_attribute('asn', type='AS', value=report['asn'])
        for key, (attribute_type, relation) in (('network', ('ip-src', 'subnet-announced')),
                                                ('country', ('text', 'country'))):
            if report.get(key):
                asn_object.add_attribute(relation, type=attribute_type, value=report[key])
        self.misp_event.add_object(**asn_object)
    uuid = self.parse_resolutions(report['resolutions']) if report.get('resolutions') else None
    return self.parse_related_urls(report, recurse, uuid)
def parse_url(self, url, recurse=False, uuid=None):
req = requests.get(self.base_url.format('url'), params={'apikey': self.apikey, 'resource': url})
status_code = req.status_code
if req.status_code == 200:
req = req.json()
except HTTPError as e:
return str(e)
if req["response_code"] == 0:
# Nothing found
return []
self.get_more_info(req)
vt_uuid = self.parse_vt_object(req)
if not recurse:
feature = 'url'
url_object = MISPObject(feature)
url_object.add_attribute(feature, type=feature, value=url)
url_object.add_reference(vt_uuid, 'analyzed-with')
if uuid:
url_object.add_reference(uuid, 'hosted-in')
self.misp_event.add_object(**url_object)
return status_code
def get_ip(self, ip, do_not_recurse=False):
req = requests.get(self.base_url.format('ip-address'), params={'ip': ip, 'apikey': self.apikey})
try:
req.raise_for_status()
req = req.json()
except HTTPError as e:
return str(e)
if req["response_code"] == 0:
# Nothing found
return []
if "resolutions" in req:
for res in req["resolutions"][:self.limit]:
hostname = res["hostname"]
self.results[(("domain",), comment.format(ip))].add(hostname)
# Pivot from here to find all domain info
if not do_not_recurse:
error = self.get_domain(hostname, True)
if error is not None:
return error
self.get_more_info(req)
################################################################################
#### Additional parsing functions #### # noqa
################################################################################
def find_all(self, data):
    """Recursively collect every hash value found in a VirusTotal response.

    Walks nested dicts and lists; whenever a key matches one of the
    module-level ``hash_types`` its value is recorded in ``self.results``
    and included in the returned list.
    """
    collected = []
    if isinstance(data, dict):
        for key, value in data.items():
            if key in hash_types:
                self.results[key].add(value)
                collected.append(value)
            elif isinstance(value, (dict, list)):
                collected.extend(self.find_all(value))
    elif isinstance(data, list):
        for item in data:
            collected.extend(self.find_all(item))
    return collected
def parse_related_urls(self, query_result, recurse, uuid=None):
    """Handle the URLs attached to a domain/IP report.

    With ``recurse`` set, each URL is expanded through ``parse_url`` and the
    first non-200 status code is propagated; otherwise the URLs are simply
    added to the event as plain attributes. Returns 200 on success.
    """
    url_features = ('detected_urls', 'undetected_urls')
    if recurse:
        for feature in url_features:
            for url in query_result.get(feature, []):
                value = url['url'] if isinstance(url, dict) else url[0]
                status_code = self.parse_url(value, False, uuid)
                if status_code != 200:
                    return status_code
    else:
        for feature in url_features:
            for url in query_result.get(feature, []):
                value = url['url'] if isinstance(url, dict) else url[0]
                self.misp_event.add_attribute('url', value)
    return 200
def get_more_info(self, req):
    """Enrich a query result with per-hash details pulled from VirusTotal.

    Every hash found anywhere in ``req`` is looked up (capped at
    ``self.limit``); interesting report fields are mapped to MISP types via
    ``self.output_types_mapping``, and the malware sample itself is attached
    whenever VirusTotal reports submission names for it.
    """
    # Get all hashes first
    hashes = self.find_all(req)
    for h in hashes[:self.limit]:
        # Search VT for some juicy info
        try:
            data = requests.get(self.base_url.format('file'),
                                params={'resource': h, 'apikey': self.apikey, 'allinfo': 1}).json()
        except Exception:
            # best-effort enrichment: skip hashes VirusTotal cannot resolve
            continue
        # Go through each key and check if it exists
        for VT_type, MISP_type in self.output_types_mapping.items():
            if VT_type in data:
                try:
                    self.results[((MISP_type,), comment.format(h))].add(data[VT_type])
                except TypeError:
                    # the value is a list: merge it instead of adding it
                    self.results[((MISP_type,), comment.format(h))].update(data[VT_type])
        # Get the malware sample
        sample = requests.get(self.base_url[:-6].format('file/download'),
                              params={'hash': h, 'apikey': self.apikey})
        malsample = sample.content
        # It is possible for VT to not give us any submission names
        if "submission_names" in data:
            # bugfix: the key was misspelled ('submimssion_names') and the
            # base64 helper was called 'b64encore', crashing at runtime
            self.to_return.append({"types": ["malware-sample"], "categories": ["Payload delivery"],
                                   "values": data["submission_names"],
                                   "data": str(base64.b64encode(malsample), 'utf-8')})
def parse_resolutions(self, resolutions, subdomains=None, uuids=None):
    """Build a domain-ip object out of VirusTotal resolution records.

    The side of the pair matching the queried attribute comes from
    ``self.attribute``; the other side is filled from ``resolutions``.
    Optional ``subdomains`` become standalone domain attributes referenced
    as 'subdomain'; ``uuids`` are linked as 'sibling-of'.
    Returns the uuid of the created object.
    """
    domain_ip_object = MISPObject('domain-ip')
    if self.attribute.type == 'domain':
        domain_ip_object.add_attribute('domain', type='domain', value=self.attribute.value)
        attribute_type, relation, key = 'ip-dst', 'ip', 'ip_address'
    else:
        domain_ip_object.add_attribute('ip', type='ip-dst', value=self.attribute.value)
        attribute_type, relation, key = 'domain', 'domain', 'hostname'
    for resolution in resolutions:
        domain_ip_object.add_attribute(relation, type=attribute_type, value=resolution[key])
    for subdomain in subdomains or []:
        attribute = MISPAttribute()
        attribute.from_dict(type='domain', value=subdomain)
        self.misp_event.add_attribute(**attribute)
        domain_ip_object.add_reference(attribute.uuid, 'subdomain')
    for uuid in uuids or []:
        domain_ip_object.add_reference(uuid, 'sibling-of')
    self.misp_event.add_object(**domain_ip_object)
    return domain_ip_object.uuid
def parse_siblings(self, domain):
    """Register a sibling domain as an attribute and hand back its uuid."""
    sibling = MISPAttribute()
    sibling.from_dict(type='domain', value=domain)
    self.misp_event.add_attribute(**sibling)
    return sibling.uuid
def parse_vt_object(self, query_result):
    """Turn the report's permalink and detection ratio into a
    virustotal-report object; return the object's uuid."""
    vt_object = MISPObject('virustotal-report')
    vt_object.add_attribute('permalink', type='link', value=query_result['permalink'])
    ratio = '{}/{}'.format(query_result['positives'], query_result['total'])
    vt_object.add_attribute('detection-ratio', type='text', value=ratio)
    self.misp_event.add_object(**vt_object)
    return vt_object.uuid
def parse_error(status_code):
    """Map a VirusTotal HTTP status code to a human readable error message."""
    messages = {
        204: 'VirusTotal request rate limit exceeded.',
        400: 'Incorrect request, please check the arguments.',
        403: "You don't have enough privileges to make the request.",
    }
    return messages.get(status_code, 'VirusTotal may not be accessible.')
def handler(q=False):
if q is False:
return False
q = json.loads(q)
if not q.get('config') or not q['config'].get('apikey'):
request = json.loads(q)
if not request.get('config') or not request['config'].get('apikey'):
misperrors['error'] = "A VirusTotal api key is required for this module."
return misperrors
del q['module']
query = VirusTotalRequest(q.pop('config'))
r = query.parse_request(q)
if isinstance(r, str):
misperrors['error'] = r
parser = VirusTotalParser(request['config']['apikey'])
attribute = request['attribute']
status = parser.query_api(attribute)
if status != 200:
misperrors['error'] = parse_error(status)
return misperrors
return {'results': r}
return parser.get_result()
def introspection():

View File

@ -0,0 +1,193 @@
from pymisp import MISPAttribute, MISPEvent, MISPObject
import json
import requests
misperrors = {'error': 'Error'}
mispattributes = {'input': ['hostname', 'domain', "ip-src", "ip-dst", "md5", "sha1", "sha256", "sha512", "url"],
'format': 'misp_standard'}
moduleinfo = {'version': '1', 'author': 'Christian Studer',
'description': 'Get information from virustotal public API v2.',
'module-type': ['expansion', 'hover']}
moduleconfig = ['apikey']
class VirusTotalParser():
    """Shared plumbing for the VirusTotal public API v2 query classes."""

    def __init__(self):
        super(VirusTotalParser, self).__init__()
        self.misp_event = MISPEvent()

    def declare_variables(self, apikey, attribute):
        """Keep the queried attribute and API key for the report parsers."""
        self.attribute = MISPAttribute()
        self.attribute.from_dict(**attribute)
        self.apikey = apikey

    def get_result(self):
        """Return the gathered attributes/objects, MISP-standard style."""
        event = json.loads(self.misp_event.to_json())['Event']
        results = {}
        for key in ('Attribute', 'Object'):
            if event.get(key):
                results[key] = event[key]
        return {'results': results}

    def parse_urls(self, query_result):
        """Add every detected/undetected URL of the report as a url attribute."""
        for feature in ('detected_urls', 'undetected_urls'):
            for url in query_result.get(feature, []):
                value = url['url'] if isinstance(url, dict) else url[0]
                self.misp_event.add_attribute('url', value)

    def parse_resolutions(self, resolutions, subdomains=None, uuids=None):
        """Build a domain-ip object pairing the queried value with its resolutions."""
        domain_ip_object = MISPObject('domain-ip')
        if self.attribute.type == 'domain':
            domain_ip_object.add_attribute('domain', type='domain', value=self.attribute.value)
            attribute_type, relation, key = 'ip-dst', 'ip', 'ip_address'
        else:
            domain_ip_object.add_attribute('ip', type='ip-dst', value=self.attribute.value)
            attribute_type, relation, key = 'domain', 'domain', 'hostname'
        for resolution in resolutions:
            domain_ip_object.add_attribute(relation, type=attribute_type, value=resolution[key])
        for subdomain in subdomains or []:
            attribute = MISPAttribute()
            attribute.from_dict(type='domain', value=subdomain)
            self.misp_event.add_attribute(**attribute)
            domain_ip_object.add_reference(attribute.uuid, 'subdomain')
        for uuid in uuids or []:
            domain_ip_object.add_reference(uuid, 'sibling-of')
        self.misp_event.add_object(**domain_ip_object)

    def parse_vt_object(self, query_result):
        """Store the permalink and detection ratio in a virustotal-report object."""
        vt_object = MISPObject('virustotal-report')
        vt_object.add_attribute('permalink', type='link', value=query_result['permalink'])
        ratio = '{}/{}'.format(query_result['positives'], query_result['total'])
        vt_object.add_attribute('detection-ratio', type='text', value=ratio)
        self.misp_event.add_object(**vt_object)

    def get_query_result(self, query_type):
        """Hit the subclass' endpoint with the attribute value; return the response."""
        params = {query_type: self.attribute.value, 'apikey': self.apikey}
        return requests.get(self.base_url, params=params)
class DomainQuery(VirusTotalParser):
    """Query and parse VirusTotal domain reports."""

    def __init__(self, apikey, attribute):
        super(DomainQuery, self).__init__()
        self.base_url = "https://www.virustotal.com/vtapi/v2/domain/report"
        self.declare_variables(apikey, attribute)

    def parse_report(self, query_result):
        """Extract samples, whois, siblings, resolutions and URLs from a report.

        Report fields are optional on VirusTotal's side, so every lookup is
        defensive: a report missing e.g. 'resolutions' must not crash the
        module (consistent with how IpQuery already guards the same field).
        """
        hash_type = 'sha256'
        whois = 'whois'
        for feature_type in ('referrer', 'downloaded', 'communicating'):
            for feature in ('undetected_{}_samples', 'detected_{}_samples'):
                for sample in query_result.get(feature.format(feature_type), []):
                    self.misp_event.add_attribute(hash_type, sample[hash_type])
        if query_result.get(whois):
            whois_object = MISPObject(whois)
            whois_object.add_attribute('text', type='text', value=query_result[whois])
            self.misp_event.add_object(**whois_object)
        # bugfix: direct indexing raised KeyError when VT omitted these keys
        siblings = (self.parse_siblings(domain) for domain in query_result.get('domain_siblings', []))
        self.parse_resolutions(query_result.get('resolutions', []),
                               query_result.get('subdomains'), siblings)
        self.parse_urls(query_result)

    def parse_siblings(self, domain):
        """Register a sibling domain as an attribute; return its uuid."""
        attribute = MISPAttribute()
        attribute.from_dict(**dict(type='domain', value=domain))
        self.misp_event.add_attribute(**attribute)
        return attribute.uuid
class HashQuery(VirusTotalParser):
    """Query and parse VirusTotal file (hash) reports."""

    def __init__(self, apikey, attribute):
        super(HashQuery, self).__init__()
        self.base_url = "https://www.virustotal.com/vtapi/v2/file/report"
        self.declare_variables(apikey, attribute)

    def parse_report(self, query_result):
        """Build a file object from the report's hashes plus a VT report object."""
        file_attributes = [
            {'type': hash_type, 'object_relation': hash_type, 'value': query_result[hash_type]}
            for hash_type in ('md5', 'sha1', 'sha256')
            if query_result.get(hash_type)
        ]
        if file_attributes:
            file_object = MISPObject('file')
            for attribute in file_attributes:
                file_object.add_attribute(**attribute)
            self.misp_event.add_object(**file_object)
        self.parse_vt_object(query_result)
class IpQuery(VirusTotalParser):
    """Query and parse VirusTotal IP address reports."""

    def __init__(self, apikey, attribute):
        super(IpQuery, self).__init__()
        self.base_url = "https://www.virustotal.com/vtapi/v2/ip-address/report"
        self.declare_variables(apikey, attribute)

    def parse_report(self, query_result):
        """Turn ASN details, URLs and resolutions of an IP report into MISP data."""
        if query_result.get('asn'):
            asn_object = MISPObject('asn')
            asn_object.add_attribute('asn', type='AS', value=query_result['asn'])
            for key, (attribute_type, relation) in (('network', ('ip-src', 'subnet-announced')),
                                                    ('country', ('text', 'country'))):
                if query_result.get(key):
                    asn_object.add_attribute(relation, type=attribute_type, value=query_result[key])
            self.misp_event.add_object(**asn_object)
        self.parse_urls(query_result)
        if query_result.get('resolutions'):
            self.parse_resolutions(query_result['resolutions'])
class UrlQuery(VirusTotalParser):
    """Query and parse VirusTotal URL reports."""

    def __init__(self, apikey, attribute):
        super(UrlQuery, self).__init__()
        self.base_url = "https://www.virustotal.com/vtapi/v2/url/report"
        self.declare_variables(apikey, attribute)

    def parse_report(self, query_result):
        # A URL report only contributes the generic VT report object.
        self.parse_vt_object(query_result)
# Each supported input type maps to the API parameter used for the lookup
# and the parser class in charge of the matching report.
domain = ('domain', DomainQuery)
ip = ('ip', IpQuery)
file = ('resource', HashQuery)
misp_type_mapping = {
    'domain': domain, 'hostname': domain,
    'ip-src': ip, 'ip-dst': ip,
    'md5': file, 'sha1': file, 'sha256': file, 'sha512': file,
    'url': ('resource', UrlQuery),
}
def parse_error(status_code):
    """Translate a VirusTotal HTTP status code into a user-facing message."""
    known_errors = {
        204: 'VirusTotal request rate limit exceeded.',
        400: 'Incorrect request, please check the arguments.',
        403: "You don't have enough privileges to make the request.",
    }
    return known_errors.get(status_code, 'VirusTotal may not be accessible.')
def handler(q=False):
    """Module entry point: validate the request, query VirusTotal, parse."""
    if q is False:
        return False
    request = json.loads(q)
    config = request.get('config') or {}
    if not config.get('apikey'):
        misperrors['error'] = "A VirusTotal api key is required for this module."
        return misperrors
    attribute = request['attribute']
    query_type, parser_class = misp_type_mapping[attribute['type']]
    parser = parser_class(config['apikey'], attribute)
    response = parser.get_query_result(query_type)
    if response.status_code != 200:
        misperrors['error'] = parse_error(response.status_code)
        return misperrors
    parser.parse_report(response.json())
    return parser.get_result()
def introspection():
    """Expose the supported input types and the misp_standard output format."""
    return mispattributes
def version():
    """Return module metadata, advertising the expected config fields."""
    moduleinfo.update({'config': moduleconfig})
    return moduleinfo

View File

@ -3,10 +3,12 @@
'''
Submit sample to VMRay.
Submit a sample to VMRay
Requires "vmray_rest_api"
TODO:
# Deal with archive submissions
The expansion module vmray_submit and import module vmray_import are a two step
process to import data from VMRay.
You can automate this by setting the PyMISP example script 'vmray_automation'
as a cron job
'''
@ -129,13 +131,13 @@ def vmrayProcess(vmraydata):
# Result received?
if submissions and jobs:
r = {'results': []}
r["results"].append({"types": "md5", "values": submissions["submission_sample_md5"]})
r["results"].append({"types": "sha1", "values": submissions["submission_sample_sha1"]})
r["results"].append({"types": "sha256", "values": submissions["submission_sample_sha256"]})
r["results"].append({"types": "text", "values": "VMRay Sample ID: %s" % submissions["submission_sample_id"]})
r["results"].append({"types": "text", "values": "VMRay Submission ID: %s" % submissions["submission_id"]})
r["results"].append({"types": "text", "values": "VMRay Submission Sample IP: %s" % submissions["submission_ip_ip"]})
r["results"].append({"types": "link", "values": submissions["submission_webif_url"]})
r['results'].append({'types': 'md5', 'values': submissions['submission_sample_md5']})
r['results'].append({'types': 'sha1', 'values': submissions['submission_sample_sha1']})
r['results'].append({'types': 'sha256', 'values': submissions['submission_sample_sha256']})
r['results'].append({'types': 'text', 'values': 'VMRay Sample ID: %s' % submissions['submission_sample_id'], 'tags': 'workflow:state="incomplete"'})
r['results'].append({'types': 'text', 'values': 'VMRay Submission ID: %s' % submissions['submission_id']})
r['results'].append({'types': 'text', 'values': 'VMRay Submission Sample IP: %s' % submissions['submission_ip_ip']})
r['results'].append({'types': 'link', 'values': submissions['submission_webif_url']})
# Include data from different jobs
if include_vmrayjobids:

View File

@ -0,0 +1,53 @@
import json
import binascii
import np
import pandas
import io
misperrors = {'error': 'Error'}
mispattributes = {'input': ['attachment'],
'output': ['freetext', 'text']}
moduleinfo = {'version': '0.1', 'author': 'Sascha Rommelfangen',
'description': '.xlsx to freetext-import IOC extractor',
'module-type': ['expansion']}
moduleconfig = []
def handler(q=False):
    """Extract the text content of an attached .xlsx file for freetext import.

    Expects the attachment bytes base64-encoded under 'data'. On success the
    spreadsheet is rendered as text and returned both as 'freetext' (for IOC
    extraction) and as a plain 'text' attribute; otherwise ``misperrors``
    is returned with a description of the failure.
    """
    if q is False:
        return False
    q = json.loads(q)
    filename = q['attachment']
    try:
        xlsx_array = np.frombuffer(binascii.a2b_base64(q['data']), np.uint8)
    except Exception as e:
        print(e)
        err = "Couldn't fetch attachment (JSON 'data' is empty). Are you using the 'Query enrichment' action?"
        misperrors['error'] = err
        print(err)
        return misperrors
    xls_content = ""
    xls_file = io.BytesIO(xlsx_array)
    # bugfix: -1 is deprecated and rejected by current pandas; None is the
    # documented way to disable column width truncation.
    pandas.set_option('display.max_colwidth', None)
    try:
        xls = pandas.read_excel(xls_file)
        xls_content = xls.to_string(max_rows=None)
        print(xls_content)
        comment = ".xlsx-to-text from file " + filename
        return {'results': [{'types': ['freetext'], 'values': xls_content, 'comment': comment},
                            {'types': ['text'], 'values': xls_content, 'comment': comment}]}
    except Exception as e:
        print(e)
        err = "Couldn't analyze file as .xlsx. Error was: " + str(e)
        misperrors['error'] = err
        return misperrors
def introspection():
    """Expose the module's supported input/output attribute types."""
    return mispattributes
def version():
    """Return the module metadata with its (empty) config field list."""
    moduleinfo.update(config=moduleconfig)
    return moduleinfo

View File

@ -0,0 +1,140 @@
######################################################
# #
# Author: Stanislav Klevtsov, Ukraine; Feb 2019. #
# #
# #
# Script was tested on the following configuration: #
# MISP v2.4.90 #
# Cisco Firesight Manager Console v6.2.3 (bld 84) #
# #
######################################################
import json
import base64
from urllib.parse import quote
misperrors = {'error': 'Error'}
moduleinfo = {'version': '1', 'author': 'Stanislav Klevtsov',
'description': 'Export malicious network activity attributes of the MISP event to Cisco firesight manager block rules',
'module-type': ['export']}
moduleconfig = ['fmc_ip_addr', 'fmc_login', 'fmc_pass', 'domain_id', 'acpolicy_id']
fsmapping = {"ip-dst": "dst", "url": "request"}
mispattributes = {'input': list(fsmapping.keys())}
# options: event, attribute, event-collection, attribute-collection
inputSource = ['event']
outputFileExtension = 'sh'
responseType = 'application/txt'
# .sh file templates
SH_FILE_HEADER = """#!/bin/sh\n\n"""
BLOCK_JSON_TMPL = """
BLOCK_RULE='{{ "action": "BLOCK", "enabled": true, "type": "AccessRule", "name": "{rule_name}", "destinationNetworks": {{ "literals": [ {dst_networks} ] }}, "urls": {{ "literals": [ {urls} ] }}, "newComments": [ "{event_info_comment}" ] }}'\n
"""
BLOCK_DST_JSON_TMPL = """{{ "type": "Host", "value": "{ipdst}" }} """
BLOCK_URL_JSON_TMPL = """{{ "type": "Url", "url": "{url}" }} """
CURL_ADD_RULE_TMPL = """
curl -X POST -v -k -H 'Content-Type: application/json' -H \"Authorization: Basic $LOGINPASS_BASE64\" -H \"X-auth-access-token: $ACC_TOKEN\" -i \"https://$FIRESIGHT_IP_ADDR/api/fmc_config/v1/domain/$DOMAIN_ID/policy/accesspolicies/$ACPOLICY_ID/accessrules\" --data \"$BLOCK_RULE\" """
def handler(q=False):
    """Render the event's to_ids ip-dst/url attributes into a .sh script that
    pushes a BLOCK access rule to Cisco Firesight Manager via its REST API.

    Returns {'data': <base64 of the generated shell script>}.
    """
    if q is False:
        return False
    r = {'results': []}
    request = json.loads(q)
    if "config" in request:
        config = request["config"]

    # Fall back to placeholder values for any unset config field.
    for key, fallback in (('fmc_ip_addr', "0.0.0.0"),
                          ('fmc_login', "login"),
                          ('fmc_pass', "password"),
                          ('domain_id', "SET_FIRESIGHT_DOMAIN_ID"),
                          ('acpolicy_id', "SET_FIRESIGHT_ACPOLICY_ID")):
        if not config[key]:
            config[key] = fallback

    data = request["data"]
    output = ""
    dst_networks = []
    blocked_urls = []

    # Collect the to_ids attributes we can translate into rule literals.
    for ev in data:
        attributes = ev["Attribute"]
        event_id = ev["Event"]["id"]
        event_info = ev["Event"]["info"]
        for attr in attributes:
            if attr["to_ids"] is True and attr["type"] in fsmapping:
                if attr["type"] == "ip-dst":
                    dst_networks.append(BLOCK_DST_JSON_TMPL.format(ipdst=attr["value"]))
                else:
                    blocked_urls.append(BLOCK_URL_JSON_TMPL.format(url=quote(attr["value"], safe='@/:;?&=-_.,+!*')))

    # Assemble the .sh file.
    output += SH_FILE_HEADER
    output += "FIRESIGHT_IP_ADDR='{}'\n".format(config['fmc_ip_addr'])
    output += "LOGINPASS_BASE64=`echo -n '{}:{}' | base64`\n".format(config['fmc_login'], config['fmc_pass'])
    output += "DOMAIN_ID='{}'\n".format(config['domain_id'])
    output += "ACPOLICY_ID='{}'\n\n".format(config['acpolicy_id'])
    output += "ACC_TOKEN=`curl -X POST -v -k -sD - -o /dev/null -H \"Authorization: Basic $LOGINPASS_BASE64\" -i \"https://$FIRESIGHT_IP_ADDR/api/fmc_platform/v1/auth/generatetoken\" | grep -i x-auth-acc | sed 's/.*:\\ //g' | tr -d '[:space:]' | tr -d '\\n'`\n"
    output += BLOCK_JSON_TMPL.format(rule_name="misp_event_{}".format(event_id),
                                     dst_networks=', '.join(dst_networks),
                                     urls=', '.join(blocked_urls),
                                     event_info_comment=event_info) + "\n"
    output += CURL_ADD_RULE_TMPL
    # END building the .sh file

    r = {"data": base64.b64encode(output.encode('utf-8')).decode('utf-8')}
    return r
def introspection():
modulesetup = {}
try:
responseType
modulesetup['responseType'] = responseType
except NameError:
pass
try:
userConfig
modulesetup['userConfig'] = userConfig
except NameError:
pass
try:
outputFileExtension
modulesetup['outputFileExtension'] = outputFileExtension
except NameError:
pass
try:
inputSource
modulesetup['inputSource'] = inputSource
except NameError:
pass
return modulesetup
def version():
    """Return the export module's metadata and config field list."""
    moduleinfo.update({'config': moduleconfig})
    return moduleinfo

View File

@ -1,67 +1,29 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
from datetime import date
import json
import shlex
import subprocess
import base64
from pymisp import MISPEvent
from pymisp.tools import reportlab_generator
misperrors = {'error': 'Error'}
moduleinfo = {'version': '1',
'author': 'Raphaël Vinot',
moduleinfo = {'version': '2',
'author': 'Vincent Falconieri (prev. Raphaël Vinot)',
'description': 'Simple export to PDF',
'module-type': ['export'],
'require_standard_format': True}
moduleconfig = []
# config fields that your code expects from the site admin
moduleconfig = ["MISP_base_url_for_dynamic_link", "MISP_name_for_metadata", "Activate_textual_description", "Activate_galaxy_description", "Activate_related_events", "Activate_internationalization_fonts", "Custom_fonts_path"]
mispattributes = {}
outputFileExtension = "pdf"
responseType = "application/pdf"
types_to_attach = ['ip-dst', 'url', 'domain']
objects_to_attach = ['domain-ip']
headers = """
:toc: right
:toclevels: 1
:toc-title: Daily Report
:icons: font
:sectanchors:
:sectlinks:
= Daily report by {org_name}
{date}
:icons: font
"""
event_level_tags = """
IMPORTANT: This event is classified TLP:{value}.
{expanded}
"""
attributes = """
=== Indicator(s) of compromise
{list_attributes}
"""
title = """
== ({internal_id}) {title}
{summary}
"""
class ReportGenerator():
def __init__(self):
@ -79,60 +41,6 @@ class ReportGenerator():
self.misp_event = MISPEvent()
self.misp_event.load(event)
def attributes(self):
if not self.misp_event.attributes:
return ''
list_attributes = []
for attribute in self.misp_event.attributes:
if attribute.type in types_to_attach:
list_attributes.append("* {}".format(attribute.value))
for obj in self.misp_event.Object:
if obj.name in objects_to_attach:
for attribute in obj.Attribute:
if attribute.type in types_to_attach:
list_attributes.append("* {}".format(attribute.value))
return attributes.format(list_attributes="\n".join(list_attributes))
def _get_tag_info(self, machinetag):
return self.taxonomies.revert_machinetag(machinetag)
def report_headers(self):
content = {'org_name': 'name',
'date': date.today().isoformat()}
self.report += headers.format(**content)
def event_level_tags(self):
if not self.misp_event.Tag:
return ''
for tag in self.misp_event.Tag:
# Only look for TLP for now
if tag['name'].startswith('tlp'):
tax, predicate = self._get_tag_info(tag['name'])
return self.event_level_tags.format(value=predicate.predicate.upper(), expanded=predicate.expanded)
def title(self):
internal_id = ''
summary = ''
# Get internal refs for report
if not hasattr(self.misp_event, 'Object'):
return ''
for obj in self.misp_event.Object:
if obj.name != 'report':
continue
for a in obj.Attribute:
if a.object_relation == 'case-number':
internal_id = a.value
if a.object_relation == 'summary':
summary = a.value
return title.format(internal_id=internal_id, title=self.misp_event.info,
summary=summary)
def asciidoc(self, lang='en'):
self.report += self.title()
self.report += self.event_level_tags()
self.report += self.attributes()
def handler(q=False):
if q is False:
@ -143,18 +51,20 @@ def handler(q=False):
if 'data' not in request:
return False
for evt in request['data']:
report = ReportGenerator()
report.report_headers()
report.from_event(evt)
report.asciidoc()
config = {}
command_line = 'asciidoctor-pdf -'
args = shlex.split(command_line)
with subprocess.Popen(args, stdout=subprocess.PIPE, stdin=subprocess.PIPE) as process:
cmd_out, cmd_err = process.communicate(
input=report.report.encode('utf-8'))
return {'response': [], 'data': str(base64.b64encode(cmd_out), 'utf-8')}
# Construct config object for reportlab_generator
for config_item in moduleconfig:
if (request.get('config')) and (request['config'].get(config_item) is not None):
config[config_item] = request['config'].get(config_item)
for evt in request['data']:
misp_event = MISPEvent()
misp_event.load(evt)
pdf = reportlab_generator.get_base64_from_value(reportlab_generator.convert_event_in_pdf_buffer(misp_event, config))
return {'response': [], 'data': str(pdf, 'utf-8')}
def introspection():

View File

@ -1,3 +1,6 @@
from . import _vmray # noqa
import os
import sys
sys.path.append('{}/lib'.format('/'.join((os.path.realpath(__file__)).split('/')[:-3])))
__all__ = ['vmray_import', 'ocr', 'cuckooimport', 'goamlimport', 'email_import', 'mispjson', 'openiocimport', 'threatanalyzer_import', 'csvimport']
__all__ = ['vmray_import', 'ocr', 'cuckooimport', 'goamlimport', 'email_import', 'mispjson', 'openiocimport', 'threatanalyzer_import', 'csvimport', 'joe_import']

View File

@ -1,35 +1,78 @@
# -*- coding: utf-8 -*-
from pymisp import MISPEvent, MISPObject
from pymisp import __path__ as pymisp_path
import csv
import io
import json
import os
import base64
import pymisp
misperrors = {'error': 'Error'}
moduleinfo = {'version': '0.1', 'author': 'Christian Studer',
'description': 'Import Attributes from a csv file.',
'module-type': ['import']}
moduleconfig = []
inputSource = ['file']
userConfig = {'header': {
'type': 'String',
'message': 'Define the header of the csv file, with types (included in MISP attribute types or attribute fields) separated by commas.\nFor fields that do not match these types, please use space or simply nothing between commas.\nFor instance: ip-src,domain, ,timestamp'},
'message': 'Define the header of the csv file, with types (included in MISP attribute types or attribute fields) separated by commas.\nFor fields that do not match these types or that you want to skip, please use space or simply nothing between commas.\nFor instance: ip-src,domain, ,timestamp'},
'has_header': {
'type': 'Boolean',
'message': 'Tick this box ONLY if there is a header line, NOT COMMENTED, in the file (which will be skipped atm).'
'message': 'Tick this box ONLY if there is a header line, NOT COMMENTED, and all the fields of this header are respecting the recommendations above.'
}}
mispattributes = {'userConfig': userConfig, 'inputSource': ['file'], 'format': 'misp_standard'}
duplicatedFields = {'mispType': {'mispComment': 'comment'},
'attrField': {'attrComment': 'comment'}}
attributesFields = ['type', 'value', 'category', 'to_ids', 'comment', 'distribution']
misp_standard_csv_header = ['uuid', 'event_id', 'category', 'type', 'value', 'comment', 'to_ids', 'date',
'object_relation', 'attribute_tag', 'object_uuid', 'object_name', 'object_meta_category']
misp_context_additional_fields = ['event_info', 'event_member_org', 'event_source_org', 'event_distribution',
'event_threat_level_id', 'event_analysis', 'event_date', 'event_tag']
misp_extended_csv_header = misp_standard_csv_header + misp_context_additional_fields
delimiters = [',', ';', '|', '/', '\t', ' ']
class CsvParser():
def __init__(self, header, has_header):
self.header = header
self.fields_number = len(header)
self.has_header = has_header
self.attributes = []
def __init__(self, header, has_header, data):
data_header = data[0]
self.misp_event = MISPEvent()
if data_header == misp_standard_csv_header or data_header == misp_extended_csv_header:
self.header = misp_standard_csv_header if data_header == misp_standard_csv_header else misp_extended_csv_header[:13]
self.from_misp = True
self.data = data[1:]
else:
self.from_misp = False
self.has_header = has_header
if header:
self.header = header
self.fields_number = len(header)
self.parse_data(data)
else:
self.has_delimiter = True
self.fields_number, self.delimiter, self.header = self.get_delimiter_from_header(data[0])
self.data = data
descFilename = os.path.join(pymisp_path[0], 'data/describeTypes.json')
with open(descFilename, 'r') as f:
self.MispTypes = json.loads(f.read())['result'].get('types')
for h in self.header:
if not (h in self.MispTypes or h in misp_extended_csv_header):
misperrors['error'] = 'Wrong header field: {}. Please use a header value that can be recognized by MISP (or alternatively skip it using a whitespace).'.format(h)
return misperrors
def get_delimiter_from_header(self, data):
delimiters_count = {}
for d in delimiters:
length = data.count(d)
if length > 0:
delimiters_count[d] = data.count(d)
if len(delimiters_count) == 0:
length = 0
delimiter = None
header = [data]
else:
length, delimiter = max((n, v) for v, n in delimiters_count.items())
header = data.split(delimiter)
return length + 1, delimiter, header
def parse_data(self, data):
return_data = []
@ -48,6 +91,8 @@ class CsvParser():
return_data.append(line)
# find which delimiter is used
self.delimiter = self.find_delimiter()
if self.fields_number == 0:
self.header = return_data[0].split(self.delimiter)
self.data = return_data[1:] if self.has_header else return_data
def parse_delimiter(self, line):
@ -59,6 +104,38 @@ class CsvParser():
_, delimiter = max((n, v) for v, n in self.delimiter_count.items())
return delimiter
def parse_csv(self):
if self.from_misp:
self.build_misp_event()
else:
self.buildAttributes()
def build_misp_event(self):
objects = {}
header_length = len(self.header)
attribute_fields = self.header[:1] + self.header[2:6] + self.header[7:8]
for line in self.data:
attribute = {}
try:
a_uuid, _, a_category, a_type, value, comment, to_ids, timestamp, relation, tag, o_uuid, o_name, o_category = line[:header_length]
except ValueError:
continue
for t, v in zip(attribute_fields, (a_uuid, a_category, a_type, value, comment, timestamp)):
attribute[t] = v.strip('"')
attribute['to_ids'] = True if to_ids == '1' else False
if tag:
attribute['Tag'] = [{'name': t.strip()} for t in tag.split(',')]
if relation:
if o_uuid not in objects:
objects[o_uuid] = MISPObject(o_name)
objects[o_uuid].add_attribute(relation, **attribute)
else:
self.misp_event.add_attribute(**attribute)
for uuid, misp_object in objects.items():
misp_object.uuid = uuid
self.misp_event.add_object(**misp_object)
self.finalize_results()
def buildAttributes(self):
# if there is only 1 field of data
if self.delimiter is None:
@ -66,7 +143,7 @@ class CsvParser():
for data in self.data:
d = data.strip()
if d:
self.attributes.append({'types': mispType, 'values': d})
self.misp_event.add_attribute(**{'type': mispType, 'value': d})
else:
# split fields that should be recognized as misp attribute types from the others
list2pop, misp, head = self.findMispTypes()
@ -82,23 +159,21 @@ class CsvParser():
datamisp.append(datasplit.pop(l).strip())
# for each misp type, we create an attribute
for m, dm in zip(misp, datamisp):
attribute = {'types': m, 'values': dm}
attribute = {'type': m, 'value': dm}
for h, ds in zip(head, datasplit):
if h:
attribute[h] = ds.strip()
self.attributes.append(attribute)
self.misp_event.add_attribute(**attribute)
self.finalize_results()
def findMispTypes(self):
descFilename = os.path.join(pymisp.__path__[0], 'data/describeTypes.json')
with open(descFilename, 'r') as f:
MispTypes = json.loads(f.read())['result'].get('types')
list2pop = []
misp = []
head = []
for h in reversed(self.header):
n = self.header.index(h)
# fields that are misp attribute types
if h in MispTypes:
if h in self.MispTypes:
list2pop.append(n)
misp.append(h)
# handle confusions between misp attribute types and attribute fields
@ -118,6 +193,10 @@ class CsvParser():
# return list of indexes of the misp types, list of the misp types, remaining fields that will be attribute fields
return list2pop, misp, list(reversed(head))
def finalize_results(self):
event = json.loads(self.misp_event.to_json())['Event']
self.results = {key: event[key] for key in ('Attribute', 'Object') if (key in event and event[key])}
def handler(q=False):
if q is False:
@ -125,37 +204,29 @@ def handler(q=False):
request = json.loads(q)
if request.get('data'):
data = base64.b64decode(request['data']).decode('utf-8')
data = [line for line in csv.reader(io.TextIOWrapper(io.BytesIO(data.encode()), encoding='utf-8'))]
else:
misperrors['error'] = "Unsupported attributes type"
return misperrors
if not request.get('config') and not request['config'].get('header'):
misperrors['error'] = "Configuration error"
return misperrors
header = request['config'].get('header').split(',')
header = [c.strip() for c in header]
has_header = request['config'].get('has_header')
has_header = True if has_header == '1' else False
csv_parser = CsvParser(header, has_header)
csv_parser.parse_data(data.split('\n'))
if not request.get('config') and not request['config'].get('header'):
if has_header:
header = []
else:
misperrors['error'] = "Configuration error"
return misperrors
else:
header = request['config'].get('header').split(',')
header = [c.strip() for c in header]
csv_parser = CsvParser(header, has_header, data)
# build the attributes
csv_parser.buildAttributes()
r = {'results': csv_parser.attributes}
return r
csv_parser.parse_csv()
return {'results': csv_parser.results}
def introspection():
modulesetup = {}
try:
userConfig
modulesetup['userConfig'] = userConfig
except NameError:
pass
try:
inputSource
modulesetup['inputSource'] = inputSource
except NameError:
pass
return modulesetup
return mispattributes
def version():

View File

@ -9,7 +9,8 @@ moduleinfo = {'version': 1, 'author': 'Christian Studer',
'description': 'Import from GoAML',
'module-type': ['import']}
moduleconfig = []
mispattributes = {'inputSource': ['file'], 'output': ['MISP objects']}
mispattributes = {'inputSource': ['file'], 'output': ['MISP objects'],
'format': 'misp_standard'}
t_from_objects = {'nodes': ['from_person', 'from_account', 'from_entity'],
'leaves': ['from_funds_code', 'from_country']}
@ -164,7 +165,7 @@ def handler(q=False):
misperrors['error'] = "Impossible to read XML data"
return misperrors
aml_parser.parse_xml()
r = {'results': [obj.to_json() for obj in aml_parser.misp_event.objects]}
r = {'results': {'Object': [obj.to_json() for obj in aml_parser.misp_event.objects]}}
return r

View File

@ -0,0 +1,48 @@
# -*- coding: utf-8 -*-
import base64
import json
from joe_parser import JoeParser
misperrors = {'error': 'Error'}
userConfig = {}
inputSource = ['file']
moduleinfo = {'version': '0.1', 'author': 'Christian Studer',
'description': 'Import for Joe Sandbox JSON reports',
'module-type': ['import']}
moduleconfig = []
def handler(q=False):
    """Import handler: parse a Joe Sandbox JSON report into MISP format.

    Expects the request to carry a base64-encoded JSON report under
    'data'; returns the parsed results produced by JoeParser.
    """
    if q is False:
        return False
    q = json.loads(q)
    data = q.get('data')
    # Guard BEFORE decoding: base64.b64decode(None) raises TypeError,
    # so a request without 'data' used to crash instead of failing cleanly.
    if not data:
        return json.dumps({'success': 0})
    report = json.loads(base64.b64decode(data).decode('utf-8'))
    joe_parser = JoeParser()
    # The Joe Sandbox report wraps everything under the 'analysis' key.
    joe_parser.parse_data(report['analysis'])
    joe_parser.finalize_results()
    return {'results': joe_parser.results}
def introspection():
    """Report the module's optional setup values (userConfig, inputSource)
    plus its output format, skipping any that are not defined."""
    setup = {'format': 'misp_standard'}
    for key in ('userConfig', 'inputSource'):
        try:
            setup[key] = globals()[key]
        except KeyError:
            # Optional module-level value not defined: simply omit it.
            pass
    return setup
def version():
    # Attach the module's config key list before reporting its metadata.
    moduleinfo.update(config=moduleconfig)
    return moduleinfo

View File

@ -8,68 +8,62 @@ This version supports import from different analyze jobs, starting from one samp
Requires "vmray_rest_api"
TODO:
# Import one job (analyze_id)
# Import STIX package (XML version)
The expansion module vmray_submit and import module vmray_import are a two step
process to import data from VMRay.
You can automate this by setting the PyMISP example script 'vmray_automation'
as a cron job
'''
import json
import re
from ._vmray.vmray_rest_api import VMRayRESTAPI
misperrors = {'error': 'Error'}
inputSource = []
moduleinfo = {'version': '0.1', 'author': 'Koen Van Impe',
'description': 'Import VMRay (VTI) results',
moduleinfo = {'version': '0.2', 'author': 'Koen Van Impe',
'description': 'Import VMRay results',
'module-type': ['import']}
userConfig = {'include_textdescr': {'type': 'Boolean',
'message': 'Include textual description'
},
'include_analysisid': {'type': 'Boolean',
'message': 'Include VMRay analysis_id text'
userConfig = {'include_analysisid': {'type': 'Boolean',
'message': 'Include link to VMRay analysis'
},
'only_network_info': {'type': 'Boolean',
'message': 'Only include network (src-ip, hostname, domain, ...) information'
},
'include_analysisdetails': {'type': 'Boolean',
'message': 'Include (textual) analysis details'
},
'include_vtidetails': {'type': 'Boolean',
'message': 'Include VMRay Threat Identifier (VTI) rules'
},
'include_imphash_ssdeep': {'type': 'Boolean',
'message': 'Include imphash and ssdeep'
},
'include_extracted_files': {'type': 'Boolean',
'message': 'Include extracted files section'
},
'sample_id': {'type': 'Integer',
'errorMessage': 'Expected a sample ID',
'message': 'The VMRay sample_id'
}
}
moduleconfig = ['apikey', 'url']
include_textdescr = False
include_analysisid = False
only_network_info = False
moduleconfig = ['apikey', 'url', 'wait_period']
def handler(q=False):
global include_textdescr
global include_analysisid
global only_network_info
global include_analysisid, include_imphash_ssdeep, include_extracted_files, include_analysisdetails, include_vtidetails, include_static_to_ids
if q is False:
return False
request = json.loads(q)
include_textdescr = request["config"].get("include_textdescr")
include_analysisid = request["config"].get("include_analysisid")
only_network_info = request["config"].get("only_network_info")
if include_textdescr == "1":
include_textdescr = True
else:
include_textdescr = False
if include_analysisid == "1":
include_analysisid = True
else:
include_analysisid = False
if only_network_info == "1":
only_network_info = True
else:
only_network_info = False
include_analysisid = bool(int(request["config"].get("include_analysisid")))
include_imphash_ssdeep = bool(int(request["config"].get("include_imphash_ssdeep")))
include_extracted_files = bool(int(request["config"].get("include_extracted_files")))
include_analysisdetails = bool(int(request["config"].get("include_extracted_files")))
include_vtidetails = bool(int(request["config"].get("include_vtidetails")))
include_static_to_ids = True
# print("include_analysisid: %s include_imphash_ssdeep: %s include_extracted_files: %s include_analysisdetails: %s include_vtidetails: %s" % ( include_analysisid, include_imphash_ssdeep, include_extracted_files, include_analysisdetails, include_vtidetails))
sample_id = int(request["config"].get("sample_id"))
@ -81,44 +75,67 @@ def handler(q=False):
try:
api = VMRayRESTAPI(request["config"].get("url"), request["config"].get("apikey"), False)
vmray_results = {'results': []}
# Get all information on the sample, returns a set of finished analyze jobs
data = vmrayGetInfoAnalysis(api, sample_id)
if data["data"]:
vti_patterns_found = False
for analysis in data["data"]:
analysis_id = analysis["analysis_id"]
analysis_id = int(analysis["analysis_id"])
if analysis_id > 0:
# Get the details for an analyze job
analysis_data = vmrayDownloadAnalysis(api, analysis_id)
if analysis_data:
if "analysis_vti_patterns" in analysis_data:
p = vmrayVtiPatterns(analysis_data["analysis_vti_patterns"])
else:
p = vmrayVtiPatterns(analysis_data["vti_patterns"])
if p and len(p["results"]) > 0:
vti_patterns_found = True
vmray_results = {'results': vmray_results["results"] + p["results"]}
if include_analysisdetails and "analysis_details" in analysis_data:
analysis_details = vmrayAnalysisDetails(analysis_data["analysis_details"], analysis_id)
if analysis_details and len(analysis_details["results"]) > 0:
vmray_results = {'results': vmray_results["results"] + analysis_details["results"]}
if "classifications" in analysis_data:
classifications = vmrayClassifications(analysis_data["classifications"], analysis_id)
if classifications and len(classifications["results"]) > 0:
vmray_results = {'results': vmray_results["results"] + classifications["results"]}
if include_extracted_files and "extracted_files" in analysis_data:
extracted_files = vmrayExtractedfiles(analysis_data["extracted_files"])
if extracted_files and len(extracted_files["results"]) > 0:
vmray_results = {'results': vmray_results["results"] + extracted_files["results"]}
if include_vtidetails and "vti" in analysis_data:
vti = vmrayVti(analysis_data["vti"])
if vti and len(vti["results"]) > 0:
vmray_results = {'results': vmray_results["results"] + vti["results"]}
if "artifacts" in analysis_data:
artifacts = vmrayArtifacts(analysis_data["artifacts"])
if artifacts and len(artifacts["results"]) > 0:
vmray_results = {'results': vmray_results["results"] + artifacts["results"]}
if include_analysisid:
a_id = {'results': []}
url1 = "https://cloud.vmray.com/user/analysis/view?from_sample_id=%u" % sample_id
url1 = request["config"].get("url") + "/user/analysis/view?from_sample_id=%u" % sample_id
url2 = "&id=%u" % analysis_id
url3 = "&sub=%2Freport%2Foverview.html"
a_id["results"].append({"values": url1 + url2 + url3, "types": "link"})
vmray_results = {'results': vmray_results["results"] + a_id["results"]}
# Clean up (remove doubles)
if vti_patterns_found:
if len(vmray_results["results"]) > 0:
vmray_results = vmrayCleanup(vmray_results)
return vmray_results
else:
misperrors['error'] = "No vti_results returned or jobs not finished"
return misperrors
else:
if "result" in data:
if data["result"] == "ok":
return vmray_results
# Fallback
misperrors['error'] = "Unable to fetch sample id %u" % (sample_id)
return misperrors
except Exception:
misperrors['error'] = "Unable to access VMRay API"
except Exception as e: # noqa
misperrors['error'] = "Unable to access VMRay API : %s" % (e)
return misperrors
else:
misperrors['error'] = "Not a valid sample id"
@ -158,80 +175,212 @@ def vmrayGetInfoAnalysis(api, sample_id):
def vmrayDownloadAnalysis(api, analysis_id):
''' Get the details from an analysis'''
if analysis_id:
data = api.call("GET", "/rest/analysis/%u/archive/additional/vti_result.json" % (analysis_id), raw_data=True)
return json.loads(data.read().decode())
try:
data = api.call("GET", "/rest/analysis/%u/archive/logs/summary.json" % (analysis_id), raw_data=True)
return json.loads(data.read().decode())
except Exception as e: # noqa
misperrors['error'] = "Unable to download summary.json for analysis %s" % (analysis_id)
return misperrors
else:
return False
def vmrayVtiPatterns(vti_patterns):
''' Match the VTI patterns to MISP data'''
def vmrayVti(vti):
'''VMRay Threat Identifier (VTI) rules that matched for this analysis'''
if vti_patterns:
if vti:
r = {'results': []}
for rule in vti:
if rule == "vti_rule_matches":
vti_rule = vti["vti_rule_matches"]
for el in vti_rule:
if "operation_desc" in el:
comment = ""
types = ["text"]
values = el["operation_desc"]
r['results'].append({'types': types, 'values': values, 'comment': comment})
return r
else:
return False
def vmrayExtractedfiles(extracted_files):
    ''' Information about files which were extracted during the analysis, such as files that were created, modified, or embedded by the malware.

    Returns {'results': [...]} of filename|hash attributes, or False when
    there is nothing to report. Reads the module-level flags
    include_imphash_ssdeep and include_static_to_ids (set in handler()).
    '''
    if not extracted_files:
        return False
    r = {'results': []}
    for file in extracted_files:
        # BUG FIX: the original condition was
        #   if "file_type" and "norm_filename" in file:
        # where the left operand is a truthy literal, so "file_type"
        # presence was never checked and building the comment could
        # raise KeyError when only "norm_filename" was present.
        if "file_type" in file and "norm_filename" in file:
            comment = "%s - %s" % (file["file_type"], file["norm_filename"])
        else:
            comment = ""
        if "norm_filename" in file:
            # Keep only the basename of the (Windows-style) path.
            attr_filename_c = file["norm_filename"].rsplit("\\", 1)
            if len(attr_filename_c) > 1:
                attr_filename = attr_filename_c[len(attr_filename_c) - 1]
            else:
                attr_filename = "vmray_sample"
        else:
            attr_filename = "vmray_sample"
        # One attribute per available hash, in the original emission order.
        # imphash/ssdeep are only emitted when the user opted in.
        for key, misp_type, optional in (("md5_hash", "filename|md5", False),
                                         ("imp_hash", "filename|imphash", True),
                                         ("sha1_hash", "filename|sha1", False),
                                         ("sha256_hash", "filename|sha256", False),
                                         ("ssdeep_hash", "filename|ssdeep", True)):
            if optional and not include_imphash_ssdeep:
                continue
            if file.get(key) is not None:
                r['results'].append({'types': [misp_type],
                                     'values': '{}|{}'.format(attr_filename, file[key]),
                                     'comment': comment,
                                     'categories': ['Payload delivery', 'Artifacts dropped'],
                                     'to_ids': include_static_to_ids})
    return r
def vmrayClassifications(classification, analysis_id):
    ''' List the classifications, tag them on a "text" attribute '''
    if not classification:
        return False
    # Join all classification labels into a single summary value.
    summary = "Classification : %s " % (", ".join(str(x) for x in classification))
    return {'results': [{'types': ["text"], 'values': summary, 'comment': ""}]}
def vmrayAnalysisDetails(details, analysis_id):
    ''' General information about the analysis information '''
    if not details:
        return False
    r = {'results': []}
    # Emit one "text" attribute per known detail key, in a fixed order.
    for key, label in (("execution_successful", "execution_successful"),
                       ("termination_reason", "termination_reason"),
                       ("result_str", "result")):
        if key in details:
            values = "Analysis %s : %s : %s " % (analysis_id, label, str(details[key]))
            r['results'].append({'types': ["text"], 'values': values, 'comment': ""})
    return r
def vmrayArtifacts(patterns):
''' IOCs that were seen during the analysis '''
if patterns:
r = {'results': []}
y = {'results': []}
for pattern in vti_patterns:
content = False
if pattern["category"] == "_network" and pattern["operation"] == "_download_data":
content = vmrayGeneric(pattern, "url", 1)
elif pattern["category"] == "_network" and pattern["operation"] == "_connect":
content = vmrayConnect(pattern)
elif pattern["category"] == "_network" and pattern["operation"] == "_install_server":
content = vmrayGeneric(pattern)
for pattern in patterns:
if pattern == "domains":
for el in patterns[pattern]:
values = el["domain"]
types = ["domain", "hostname"]
if "sources" in el:
sources = el["sources"]
comment = "Found in: " + ", ".join(str(x) for x in sources)
else:
comment = ""
r['results'].append({'types': types, 'values': values, 'comment': comment, 'to_ids': include_static_to_ids})
if pattern == "files":
for el in patterns[pattern]:
filename_values = el["filename"]
attr_filename_c = filename_values.rsplit("\\", 1)
if len(attr_filename_c) > 1:
attr_filename = attr_filename_c[len(attr_filename_c) - 1]
else:
attr_filename = ""
filename_types = ["filename"]
filename_operations = el["operations"]
comment = "File operations: " + ", ".join(str(x) for x in filename_operations)
r['results'].append({'types': filename_types, 'values': filename_values, 'comment': comment})
elif only_network_info is False and pattern["category"] == "_process" and pattern["operation"] == "_alloc_wx_page":
content = vmrayGeneric(pattern)
elif only_network_info is False and pattern["category"] == "_process" and pattern["operation"] == "_install_ipc_endpoint":
content = vmrayGeneric(pattern, "mutex", 1)
elif only_network_info is False and pattern["category"] == "_process" and pattern["operation"] == "_crashed_process":
content = vmrayGeneric(pattern)
elif only_network_info is False and pattern["category"] == "_process" and pattern["operation"] == "_read_from_remote_process":
content = vmrayGeneric(pattern)
elif only_network_info is False and pattern["category"] == "_process" and pattern["operation"] == "_create_process_with_hidden_window":
content = vmrayGeneric(pattern)
# Run through all hashes
if "hashes" in el:
for hash in el["hashes"]:
if "md5_hash" in hash and hash["md5_hash"] is not None:
r['results'].append({'types': ["filename|md5"], 'values': '{}|{}'.format(attr_filename, hash["md5_hash"]), 'comment': comment, 'categories': ['Payload delivery', 'Artifacts dropped'], 'to_ids': include_static_to_ids})
if include_imphash_ssdeep and "imp_hash" in hash and hash["imp_hash"] is not None:
r['results'].append({'types': ["filename|imphash"], 'values': '{}|{}'.format(attr_filename, hash["imp_hash"]), 'comment': comment, 'categories': ['Payload delivery', 'Artifacts dropped'], 'to_ids': include_static_to_ids})
if "sha1_hash" in hash and hash["sha1_hash"] is not None:
r['results'].append({'types': ["filename|sha1"], 'values': '{}|{}'.format(attr_filename, hash["sha1_hash"]), 'comment': comment, 'categories': ['Payload delivery', 'Artifacts dropped'], 'to_ids': include_static_to_ids})
if "sha256_hash" in hash and hash["sha256_hash"] is not None:
r['results'].append({'types': ["filename|sha256"], 'values': '{}|{}'.format(attr_filename, hash["sha256_hash"]), 'comment': comment, 'categories': ['Payload delivery', 'Artifacts dropped'], 'to_ids': include_static_to_ids})
if include_imphash_ssdeep and "ssdeep_hash" in hash and hash["ssdeep_hash"] is not None:
r['results'].append({'types': ["filename|ssdeep"], 'values': '{}|{}'.format(attr_filename, hash["ssdeep_hash"]), 'comment': comment, 'categories': ['Payload delivery', 'Artifacts dropped'], 'to_ids': include_static_to_ids})
if pattern == "ips":
for el in patterns[pattern]:
values = el["ip_address"]
types = ["ip-dst"]
if "sources" in el:
sources = el["sources"]
comment = "Found in: " + ", ".join(str(x) for x in sources)
else:
comment = ""
elif only_network_info is False and pattern["category"] == "_anti_analysis" and pattern["operation"] == "_delay_execution":
content = vmrayGeneric(pattern)
elif only_network_info is False and pattern["category"] == "_anti_analysis" and pattern["operation"] == "_dynamic_api_usage":
content = vmrayGeneric(pattern)
r['results'].append({'types': types, 'values': values, 'comment': comment, 'to_ids': include_static_to_ids})
if pattern == "mutexes":
for el in patterns[pattern]:
values = el["mutex_name"]
types = ["mutex"]
if "operations" in el:
sources = el["operations"]
comment = "Operations: " + ", ".join(str(x) for x in sources)
else:
comment = ""
elif only_network_info is False and pattern["category"] == "_static" and pattern["operation"] == "_drop_pe_file":
content = vmrayGeneric(pattern, "filename", 1)
elif only_network_info is False and pattern["category"] == "_static" and pattern["operation"] == "_execute_dropped_pe_file":
content = vmrayGeneric(pattern, "filename", 1)
r['results'].append({'types': types, 'values': values, 'comment': comment, 'to_ids': include_static_to_ids})
if pattern == "registry":
for el in patterns[pattern]:
values = el["reg_key_name"]
types = ["regkey"]
include_static_to_ids_tmp = include_static_to_ids
if "operations" in el:
sources = el["operations"]
if sources == ["access"]:
include_static_to_ids_tmp = False
comment = "Operations: " + ", ".join(str(x) for x in sources)
else:
comment = ""
elif only_network_info is False and pattern["category"] == "_injection" and pattern["operation"] == "_modify_memory":
content = vmrayGeneric(pattern)
elif only_network_info is False and pattern["category"] == "_injection" and pattern["operation"] == "_modify_memory_system":
content = vmrayGeneric(pattern)
elif only_network_info is False and pattern["category"] == "_injection" and pattern["operation"] == "_modify_memory_non_system":
content = vmrayGeneric(pattern)
elif only_network_info is False and pattern["category"] == "_injection" and pattern["operation"] == "_modify_control_flow":
content = vmrayGeneric(pattern)
elif only_network_info is False and pattern["category"] == "_injection" and pattern["operation"] == "_modify_control_flow_non_system":
content = vmrayGeneric(pattern)
elif only_network_info is False and pattern["category"] == "_file_system" and pattern["operation"] == "_create_many_files":
content = vmrayGeneric(pattern)
r['results'].append({'types': types, 'values': values, 'comment': comment, 'to_ids': include_static_to_ids_tmp})
if pattern == "urls":
for el in patterns[pattern]:
values = el["url"]
types = ["url"]
if "operations" in el:
sources = el["operations"]
comment = "Operations: " + ", ".join(str(x) for x in sources)
else:
comment = ""
elif only_network_info is False and pattern["category"] == "_hide_tracks" and pattern["operation"] == "_hide_data_in_registry":
content = vmrayGeneric(pattern, "regkey", 1)
r['results'].append({'types': types, 'values': values, 'comment': comment, 'to_ids': include_static_to_ids})
elif only_network_info is False and pattern["category"] == "_persistence" and pattern["operation"] == "_install_startup_script":
content = vmrayGeneric(pattern, "regkey", 1)
elif only_network_info is False and pattern["category"] == "_os" and pattern["operation"] == "_enable_process_privileges":
content = vmrayGeneric(pattern)
if content:
r["results"].append(content["attributes"])
r["results"].append(content["text"])
# Remove empty results
r["results"] = [x for x in r["results"] if isinstance(x, dict) and len(x["values"]) != 0]
# Remove doubles
for el in r["results"]:
if el not in y["results"]:
y["results"].append(el)
return y
else:
return False
@ -239,84 +388,7 @@ def vmrayVtiPatterns(vti_patterns):
def vmrayCleanup(x):
    ''' Remove doubles, preserving first-seen order of the results.'''
    deduped = []
    for entry in x["results"]:
        if entry not in deduped:
            deduped.append(entry)
    return {'results': deduped}
def vmraySanitizeInput(s):
    ''' Sanitize some input so it gets properly imported in MISP.

    Strips double quotes and normalises backslashes; returns False
    for empty/falsy input.
    '''
    if not s:
        return False
    cleaned = s.replace('"', '')
    return re.sub('\\\\', r'\\', cleaned)
def vmrayGeneric(el, attr="", attrpos=1):
    ''' Convert a 'generic' VTI pattern to MISP data.

    Optionally extracts the value quoted at position `attrpos` of the
    technique description as an attribute of type `attr`, and (when the
    module-level include_textdescr flag is set) also keeps the full
    description as a "text" attribute.
    '''
    attributes = {"values": []}
    text = {"values": []}
    if not el:
        return False
    content = el["technique_desc"]
    if not content:
        return False
    if attr:
        # Some elements are put between \"\" ; collapse them to single
        # quotes, then split: quoted values sit at odd split positions.
        parts = content.replace("\"\"", "\"").split("\"")
        if len(parts) > attrpos:
            parts[attrpos] = vmraySanitizeInput(parts[attrpos])
            attributes["values"].append(parts[attrpos])
            attributes["types"] = [attr]
    # Adding the value also as text to get the extra description,
    # but this is pretty useless for "url".
    if include_textdescr and attr != "url":
        text["values"].append(vmraySanitizeInput(content))
        text["types"] = ["text"]
    return {"text": text, "attributes": attributes}
def vmrayConnect(el):
    ''' Extension of vmrayGeneric , parse network connect data.

    Classifies the quoted "<host>:<port>" target of the technique
    description as either an ip-dst (dotted-quad) or a
    domain/hostname attribute.
    '''
    # BUG FIX: the dots must be escaped. The original pattern
    # "([0-9]{1,3}.){3}[0-9]{1,3}" used '.' (any character), so plain
    # digit strings such as "1234567890123" were misclassified as IPs.
    ipre = re.compile(r"([0-9]{1,3}\.){3}[0-9]{1,3}")
    r = {"values": []}
    f = {"values": []}
    if el:
        content = el["technique_desc"]
        if content:
            target = content.split("\"")
            # target[1] holds the quoted "<host>:<port>" value.
            # port = (target[1].split(":"))[1] ## FIXME: not used
            host = (target[1].split(":"))[0]
            if ipre.match(str(host)):
                r["values"].append(host)
                r["types"] = ["ip-dst"]
            else:
                r["values"].append(host)
                r["types"] = ["domain", "hostname"]
            f["values"].append(vmraySanitizeInput(target[1]))
            f["types"] = ["text"]
            if include_textdescr:
                f["values"].append(vmraySanitizeInput(content))
                f["types"] = ["text"]
            return {"text": f, "attributes": r}
        else:
            return False
    else:
        return False

View File

@ -12,6 +12,7 @@ setup(
description='MISP modules are autonomous modules that can be used for expansion and other services in MISP',
packages=find_packages(),
entry_points={'console_scripts': ['misp-modules = misp_modules:main']},
scripts=['tools/update_misp_modules.sh'],
test_suite="tests",
classifiers=[
'License :: OSI Approved :: GNU Affero General Public License v3',

65
tests/test_expansions.py Normal file
View File

@ -0,0 +1,65 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import unittest
import requests
from urllib.parse import urljoin
import json
class TestExpansions(unittest.TestCase):
    """Integration tests for expansion modules.

    NOTE(review): these tests POST queries to a misp-modules server
    expected to be running on 127.0.0.1:6666 and, through it, reach
    external services — they need network access to pass.
    """
    def setUp(self):
        # Show full diffs on assertion failures; set endpoint and headers.
        self.maxDiff = None
        self.headers = {'Content-Type': 'application/json'}
        self.url = "http://127.0.0.1:6666/"
    def misp_modules_post(self, query):
        # Send a module query to the server's /query endpoint.
        return requests.post(urljoin(self.url, "query"), json=query)
    def get_values(self, response):
        # Return the first attribute's values from a module answer;
        # when the payload is not the expected dict, dump it for
        # debugging and hand it back as-is.
        data = response.json()
        if not isinstance(data, dict):
            print(json.dumps(data, indent=2))
            return data
        return data['results'][0]['values']
    def test_cve(self):
        query = {"module": "cve", "vulnerability": "CVE-2010-3333"}
        response = self.misp_modules_post(query)
        self.assertTrue(self.get_values(response).startswith("Stack-based buffer overflow in Microsoft Office XP SP3, Office 2003 SP3"))
    def test_dns(self):
        query = {"module": "dns", "hostname": "www.circl.lu", "config": {"nameserver": "8.8.8.8"}}
        response = self.misp_modules_post(query)
        self.assertEqual(self.get_values(response), ['149.13.33.14'])
    def test_macvendors(self):
        query = {"module": "macvendors", "mac-address": "FC-A1-3E-2A-1C-33"}
        response = self.misp_modules_post(query)
        self.assertEqual(self.get_values(response), 'Samsung Electronics Co.,Ltd')
    def test_haveibeenpwned(self):
        query = {"module": "hibp", "email-src": "info@circl.lu"}
        response = self.misp_modules_post(query)
        to_check = self.get_values(response)
        # The hibp API rate-limits/blocks CI IPs; skip instead of failing.
        if to_check == "haveibeenpwned.com API not accessible (HTTP 403)":
            self.skipTest(f"haveibeenpwned blocks travis IPs: {response}")
        self.assertEqual(to_check, 'OK (Not Found)', response)
    def test_greynoise(self):
        query = {"module": "greynoise", "ip-dst": "1.1.1.1"}
        response = self.misp_modules_post(query)
        self.assertEqual(self.get_values(response)['status'], 'ok')
    def test_ipasn(self):
        query = {"module": "ipasn", "ip-dst": "1.1.1.1"}
        response = self.misp_modules_post(query)
        # The answer is keyed by timestamp; take the first (only) entry.
        key = list(self.get_values(response)['response'].keys())[0]
        entry = self.get_values(response)['response'][key]['asn']
        self.assertEqual(entry, '13335')
    def test_bgpranking(self):
        query = {"module": "bgpranking", "AS": "13335"}
        response = self.misp_modules_post(query)
        self.assertEqual(self.get_values(response)['response']['asn_description'], 'CLOUDFLARENET - Cloudflare, Inc., US')

37
tools/update_misp_modules.sh Executable file
View File

@ -0,0 +1,37 @@
#!/usr/bin/env bash
set -e
set -x

# Updates the MISP Modules while respecting the current permissions
# It aims to support the two following installation methods:
# * Everything is running on the same machine following the MISP installation guide.
# * The modules are installed using pipenv on a different machine from the one where MISP is running.

if [ -d "/var/www/MISP" ] && [ -d "/usr/local/src/misp-modules" ]
then
    echo "MISP is installed on the same machine, following the recommended install script. Using MISP virtualenv."
    PATH_TO_MISP="/var/www/MISP"
    PATH_TO_MISP_MODULES="/usr/local/src/misp-modules"

    pushd "${PATH_TO_MISP_MODULES}"
    # Run git/pip as the owner of the checkout (not root) so file
    # permissions stay intact. Use a dedicated variable instead of
    # clobbering the standard USER environment variable.
    MODULES_OWNER=$(stat -c "%U" .)
    sudo -H -u "${MODULES_OWNER}" git pull
    sudo -H -u "${MODULES_OWNER}" "${PATH_TO_MISP}/venv/bin/pip" install -U -r REQUIREMENTS
    sudo -H -u "${MODULES_OWNER}" "${PATH_TO_MISP}/venv/bin/pip" install -U -e .
    service misp-modules restart
    popd
else
    if ! [ -x "$(command -v pipenv)" ]; then
        echo 'Error: pipenv not available, unable to automatically update.' >&2
        exit 1
    fi
    echo "Standalone mode, use pipenv from the current directory."
    git pull
    pipenv install
fi