From 80dba63a8b4d291a6ec4cd157edeaa447431b4d9 Mon Sep 17 00:00:00 2001 From: Davide Date: Sun, 9 Jul 2023 12:42:59 +0200 Subject: [PATCH 1/3] Module updated to apiosintDSv2.0 --- README.md | 3 +- REQUIREMENTS | 2 +- misp_modules/modules/expansion/apiosintds.py | 292 ++++++++++++++++--- 3 files changed, 251 insertions(+), 46 deletions(-) diff --git a/README.md b/README.md index b623c15..26a8360 100644 --- a/README.md +++ b/README.md @@ -13,8 +13,7 @@ For more information: [Extending MISP with Python modules](https://www.misp-proj ## Existing MISP modules ### Expansion modules - -* [apiosintDS](misp_modules/modules/expansion/apiosintds.py) - a hover and expansion module to query the OSINT.digitalside.it API. +* [apiosintDS](misp_modules/modules/expansion/apiosintds.py) - a hover and expansion module to query the [OSINT.digitalside.it](https://osint.digitalside.it) API. [Documentation](https://apiosintds.readthedocs.io/en/latest/userguidemisp.html). * [API Void](misp_modules/modules/expansion/apivoid.py) - an expansion and hover module to query API Void with a domain attribute. * [AssemblyLine submit](misp_modules/modules/expansion/assemblyline_submit.py) - an expansion module to submit samples and urls to AssemblyLine. * [AssemblyLine query](misp_modules/modules/expansion/assemblyline_query.py) - an expansion module to query AssemblyLine and parse the full submission report. 
diff --git a/REQUIREMENTS b/REQUIREMENTS index 620d7a6..a3ca744 100644 --- a/REQUIREMENTS +++ b/REQUIREMENTS @@ -3,7 +3,7 @@ aiohttp==3.8.4 aiosignal==1.3.1 ; python_version >= '3.7' antlr4-python3-runtime==4.9.3 anyio==3.6.2 ; python_full_version >= '3.6.2' -apiosintds==1.8.3 +git+https://github.com/davidonzo/apiosintDS@misp appdirs==1.4.4 argcomplete==3.0.8 ; python_version >= '3.6' argparse==1.4.0 diff --git a/misp_modules/modules/expansion/apiosintds.py b/misp_modules/modules/expansion/apiosintds.py index ac0dfa4..afa73ee 100644 --- a/misp_modules/modules/expansion/apiosintds.py +++ b/misp_modules/modules/expansion/apiosintds.py @@ -16,14 +16,14 @@ misperrors = {'error': 'Error'} mispattributes = {'input': ["domain", "domain|ip", "hostname", "ip-dst", "ip-src", "ip-dst|port", "ip-src|port", "url", "md5", "sha1", "sha256", "filename|md5", "filename|sha1", "filename|sha256"], - 'output': ["domain", "ip-dst", "url", "comment", "md5", "sha1", "sha256"] + 'output': ["domain", "ip-dst", "url", "comment", "md5", "sha1", "sha256", "link", "text"] } -moduleinfo = {'version': '0.1', 'author': 'Davide Baglieri aka davidonzo', +moduleinfo = {'version': '0.2', 'author': 'Davide Baglieri aka davidonzo', 'description': 'On demand query API for OSINT.digitalside.it project.', 'module-type': ['expansion', 'hover']} -moduleconfig = ['import_related_hashes', 'cache', 'cache_directory'] +moduleconfig = ['STIX2_details', 'import_related', 'cache', 'cache_directory', 'cache_timeout_h', 'local_directory'] def handler(q=False): @@ -62,18 +62,49 @@ def handler(q=False): tosubmit.append(request['filename|sha256'].split('|')[1]) else: return False + + persistent = 0 + if request.get('persistent'): + persistent = request["persistent"] submitcache = False submitcache_directory = False - import_related_hashes = False + submitcache_timeout = False + submit_stix = False + import_related = False + sumbit_localdirectory = False r = {"results": []} if request.get('config'): + if 
request['config'].get('cache') and request['config']['cache'].lower() == "yes": submitcache = True - if request['config'].get('import_related_hashes') and request['config']['import_related_hashes'].lower() == "yes": - import_related_hashes = True + + if request['config'].get('import_related') and request['config']['import_related'].lower() == "yes": + import_related = True + + if request['config'].get('STIX2_details') and request['config']['STIX2_details'].lower() == "yes": + submit_stix = True + + if request['config'].get('cache_timeout_h') and len(request['config']['cache_timeout_h']) > 0: + submitcache_timeout = int(request['config'].get('cache_timeout_h')) + + localdirectory = request['config'].get('local_directory') + if localdirectory and len(localdirectory) > 0: + if os.access(localdirectory, os.R_OK): + sumbit_localdirectory = localdirectory + WarningMSG = "Local directory OK! Ignoring cache configuration..." + log.debug(str(WarningMSG)) + submitcache = False + submitcache_timeout = False + submitcache_directory = False + else: + ErrorMSG = "Unable to read local 'Threat-Intel' directory ("+localdirectory+"). Please, check your configuration and retry." + log.debug(str(ErrorMSG)) + misperrors['error'] = ErrorMSG + return misperrors + if submitcache: cache_directory = request['config'].get('cache_directory') if cache_directory and len(cache_directory) > 0: @@ -90,52 +121,227 @@ def handler(q=False): misperrors['error'] = ErrorMSG return misperrors else: - log.debug("Cache option is set to " + str(submitcache) + ". You are not using the internal cache system and this is NOT recommended!") - log.debug("Please, consider to turn on the cache setting it to 'Yes' and specifing a writable directory for the cache directory option.") + if sumbit_localdirectory == False: + log.debug("Cache option is set to " + str(submitcache) + ". 
You are not using the internal cache system and this is NOT recommended!") + log.debug("Please, consider to turn on the cache setting it to 'Yes' and specifying a writable directory for the cache directory option.") try: - response = apiosintDS.request(entities=tosubmit, cache=submitcache, cachedirectory=submitcache_directory, verbose=True) - r["results"] += reversed(apiosintParser(response, import_related_hashes)) - except Exception as e: + response = apiosintDS.request(entities=tosubmit, stix=submit_stix, cache=submitcache, cachedirectory=submitcache_directory, cachetimeout=submitcache_timeout, verbose=True, localdirectory=sumbit_localdirectory) + r["results"] += apiosintParserHover(persistent, response, import_related, submit_stix) + except ValueError as e: log.debug(str(e)) misperrors['error'] = str(e) return r - -def apiosintParser(response, import_related_hashes): +def apiosintParserHover(ispersistent, response, import_related, stix): + apiosinttype = ['hash', 'ip', 'url', 'domain'] + line = "##############################################" + linedot = "--------------------------------------------------------------------" + linedotty = "-------------------" ret = [] + retHover = [] if isinstance(response, dict): for key in response: - for item in response[key]["items"]: - if item["response"]: - comment = item["item"] + " IS listed by OSINT.digitalside.it. Date list: " + response[key]["list"]["date"] - if key == "url": - if "hashes" in item.keys(): - if "sha256" in item["hashes"].keys(): - ret.append({"types": ["sha256"], "values": [item["hashes"]["sha256"]]}) - if "sha1" in item["hashes"].keys(): - ret.append({"types": ["sha1"], "values": [item["hashes"]["sha1"]]}) - if "md5" in item["hashes"].keys(): - ret.append({"types": ["md5"], "values": [item["hashes"]["md5"]]}) + if key in apiosinttype: + for item in response[key]["items"]: + if item["response"]: + comment = "IoC '"+item["item"] + "' found in OSINT.DigitalSide.it repository. 
List file: "+response[key]["list"]["file"]+". List date: " + response[key]["list"]["date"] + commentH = "IoC '"+item["item"] + "' found in OSINT.DigitalSide.it repository." + CommentHDate = "List file: "+response[key]["list"]["file"]+". Date list: " + response[key]["list"]["date"] + ret.append({"types": ["text"], "values": [comment]}) + + retHover.append({"types": ["text"], "values": [commentH]}) + retHover.append({"types": ["text"], "values": [CommentHDate]}) + retHover.append({"types": ["text"], "values": [line]}) + + if key in ["url", "hash"]: + if "hashes" in item: + headhash = "Hashes set" + retHover.append({"types": ["text"], "values": [headhash]}) + if "md5" in item["hashes"].keys(): + ret.append({"types": ["md5"], "values": [item["hashes"]["md5"]], "comment": "Related to: " + item["item"]}) + + strmd5 = "MD5: "+item["hashes"]["md5"] + retHover.append({"types": ["text"], "values": [strmd5]}) + + if "sha1" in item["hashes"].keys(): + ret.append({"types": ["sha1"], "values": [item["hashes"]["sha1"]], "comment": "Related to: " + item["item"]}) + + strsha1 = "SHA1: "+item["hashes"]["sha1"] + retHover.append({"types": ["text"], "values": [strsha1]}) + + if "sha256" in item["hashes"].keys(): + ret.append({"types": ["sha256"], "values": [item["hashes"]["sha256"]], "comment": "Related to: " + item["item"]}) + + strsha256 = "SHA256: "+item["hashes"]["sha256"] + retHover.append({"types": ["text"], "values": [strsha256]}) + + if "online_reports" in item: + headReports = "Online Reports (availability depends on retention)" + retHover.append({"types": ["text"], "values": [linedot]}) + retHover.append({"types": ["text"], "values": [headReports]}) + onlierepor = item["online_reports"] + ret.append({"category": "External analysis", "types": ["link"], "values": [onlierepor["MISP_EVENT"]], "comment": "MISP Event related to: " + item["item"]}) + ret.append({"category": "External analysis", "types": ["link"], "values": [onlierepor["MISP_CSV"]], "comment": "MISP CSV related to: 
" + item["item"]}) + ret.append({"category": "External analysis", "types": ["link"], "values": [onlierepor["OSINTDS_REPORT"]], "comment": "DigitalSide report related to: " + item["item"]}) + ret.append({"category": "External analysis", "types": ["link"], "values": [onlierepor["STIX"]], "comment": "STIX2 report related to: " + item["item"]}) + + MISPEVENT = "MISP Event => "+onlierepor["MISP_EVENT"] + MISPCSV = "MISP CSV => "+onlierepor["MISP_CSV"] + OSINTDS = "DigitalSide report => "+onlierepor["OSINTDS_REPORT"] + STIX = "STIX report => "+onlierepor["STIX"] + + retHover.append({"types": ["text"], "values": [MISPEVENT]}) + retHover.append({"types": ["text"], "values": [MISPCSV]}) + retHover.append({"types": ["text"], "values": [OSINTDS]}) + retHover.append({"types": ["text"], "values": [STIX]}) + + if stix and onlierepor: + if "STIXDETAILS" in onlierepor: + retHover.append({"types": ["text"], "values": [linedot]}) + headStix = "STIX2 report details" + stixobj = onlierepor["STIXDETAILS"] + stxdet = "TLP:"+stixobj["tlp"]+" | Observation: "+str(stixobj["number_observed"])+" | First seen: "+stixobj["first_observed"]+" | Last seen: "+stixobj["last_observed"] + ret.append({"types": ["comment"], "values": [stxdet], "comment": "STIX2 details for: " + item["item"]}) + retHover.append({"types": ["text"], "values": [headStix]}) + retHover.append({"types": ["text"], "values": [stxdet]}) + + + if stixobj["observed_time_frame"] != False: + obstf = "Observation time frame: "+str(stixobj["observed_time_frame"]) + ret.append({"types": ["comment"], "values": [obstf], "comment": "STIX2 details for: " + item["item"]}) + retHover.append({"types": ["text"], "values": [obstf]}) + + filename = stixobj["filename"] + ret.append({"category": "Payload delivery", "types": ["filename"], "values": [filename], "comment": "STIX2 details for: " + item["item"]}) + + Hovefilename = "Filename: "+filename + retHover.append({"types": ["text"], "values": [Hovefilename]}) + + filesize = stixobj["filesize"] 
+ ret.append({"types": ["size-in-bytes"], "values": [filesize], "comment": "STIX2 details for: " + item["item"]}) + + Hovefilesize = "Filesize in bytes: "+str(filesize) + retHover.append({"types": ["text"], "values": [Hovefilesize]}) + + filetype = stixobj["mime_type"] + ret.append({"category": "Payload delivery", "types": ["mime-type"], "values": [filetype], "comment": "STIX2 details for: " + item["item"]}) + + Hovemime = "Filetype: "+filetype + retHover.append({"types": ["text"], "values": [Hovemime]}) + + if "virus_total" in stixobj: + if stixobj["virus_total"] != False: + VTratio = "VirusTotal Ratio: "+str(stixobj["virus_total"]["vt_detection_ratio"]) + ret.append({"types": ["comment"], "values": [VTratio], "comment": "STIX2 details for: " + item["item"]}) + retHover.append({"types": ["text"], "values": [VTratio]}) - if len(item["related_urls"]) > 0: - for urls in item["related_urls"]: - if isinstance(urls, dict): - itemToInclude = urls["url"] - if import_related_hashes: - if "hashes" in urls.keys(): - if "sha256" in urls["hashes"].keys(): - ret.append({"types": ["sha256"], "values": [urls["hashes"]["sha256"]], "comment": "Related to: " + itemToInclude}) - if "sha1" in urls["hashes"].keys(): - ret.append({"types": ["sha1"], "values": [urls["hashes"]["sha1"]], "comment": "Related to: " + itemToInclude}) - if "md5" in urls["hashes"].keys(): - ret.append({"types": ["md5"], "values": [urls["hashes"]["md5"]], "comment": "Related to: " + itemToInclude}) - ret.append({"types": ["url"], "values": [itemToInclude], "comment": "Related to: " + item["item"]}) - else: - ret.append({"types": ["url"], "values": [urls], "comment": "Related URL to: " + item["item"]}) - else: - comment = item["item"] + " IS NOT listed by OSINT.digitalside.it. 
Date list: " + response[key]["list"]["date"] - ret.append({"types": ["text"], "values": [comment]}) - return ret + VTReport = str(stixobj["virus_total"]["vt_report"]) + ret.append({"category": "External analysis", "types": ["link"], "values": [VTReport], "comment": "VirusTotal Report for: " + item["item"]}) + if import_related: + if len(item["related_urls"]) > 0: + retHover.append({"types": ["text"], "values": [linedot]}) + countRelated = "Related URLS count: "+str(len(item["related_urls"])) + retHover.append({"types": ["text"], "values": [countRelated]}) + for urls in item["related_urls"]: + if isinstance(urls, dict): + itemToInclude = urls["url"] + ret.append({"types": ["url"], "values": [itemToInclude], "comment": "Download URL for "+urls["hashes"]["md5"]+". Related to: " + item["item"]}) + + retHover.append({"types": ["text"], "values": [linedot]}) + relatedURL = "Related URL "+itemToInclude + retHover.append({"types": ["text"], "values": [relatedURL]}) + + if "hashes" in urls.keys(): + if "md5" in urls["hashes"].keys(): + ret.append({"types": ["md5"], "values": [urls["hashes"]["md5"]], "comment": "Related to: " + itemToInclude}) + + strmd5 = "MD5: "+urls["hashes"]["md5"] + retHover.append({"types": ["text"], "values": [strmd5]}) + + if "sha1" in urls["hashes"].keys(): + ret.append({"types": ["sha1"], "values": [urls["hashes"]["sha1"]], "comment": "Related to: " + itemToInclude}) + + strsha1 = "SHA1: "+urls["hashes"]["sha1"] + retHover.append({"types": ["text"], "values": [strsha1]}) + + if "sha256" in urls["hashes"].keys(): + ret.append({"types": ["sha256"], "values": [urls["hashes"]["sha256"]], "comment": "Related to: " + itemToInclude}) + + strsha256 = "SHA256: "+urls["hashes"]["sha256"] + retHover.append({"types": ["text"], "values": [strsha256]}) + + + headReports = "Online Reports (availability depends on retention)" + retHover.append({"types": ["text"], "values": [linedotty]}) + retHover.append({"types": ["text"], "values": [headReports]}) + onlierepor = 
urls["online_reports"] + ret.append({"category": "External analysis", "types": ["link"], "values": [onlierepor["MISP_EVENT"]], "comment": "MISP Event related to: " + item["item"]}) + ret.append({"category": "External analysis", "types": ["link"], "values": [onlierepor["MISP_CSV"]], "comment": "MISP CSV related to: " + item["item"]}) + ret.append({"category": "External analysis", "types": ["link"], "values": [onlierepor["OSINTDS_REPORT"]], "comment": "DigitalSide report related to: " + item["item"]}) + ret.append({"category": "External analysis", "types": ["link"], "values": [onlierepor["STIX"]], "comment": "STIX2 report related to: " + item["item"]}) + + MISPEVENT = "MISP Event => "+onlierepor["MISP_EVENT"] + MISPCSV = "MISP CSV => "+onlierepor["MISP_CSV"] + OSINTDS = "DigitalSide report => "+onlierepor["OSINTDS_REPORT"] + STIX = "STIX report => "+onlierepor["STIX"] + + retHover.append({"types": ["text"], "values": [MISPEVENT]}) + retHover.append({"types": ["text"], "values": [MISPCSV]}) + retHover.append({"types": ["text"], "values": [OSINTDS]}) + retHover.append({"types": ["text"], "values": [STIX]}) + + if stix and onlierepor: + if "STIXDETAILS" in onlierepor: + retHover.append({"types": ["text"], "values": [linedotty]}) + headStix = "STIX2 report details" + stixobj = onlierepor["STIXDETAILS"] + stxdet = "TLP:"+stixobj["tlp"]+" | Observation: "+str(stixobj["number_observed"])+" | First seen: "+stixobj["first_observed"]+" | Last seen: "+stixobj["last_observed"] + ret.append({"types": ["comment"], "values": [stxdet], "comment": "STIX2 details for: " + item["item"]}) + retHover.append({"types": ["text"], "values": [headStix]}) + retHover.append({"types": ["text"], "values": [stxdet]}) + + if stixobj["observed_time_frame"] != False: + obstf = "Observation time frame: "+str(stixobj["observed_time_frame"]) + ret.append({"types": ["comment"], "values": [obstf], "comment": "STIX2 details for: " + item["item"]}) + retHover.append({"types": ["text"], "values": [obstf]}) 
+ + filename = stixobj["filename"] + ret.append({"category": "Payload delivery", "types": ["filename"], "values": [filename], "comment": "STIX2 details for: " + item["item"]}) + + Hovefilename = "Filename: "+filename + retHover.append({"types": ["text"], "values": [Hovefilename]}) + + filesize = stixobj["filesize"] + ret.append({"types": ["size-in-bytes"], "values": [filesize], "comment": "STIX2 details for: " + item["item"]}) + + Hovefilesize = "Filesize in bytes: "+str(filesize) + retHover.append({"types": ["text"], "values": [Hovefilesize]}) + + filetype = stixobj["mime_type"] + ret.append({"category": "Payload delivery", "types": ["mime-type"], "values": [filetype], "comment": "STIX2 details for: " + item["item"]}) + + Hovemime = "Filetype: "+filetype + retHover.append({"types": ["text"], "values": [Hovemime]}) + + if "virus_total" in stixobj: + if stixobj["virus_total"] != False: + VTratio = "VirusTotal Ratio: "+stixobj["virus_total"]["vt_detection_ratio"] + ret.append({"types": ["comment"], "values": [VTratio], "comment": "STIX2 details for: " + item["item"]}) + retHover.append({"types": ["text"], "values": [VTratio]}) + + VTReport = stixobj["virus_total"]["vt_report"] + ret.append({"category": "External analysis", "types": ["link"], "values": [VTReport], "comment": "VirusTotal Report for: " + item["item"]}) + else: + ret.append({"types": ["url"], "values": [urls], "comment": "Download URL for: " + item["item"]}) + urlHover = "URL => "+urls + retHover.append({"types": ["text"], "values": [urlHover]}) + else: + notfound = item["item"] + " IS NOT listed by OSINT.digitalside.it. 
Date list: " + item[key]["list"]["date"] + ret.append({"types": ["comment"], "values": [notfound]}) + + if ispersistent == 0: + return ret + return retHover def introspection(): From 4e00e60951a1fd70afe5834dd0386de6db310285 Mon Sep 17 00:00:00 2001 From: Davide Date: Sun, 9 Jul 2023 13:35:47 +0200 Subject: [PATCH 2/3] Bug fix --- misp_modules/modules/expansion/apiosintds.py | 2 +- tests/test_expansions.py | 7 ++++--- 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/misp_modules/modules/expansion/apiosintds.py b/misp_modules/modules/expansion/apiosintds.py index afa73ee..7186be8 100644 --- a/misp_modules/modules/expansion/apiosintds.py +++ b/misp_modules/modules/expansion/apiosintds.py @@ -336,7 +336,7 @@ def apiosintParserHover(ispersistent, response, import_related, stix): urlHover = "URL => "+urls retHover.append({"types": ["text"], "values": [urlHover]}) else: - notfound = item["item"] + " IS NOT listed by OSINT.digitalside.it. Date list: " + item[key]["list"]["date"] + notfound = item["item"] + " IS NOT listed by OSINT.digitalside.it. 
Date list: " + response[key]["list"]["date"] ret.append({"types": ["comment"], "values": [notfound]}) if ispersistent == 0: diff --git a/tests/test_expansions.py b/tests/test_expansions.py index 5f4d326..833bada 100644 --- a/tests/test_expansions.py +++ b/tests/test_expansions.py @@ -74,12 +74,13 @@ class TestExpansions(unittest.TestCase): return data['results'][0]['values'] def test_apiosintds(self): - query = {'module': 'apiosintds', 'ip-dst': '185.255.79.90'} + query = {'module': 'apiosintds', 'ip-dst': '10.10.10.10'} response = self.misp_modules_post(query) + try: - self.assertTrue(self.get_values(response).startswith('185.255.79.90 IS listed by OSINT.digitalside.it.')) + self.assertTrue(self.get_values(response).startswith('IoC 10.10.10.10')) except AssertionError: - self.assertTrue(self.get_values(response).startswith('185.255.79.90 IS NOT listed by OSINT.digitalside.it.')) + self.assertTrue(self.get_values(response).startswith('10.10.10.10 IS NOT listed by OSINT.digitalside.it.')) def test_apivoid(self): module_name = "apivoid" From 702158ab16225550be5986c21c583f34c95589de Mon Sep 17 00:00:00 2001 From: Davide Date: Sun, 9 Jul 2023 13:37:19 +0200 Subject: [PATCH 3/3] Bug fix --- misp_modules/modules/expansion/apiosintds.py | 1 + 1 file changed, 1 insertion(+) diff --git a/misp_modules/modules/expansion/apiosintds.py b/misp_modules/modules/expansion/apiosintds.py index 7186be8..0eb8208 100644 --- a/misp_modules/modules/expansion/apiosintds.py +++ b/misp_modules/modules/expansion/apiosintds.py @@ -338,6 +338,7 @@ def apiosintParserHover(ispersistent, response, import_related, stix): else: notfound = item["item"] + " IS NOT listed by OSINT.digitalside.it. Date list: " + response[key]["list"]["date"] ret.append({"types": ["comment"], "values": [notfound]}) + retHover.append({"types": ["comment"], "values": [notfound]}) if ispersistent == 0: return ret