diff --git a/HOWTO.md b/HOWTO.md index d4a7b962..1a66402b 100644 --- a/HOWTO.md +++ b/HOWTO.md @@ -6,7 +6,7 @@ How to feed the AIL framework For the moment, there are three different ways to feed AIL with data: -1. Be a collaborator of CIRCL and ask to access our feed. It will be sent to the static IP your are using for AIL. +1. Be a collaborator of CIRCL and ask to access our feed. It will be sent to the static IP you are using for AIL. 2. You can setup [pystemon](https://github.com/CIRCL/pystemon) and use the custom feeder provided by AIL (see below). diff --git a/OVERVIEW.md b/OVERVIEW.md index cf40bad9..effb387d 100644 --- a/OVERVIEW.md +++ b/OVERVIEW.md @@ -20,6 +20,7 @@ Redis and ARDB overview - DB 0 - Lines duplicate - DB 1 - Hashes + ARDB overview --------------------------- ARDB_DB @@ -31,3 +32,42 @@ ARDB_DB * DB 6 - Tags * DB 7 - Metadata * DB 8 - Statistics + +* DB 7 - Metadata: + ----------------------------------------- BASE64 ---------------------------------------- + + HSET - 'metadata_hash:'+hash 'saved_path' saved_path + 'size' size + 'first_seen' first_seen + 'last_seen' last_seen + 'estimated_type' estimated_type + 'vt_link' vt_link + 'vt_report' vt_report + 'nb_seen_in_all_pastes' nb_seen_in_all_pastes + 'base64_decoder' nb_encoded + 'binary_decoder' nb_encoded + + SET - 'all_decoder' decoder* + + SET - 'hash_all_type' hash_type * + SET - 'hash_base64_all_type' hash_type * + SET - 'hash_binary_all_type' hash_type * + + SET - 'hash_paste:'+paste hash * + SET - 'base64_paste:'+paste hash * + SET - 'binary_paste:'+paste hash * + + ZADD - 'hash_date:'+20180622 hash * nb_seen_this_day + ZADD - 'base64_date:'+20180622 hash * nb_seen_this_day + ZADD - 'binary_date:'+20180622 hash * nb_seen_this_day + + ZADD - 'nb_seen_hash:'+hash paste * nb_seen_in_paste + ZADD - 'base64_hash:'+hash paste * nb_seen_in_paste + ZADD - 'binary_hash:'+hash paste * nb_seen_in_paste + + ZADD - 'hash_type:'+type date nb_seen + ZADD - 'base64_type:'+type date nb_seen + ZADD - 'binary_type:'+type date nb_seen + + GET - 'base64_decoded:'+date nd_decoded + GET - 'binary_decoded:'+date nd_decoded diff --git a/README.md b/README.md index 0c500efd..83bb0b86 100644 --- a/README.md +++ b/README.md @@ -1,5 +1,3 @@ -[](https://travis-ci.org/CIRCL/AIL-framework) - AIL === @@ -11,6 +9,22 @@ AIL is a modular framework to analyse potential information leaks from unstructu  +<table> +<tr> + <td>Latest Release</td> + <td><a href="https://badge.fury.io/gh/CIRCL%2FAIL-Framework"><img src="https://badge.fury.io/gh/CIRCL%2FAIL-Framework.svg" alt="GitHub version" height="18"></a></td> +</tr> +<tr> + <td>Contributors</td> + <td><img src="https://img.shields.io/github/contributors/CIRCL/AIL-Framework.svg" /></td> +</tr> +<tr> + <td>License</td> + <td><img src="https://img.shields.io/github/license/CIRCL/AIL-Framework.svg" /></td> +</tr> +</table> + + Features -------- @@ -31,14 +45,17 @@ Features * Terms, Set of terms and Regex tracking and occurrence * Many more modules for extracting phone numbers, credentials and others * Alerting to [MISP](https://github.com/MISP/MISP) to share found leaks within a threat intelligence platform using [MISP standard](https://www.misp-project.org/objects.html#_ail_leak) -* Detect and decode Base64 and store files +* Detect and decode encoded file (Base64, hex encoded or your own decoding scheme) and store files * Detect Amazon AWS and Google API keys * Detect Bitcoin address and Bitcoin private keys -* Detect private keys and certificate +* Detect private keys, certificate, keys (including 
SSH, OpenVPN) +* Detect IBAN bank accounts * Tagging system with [MISP Galaxy](https://github.com/MISP/misp-galaxy) and [MISP Taxonomies](https://github.com/MISP/misp-taxonomies) tags * UI paste submission * Create events on [MISP](https://github.com/MISP/MISP) and cases on [The Hive](https://github.com/TheHive-Project/TheHive) * Automatic paste export at detection on [MISP](https://github.com/MISP/MISP) (events) and [The Hive](https://github.com/TheHive-Project/TheHive) (alerts) on selected tags +* Extracted and decoded files can be searched by date range, type of file (mime-type) and encoding discovered +* Graph relationships between decoded file (hashes) Installation ------------ @@ -152,6 +169,12 @@ Trending charts   +Extracted encoded files from pastes +----------------------------------- + + + + Browsing -------- diff --git a/bin/ApiKey.py b/bin/ApiKey.py index e7ded9b2..faf4b2d9 100755 --- a/bin/ApiKey.py +++ b/bin/ApiKey.py @@ -86,8 +86,7 @@ if __name__ == "__main__": if message is not None: - search_api_key(message) - + search_api_key(message) else: publisher.debug("Script ApiKey is Idling 10s") diff --git a/bin/Base64.py b/bin/Base64.py index 960ca6de..e8b3fbc5 100755 --- a/bin/Base64.py +++ b/bin/Base64.py @@ -8,6 +8,7 @@ import time import os import datetime +import redis from pubsublogger import publisher @@ -31,7 +32,7 @@ def timeout_handler(signum, frame): signal.signal(signal.SIGALRM, timeout_handler) -def search_base64(content, message): +def search_base64(content, message, date): find = False base64_list = re.findall(regex_base64, content) if(len(base64_list) > 0): @@ -39,6 +40,7 @@ def search_base64(content, message): for b64 in base64_list: if len(b64) >= 40 : decode = base64.b64decode(b64) + print(decode) type = magic.from_buffer(decode, mime=True) #print(type) @@ -46,6 +48,8 @@ def search_base64(content, message): find = True hash = sha1(decode).hexdigest() + print(message) + print(hash) data = {} data['name'] = hash @@ -54,8 +58,37 @@ def search_base64(content, message): data['estimated type'] = type json_data = json.dumps(data) - save_base64_as_file(decode, type, hash, json_data) - print('found {} '.format(type)) + date_paste = '{}/{}/{}'.format(date[0:4], date[4:6], date[6:8]) + date_key = date[0:4] + date[4:6] + date[6:8] + + serv_metadata.zincrby('base64_date:'+date_key, hash, 1) + + # first time we see this hash + if not serv_metadata.hexists('metadata_hash:'+hash, 'estimated_type'): + serv_metadata.hset('metadata_hash:'+hash, 'first_seen', date_paste) + serv_metadata.hset('metadata_hash:'+hash, 'last_seen', date_paste) + else: + serv_metadata.hset('metadata_hash:'+hash, 'last_seen', date_paste) + + # first time we see this file on this paste + if serv_metadata.zscore('base64_hash:'+hash, message) is None: + print('first') + serv_metadata.hincrby('metadata_hash:'+hash, 'nb_seen_in_all_pastes', 1) + + serv_metadata.sadd('base64_paste:'+message, hash) # paste - hash map + serv_metadata.zincrby('base64_hash:'+hash, message, 1)# hash - paste map + + # create hash metadata + serv_metadata.hset('metadata_hash:'+hash, 'estimated_type', type) + serv_metadata.sadd('hash_all_type', type) + serv_metadata.sadd('hash_base64_all_type', type) + serv_metadata.zincrby('base64_type:'+type, date_key, 1) + + save_base64_as_file(decode, type, hash, json_data, id) + print('found {} '.format(type)) + # duplicate + else: + serv_metadata.zincrby('base64_hash:'+hash, message, 1) # number of b64 on this paste if(find): publisher.warning('base64 decoded') @@ -68,10 +101,10 @@ def 
search_base64(content, message): msg = 'infoleak:automatic-detection="base64";{}'.format(message) p.populate_set_out(msg, 'Tags') -def save_base64_as_file(decode, type, hash, json_data): +def save_base64_as_file(decode, type, hash, json_data, id): - filename_b64 = os.path.join(os.environ['AIL_HOME'], - p.config.get("Directories", "base64"), type, hash[:2], hash) + local_filename_b64 = os.path.join(p.config.get("Directories", "base64"), type, hash[:2], hash) + filename_b64 = os.path.join(os.environ['AIL_HOME'], local_filename_b64) filename_json = os.path.join(os.environ['AIL_HOME'], p.config.get("Directories", "base64"), type, hash[:2], hash + '.json') @@ -83,6 +116,10 @@ def save_base64_as_file(decode, type, hash, json_data): with open(filename_b64, 'wb') as f: f.write(decode) + # create hash metadata + serv_metadata.hset('metadata_hash:'+hash, 'saved_path', local_filename_b64) + serv_metadata.hset('metadata_hash:'+hash, 'size', os.path.getsize(filename_b64)) + with open(filename_json, 'w') as f: f.write(json_data) @@ -103,6 +140,12 @@ if __name__ == '__main__': p = Process(config_section) max_execution_time = p.config.getint("Base64", "max_execution_time") + serv_metadata = redis.StrictRedis( + host=p.config.get("ARDB_Metadata", "host"), + port=p.config.getint("ARDB_Metadata", "port"), + db=p.config.getint("ARDB_Metadata", "db"), + decode_responses=True) + # Sent to the logging a description of the module publisher.info("Base64 started") @@ -127,13 +170,12 @@ if __name__ == '__main__': # Do something with the message from the queue #print(filename) content = paste.get_p_content() - search_base64(content,message) - - # (Optional) Send that thing to the next queue - #p.populate_set_out(something_has_been_done) + date = str(paste._get_p_date()) + search_base64(content,message, date) except TimeoutException: - print ("{0} processing timeout".format(paste.p_path)) - continue + p.incr_module_timeout_statistic() + print ("{0} processing timeout".format(paste.p_path)) + continue else: signal.alarm(0) diff --git a/bin/Binary.py b/bin/Binary.py new file mode 100755 index 00000000..29d6f2c5 --- /dev/null +++ b/bin/Binary.py @@ -0,0 +1,184 @@ +#!/usr/bin/env python3 +# -*-coding:UTF-8 -* +""" + Binary module + + Dectect Binary and decode it +""" +import time +import os +import datetime +import redis + +from pubsublogger import publisher + +from Helper import Process +from packages import Paste + +import re +from hashlib import sha1 +import magic +import json + +import signal + +class TimeoutException(Exception): + pass + +def timeout_handler(signum, frame): + raise TimeoutException + +signal.signal(signal.SIGALRM, timeout_handler) + +def decode_binary_string(binary_string): + return ''.join(chr(int(s[i*8:i*8+8],2)) for i in range(len(s)//8)) + +def decode_binary(binary_string): + return bytes(bytearray([int(binary_string[i:i+8], 2) for i in range(0, len(binary_string), 8)])) + + +def search_binary(content, message, date): + find = False + binary_list = re.findall(regex_binary, content) + if(len(binary_list) > 0): + + for binary in binary_list: + if len(binary) >= 40 : + decode = decode_binary(binary) + print(message) + + type = magic.from_buffer(decode, mime=True) + print(type) + + find = True + hash = sha1(decode).hexdigest() + print(hash) + + data = {} + data['name'] = hash + data['date'] = datetime.datetime.now().strftime("%d/%m/%y") + data['origin'] = message + data['estimated type'] = type + json_data = json.dumps(data) + + date_paste = '{}/{}/{}'.format(date[0:4], date[4:6], date[6:8]) + 
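+                    # note (assumes the usual AIL paste date format YYYYMMDD): date_paste is the
+                    # human-readable YYYY/MM/DD form stored in the hash metadata, while the compact
+                    # date_key built below is the form embedded in the Redis/ARDB key names.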
date_key = date[0:4] + date[4:6] + date[6:8] + + serv_metadata.zincrby('binary_date:'+date_key, hash, 1) + + # first time we see this hash + if not serv_metadata.hexists('metadata_hash:'+hash, 'estimated_type'): + serv_metadata.hset('metadata_hash:'+hash, 'first_seen', date_paste) + serv_metadata.hset('metadata_hash:'+hash, 'last_seen', date_paste) + else: + serv_metadata.hset('metadata_hash:'+hash, 'last_seen', date_paste) + + # first time we see this file encoding on this paste + if serv_metadata.zscore('binary_hash:'+hash, message) is None: + print('first binary') + serv_metadata.hincrby('metadata_hash:'+hash, 'nb_seen_in_all_pastes', 1) + + serv_metadata.sadd('binary_paste:'+message, hash) # paste - hash map + serv_metadata.zincrby('binary_hash:'+hash, message, 1)# hash - paste map + + # create hash metadata + serv_metadata.hset('metadata_hash:'+hash, 'estimated_type', type) + serv_metadata.sadd('hash_all_type', type) + serv_metadata.sadd('hash_binary_all_type', type) + serv_metadata.zincrby('binary_type:'+type, date_key, 1) + + save_binary_as_file(decode, type, hash, json_data, id) + print('found {} '.format(type)) + # duplicate + else: + serv_metadata.zincrby('binary_hash:'+hash, message, 1) # number of b64 on this paste + + if(find): + publisher.warning('binary decoded') + #Send to duplicate + p.populate_set_out(message, 'Duplicate') + #send to Browse_warning_paste + msg = ('binary;{}'.format(message)) + p.populate_set_out( msg, 'alertHandler') + + msg = 'infoleak:automatic-detection="binary";{}'.format(message) + p.populate_set_out(msg, 'Tags') + +def save_binary_as_file(decode, type, hash, json_data, id): + + local_filename_b64 = os.path.join(p.config.get("Directories", "base64"), type, hash[:2], hash) + filename_b64 = os.path.join(os.environ['AIL_HOME'], local_filename_b64) + + filename_json = os.path.join(os.environ['AIL_HOME'], + p.config.get("Directories", "base64"), type, hash[:2], hash + '.json') + + dirname = os.path.dirname(filename_b64) + if not os.path.exists(dirname): + os.makedirs(dirname) + + with open(filename_b64, 'wb') as f: + f.write(decode) + + # create hash metadata + serv_metadata.hset('metadata_hash:'+hash, 'saved_path', local_filename_b64) + serv_metadata.hset('metadata_hash:'+hash, 'size', os.path.getsize(filename_b64)) + + with open(filename_json, 'w') as f: + f.write(json_data) + + + + +if __name__ == '__main__': + # If you wish to use an other port of channel, do not forget to run a subscriber accordingly (see launch_logs.sh) + # Port of the redis instance used by pubsublogger + publisher.port = 6380 + # Script is the default channel used for the modules. 
+ publisher.channel = 'Script' + + # Section name in bin/packages/modules.cfg + config_section = 'Binary' + + # Setup the I/O queues + p = Process(config_section) + max_execution_time = p.config.getint("Binary", "max_execution_time") + + serv_metadata = redis.StrictRedis( + host=p.config.get("ARDB_Metadata", "host"), + port=p.config.getint("ARDB_Metadata", "port"), + db=p.config.getint("ARDB_Metadata", "db"), + decode_responses=True) + + # Sent to the logging a description of the module + publisher.info("Binary started") + + regex_binary = '[0-1]{40,}' + re.compile(regex_binary) + + # Endless loop getting messages from the input queue + while True: + # Get one message from the input queue + message = p.get_from_set() + if message is None: + + publisher.debug("{} queue is empty, waiting".format(config_section)) + time.sleep(1) + continue + + filename = message + paste = Paste.Paste(filename) + + signal.alarm(max_execution_time) + try: + # Do something with the message from the queue + #print(filename) + content = paste.get_p_content() + date = str(paste._get_p_date()) + search_binary(content,message, date) + + except TimeoutException: + p.incr_module_timeout_statistic() + print ("{0} processing timeout".format(paste.p_path)) + continue + else: + signal.alarm(0) diff --git a/bin/Decoder.py b/bin/Decoder.py new file mode 100755 index 00000000..af385fed --- /dev/null +++ b/bin/Decoder.py @@ -0,0 +1,238 @@ +#!/usr/bin/env python3 +# -*-coding:UTF-8 -* +""" + Decoder module + + Dectect Binary and decode it +""" +import time +import os +import redis +import base64 +from hashlib import sha1 +import magic +import json +import datetime + +from pubsublogger import publisher + +from Helper import Process +from packages import Paste + +import re +import signal + +class TimeoutException(Exception): + pass + +def timeout_handler(signum, frame): + raise TimeoutException + +signal.signal(signal.SIGALRM, timeout_handler) + +def hex_decoder(hexStr): + #hexStr = ''.join( hex_string.split(" ") ) + return bytes(bytearray([int(hexStr[i:i+2], 16) for i in range(0, len(hexStr), 2)])) + +def binary_decoder(binary_string): + return bytes(bytearray([int(binary_string[i:i+8], 2) for i in range(0, len(binary_string), 8)])) + +def base64_decoder(base64_string): + return base64.b64decode(base64_string) + +def decode_string(content, message, date, encoded_list, decoder_name, encoded_min_size): + find = False + for encoded in encoded_list: + if len(encoded) >= encoded_min_size: + decode = decoder_function[decoder_name](encoded) + find = True + + save_hash(decoder_name, message, date, decode) + + #remove encoded from paste content + content = content.replace(encoded, '', 1) + + if(find): + set_out_paste(decoder_name, message) + + return content + +# # TODO: FIXME check db +def save_hash(decoder_name, message, date, decoded): + print(decoder_name) + type = magic.from_buffer(decoded, mime=True) + hash = sha1(decoded).hexdigest() + print(hash) + + data = {} + data['name'] = hash + data['date'] = datetime.datetime.now().strftime("%d/%m/%y") + data['origin'] = message + data['estimated type'] = type + json_data = json.dumps(data) + + date_paste = '{}/{}/{}'.format(date[0:4], date[4:6], date[6:8]) + date_key = date[0:4] + date[4:6] + date[6:8] + + serv_metadata.incrby(decoder_name+'_decoded:'+date_key, 1) + serv_metadata.zincrby('hash_date:'+date_key, hash, 1) + serv_metadata.zincrby(decoder_name+'_date:'+date_key, hash, 1) + + # first time we see this hash + if not serv_metadata.hexists('metadata_hash:'+hash, 'estimated_type'): 
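+        # 'estimated_type' is only written once a hash has been recorded, so its absence in
+        # 'metadata_hash:'+hash is used here as a "never seen before" sentinel.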
+ serv_metadata.hset('metadata_hash:'+hash, 'first_seen', date_paste) + serv_metadata.hset('metadata_hash:'+hash, 'last_seen', date_paste) + else: + serv_metadata.hset('metadata_hash:'+hash, 'last_seen', date_paste) + + # first time we see this hash (all encoding) on this paste + if serv_metadata.zscore('nb_seen_hash:'+hash, message) is None: + serv_metadata.hincrby('metadata_hash:'+hash, 'nb_seen_in_all_pastes', 1) + serv_metadata.sadd('hash_paste:'+message, hash) # paste - hash map + # create hash metadata + serv_metadata.hset('metadata_hash:'+hash, 'estimated_type', type) + serv_metadata.sadd('hash_all_type', type) + + # first time we see this hash encoding on this paste + if serv_metadata.zscore(decoder_name+'_hash:'+hash, message) is None: + print('first '+decoder_name) + + serv_metadata.sadd(decoder_name+'_paste:'+message, hash) # paste - hash map + + # create hash metadata + serv_metadata.sadd('hash_'+ decoder_name +'_all_type', type) + + # first time we see this hash today + if serv_metadata.zscore('hash_date:'+date_key, hash) is None: + serv_metadata.zincrby('hash_type:'+type, date_key, 1) + + # first time we see this hash encoding today + if serv_metadata.zscore(decoder_name+'_date:'+date_key, hash) is None: + serv_metadata.zincrby(decoder_name+'_type:'+type, date_key, 1) + + save_hash_on_disk(decoded, type, hash, json_data) + print('found {} '.format(type)) + + serv_metadata.hincrby('metadata_hash:'+hash, decoder_name+'_decoder', 1) + + serv_metadata.zincrby(decoder_name+'_type:'+type, date_key, 1) + + serv_metadata.zincrby('nb_seen_hash:'+hash, message, 1)# hash - paste map + serv_metadata.zincrby(decoder_name+'_hash:'+hash, message, 1) # number of b64 on this paste + + +def save_hash_on_disk(decode, type, hash, json_data): + + local_filename_hash = os.path.join(p.config.get("Directories", "hash"), type, hash[:2], hash) + filename_hash = os.path.join(os.environ['AIL_HOME'], local_filename_hash) + + filename_json = os.path.join(os.environ['AIL_HOME'], + p.config.get("Directories", "hash"), type, hash[:2], hash + '.json') + + dirname = os.path.dirname(filename_hash) + if not os.path.exists(dirname): + os.makedirs(dirname) + + with open(filename_hash, 'wb') as f: + f.write(decode) + + # create hash metadata + serv_metadata.hset('metadata_hash:'+hash, 'saved_path', local_filename_hash) + serv_metadata.hset('metadata_hash:'+hash, 'size', os.path.getsize(filename_hash)) + + with open(filename_json, 'w') as f: + f.write(json_data) + +def set_out_paste(decoder_name, message): + publisher.warning(decoder_name+' decoded') + #Send to duplicate + p.populate_set_out(message, 'Duplicate') + #send to Browse_warning_paste + msg = (decoder_name+';{}'.format(message)) + p.populate_set_out( msg, 'alertHandler') + + msg = 'infoleak:automatic-detection="'+decoder_name+'";{}'.format(message) + p.populate_set_out(msg, 'Tags') + + +if __name__ == '__main__': + # If you wish to use an other port of channel, do not forget to run a subscriber accordingly (see launch_logs.sh) + # Port of the redis instance used by pubsublogger + publisher.port = 6380 + # Script is the default channel used for the modules. 
+ publisher.channel = 'Script' + + # Section name in bin/packages/modules.cfg + config_section = 'Decoder' + + # Setup the I/O queues + p = Process(config_section) + + serv_metadata = redis.StrictRedis( + host=p.config.get("ARDB_Metadata", "host"), + port=p.config.getint("ARDB_Metadata", "port"), + db=p.config.getint("ARDB_Metadata", "db"), + decode_responses=True) + + # Sent to the logging a description of the module + publisher.info("Decoder started") + + regex_binary = '[0-1]{40,}' + #regex_hex = '(0[xX])?[A-Fa-f0-9]{40,}' + regex_hex = '[A-Fa-f0-9]{40,}' + regex_base64 = '(?:[A-Za-z0-9+/]{4}){2,}(?:[A-Za-z0-9+/]{2}[AEIMQUYcgkosw048]=|[A-Za-z0-9+/][AQgw]==)' + + re.compile(regex_binary) + re.compile(regex_hex) + re.compile(regex_base64) + + # map decoder function + decoder_function = {'binary':binary_decoder,'hexadecimal':hex_decoder, 'base64':base64_decoder} + + hex_max_execution_time = p.config.getint("Hex", "max_execution_time") + binary_max_execution_time = p.config.getint("Binary", "max_execution_time") + base64_max_execution_time = p.config.getint("Base64", "max_execution_time") + + # list all decoder yith regex, + decoder_binary = {'name': 'binary', 'regex': regex_binary, 'encoded_min_size': 300, 'max_execution_time': binary_max_execution_time} + decoder_hexadecimal = {'name': 'hexadecimal', 'regex': regex_hex, 'encoded_min_size': 300, 'max_execution_time': hex_max_execution_time} + decoder_base64 = {'name': 'base64', 'regex': regex_base64, 'encoded_min_size': 40, 'max_execution_time': base64_max_execution_time} + + decoder_order = [ decoder_base64, decoder_binary, decoder_hexadecimal, decoder_base64] + + for decoder in decoder_order: + serv_metadata.sadd('all_decoder', decoder['name']) + + # Endless loop getting messages from the input queue + while True: + # Get one message from the input queue + message = p.get_from_set() + if message is None: + + publisher.debug("{} queue is empty, waiting".format(config_section)) + time.sleep(1) + continue + + filename = message + paste = Paste.Paste(filename) + + # Do something with the message from the queue + content = paste.get_p_content() + date = str(paste._get_p_date()) + + for decoder in decoder_order: # add threshold and size limit + + # max execution time on regex + signal.alarm(decoder['max_execution_time']) + try: + encoded_list = re.findall(decoder['regex'], content) + except TimeoutException: + encoded_list = [] + p.incr_module_timeout_statistic() # add encoder type + print ("{0} processing timeout".format(paste.p_path)) + continue + else: + signal.alarm(0) + + if(len(encoded_list) > 0): + content = decode_string(content, message, date, encoded_list, decoder['name'], decoder['encoded_min_size']) diff --git a/bin/Helper.py b/bin/Helper.py index 0bb4b410..52097ef6 100755 --- a/bin/Helper.py +++ b/bin/Helper.py @@ -136,6 +136,12 @@ class Process(object): db=self.config.get('RedisPubSub', 'db'), decode_responses=True) + self.serv_statistics = redis.StrictRedis( + host=self.config.get('ARDB_Statistics', 'host'), + port=self.config.get('ARDB_Statistics', 'port'), + db=self.config.get('ARDB_Statistics', 'db'), + decode_responses=True) + self.moduleNum = os.getpid() def populate_set_in(self): @@ -165,36 +171,39 @@ class Process(object): return None else: - #try: - if '.gz' in message: - path = message.split(".")[-2].split("/")[-1] - #find start of path with AIL_HOME - index_s = message.find(os.environ['AIL_HOME']) - #Stop when .gz - index_e = message.find(".gz")+3 - if(index_s == -1): - complete_path = message[0:index_e] + try: + if 
'.gz' in message: + path = message.split(".")[-2].split("/")[-1] + #find start of path with AIL_HOME + index_s = message.find(os.environ['AIL_HOME']) + #Stop when .gz + index_e = message.find(".gz")+3 + if(index_s == -1): + complete_path = message[0:index_e] + else: + complete_path = message[index_s:index_e] + else: - complete_path = message[index_s:index_e] + path = "-" + complete_path = "?" - else: - path = "-" - complete_path = "?" + value = str(timestamp) + ", " + path + self.r_temp.set("MODULE_"+self.subscriber_name + "_" + str(self.moduleNum), value) + self.r_temp.set("MODULE_"+self.subscriber_name + "_" + str(self.moduleNum) + "_PATH", complete_path) + self.r_temp.sadd("MODULE_TYPE_"+self.subscriber_name, str(self.moduleNum)) - value = str(timestamp) + ", " + path - self.r_temp.set("MODULE_"+self.subscriber_name + "_" + str(self.moduleNum), value) - self.r_temp.set("MODULE_"+self.subscriber_name + "_" + str(self.moduleNum) + "_PATH", complete_path) - self.r_temp.sadd("MODULE_TYPE_"+self.subscriber_name, str(self.moduleNum)) - return message + curr_date = datetime.date.today() + self.serv_statistics.hincrby(curr_date.strftime("%Y%m%d"),'paste_by_modules_in:'+self.subscriber_name, 1) + return message - #except: - #print('except') - #path = "?" - #value = str(timestamp) + ", " + path - #self.r_temp.set("MODULE_"+self.subscriber_name + "_" + str(self.moduleNum), value) - #self.r_temp.set("MODULE_"+self.subscriber_name + "_" + str(self.moduleNum) + "_PATH", "?") - #self.r_temp.sadd("MODULE_TYPE_"+self.subscriber_name, str(self.moduleNum)) - #return message + except: + print('except') + path = "?" + value = str(timestamp) + ", " + path + self.r_temp.set("MODULE_"+self.subscriber_name + "_" + str(self.moduleNum), value) + self.r_temp.set("MODULE_"+self.subscriber_name + "_" + str(self.moduleNum) + "_PATH", "?") + self.r_temp.sadd("MODULE_TYPE_"+self.subscriber_name, str(self.moduleNum)) + return message def populate_set_out(self, msg, channel=None): # multiproc @@ -221,3 +230,7 @@ class Process(object): time.sleep(1) continue self.pubsub.publish(message) + + def incr_module_timeout_statistic(self): + curr_date = datetime.date.today() + self.serv_statistics.hincrby(curr_date.strftime("%Y%m%d"),'paste_by_modules_timeout:'+self.subscriber_name, 1) diff --git a/bin/Keys.py b/bin/Keys.py index 7b1ec7dc..d3c292ba 100755 --- a/bin/Keys.py +++ b/bin/Keys.py @@ -71,6 +71,14 @@ def search_key(paste): p.populate_set_out(msg, 'Tags') find = True + if '---- BEGIN SSH2 ENCRYPTED PRIVATE KEY ----' in content: + publisher.warning('{} has an ssh2 private key message'.format(paste.p_name)) + print('SSH2 private key message found') + + msg = 'infoleak:automatic-detection="private-ssh-key";{}'.format(message) + p.populate_set_out(msg, 'Tags') + find = True + if '-----BEGIN OpenVPN Static key V1-----' in content: publisher.warning('{} has an openssh private key message'.format(paste.p_name)) print('OpenVPN Static key message found') diff --git a/bin/LAUNCH.sh b/bin/LAUNCH.sh index 998a676a..c3bfd8cf 100755 --- a/bin/LAUNCH.sh +++ b/bin/LAUNCH.sh @@ -168,7 +168,7 @@ function launching_scripts { sleep 0.1 screen -S "Script_AIL" -X screen -t "Keys" bash -c 'cd '${AIL_BIN}'; ./Keys.py; read x' sleep 0.1 - screen -S "Script_AIL" -X screen -t "Base64" bash -c 'cd '${AIL_BIN}'; ./Base64.py; read x' + screen -S "Script_AIL" -X screen -t "Decoder" bash -c 'cd '${AIL_BIN}'; ./Decoder.py; read x' sleep 0.1 screen -S "Script_AIL" -X screen -t "Bitcoin" bash -c 'cd '${AIL_BIN}'; ./Bitcoin.py; read x' sleep 0.1 diff --git 
a/bin/MISP_The_Hive_feeder.py b/bin/MISP_The_Hive_feeder.py index d9daa299..7fa6b223 100755 --- a/bin/MISP_The_Hive_feeder.py +++ b/bin/MISP_The_Hive_feeder.py @@ -150,16 +150,18 @@ if __name__ == "__main__": if flag_the_hive: try: HiveApi = TheHiveApi(the_hive_url, the_hive_key, cert = the_hive_verifycert) - r_serv_db.set('ail:thehive', True) except: HiveApi = False flag_the_hive = False r_serv_db.set('ail:thehive', False) print('Not connected to The HIVE') + else: + HiveApi = False if HiveApi != False and flag_the_hive: try: HiveApi.get_alert(0) + r_serv_db.set('ail:thehive', True) print('Connected to The HIVE:', the_hive_url) except thehive4py.exceptions.AlertException: HiveApi = False diff --git a/bin/Mixer.py b/bin/Mixer.py index 98709ea5..96f20815 100755 --- a/bin/Mixer.py +++ b/bin/Mixer.py @@ -68,6 +68,12 @@ if __name__ == '__main__': db=cfg.getint("Redis_Mixer_Cache", "db"), decode_responses=True) + server_cache = redis.StrictRedis( + host=cfg.get("Redis_Log_submit", "host"), + port=cfg.getint("Redis_Log_submit", "port"), + db=cfg.getint("Redis_Log_submit", "db"), + decode_responses=True) + # LOGGING # publisher.info("Feed Script started to receive & publish.") @@ -184,7 +190,17 @@ if __name__ == '__main__': publisher.debug("Empty Paste: {0} not processed".format(message)) else: print("Empty Queues: Waiting...") + if int(time.time() - time_1) > refresh_time: + # update internal feeder + list_feeder = server_cache.hkeys("mixer_cache:list_feeder") + if list_feeder: + for feeder in list_feeder: + count = int(server_cache.hget("mixer_cache:list_feeder", feeder)) + if count is None: + count = 0 + processed_paste_per_feeder[feeder] = processed_paste_per_feeder.get(feeder, 0) + count + processed_paste = processed_paste + count print(processed_paste_per_feeder) to_print = 'Mixer; ; ; ;mixer_all All_feeders Processed {0} paste(s) in {1}sec'.format(processed_paste, refresh_time) print(to_print) @@ -204,5 +220,8 @@ if __name__ == '__main__': duplicated_paste_per_feeder[feeder] = 0 time_1 = time.time() + + # delete internal feeder list + server_cache.delete("mixer_cache:list_feeder") time.sleep(0.5) continue diff --git a/bin/Release.py b/bin/Release.py index 6e7a8277..43c84b04 100755 --- a/bin/Release.py +++ b/bin/Release.py @@ -37,6 +37,7 @@ if __name__ == "__main__": regex = '|'.join(regexs) while True: + signal.alarm(max_execution_time) filepath = p.get_from_set() if filepath is None: publisher.debug("Script Release is Idling 10s") @@ -47,7 +48,7 @@ if __name__ == "__main__": paste = Paste.Paste(filepath) content = paste.get_p_content() - signal.alarm(max_execution_time) + #signal.alarm(max_execution_time) try: releases = set(re.findall(regex, content)) if len(releases) == 0: @@ -61,7 +62,8 @@ if __name__ == "__main__": publisher.info(to_print) except TimeoutException: - print ("{0} processing timeout".format(paste.p_path)) - continue + p.incr_module_timeout_statistic() + print ("{0} processing timeout".format(paste.p_path)) + continue else: signal.alarm(0) diff --git a/bin/SentimentAnalysis.py b/bin/SentimentAnalysis.py index 34beea3f..8442befa 100755 --- a/bin/SentimentAnalysis.py +++ b/bin/SentimentAnalysis.py @@ -167,6 +167,7 @@ if __name__ == '__main__': try: Analyse(message, server) except TimeoutException: + p.incr_module_timeout_statistic() print ("{0} processing timeout".format(message)) continue else: diff --git a/bin/Tags.py b/bin/Tags.py index 15f8f837..0a178fef 100755 --- a/bin/Tags.py +++ b/bin/Tags.py @@ -11,6 +11,7 @@ This module create tags. 
import redis import time +import datetime from pubsublogger import publisher from Helper import Process @@ -41,6 +42,12 @@ if __name__ == '__main__': db=p.config.get("ARDB_Metadata", "db"), decode_responses=True) + serv_statistics = redis.StrictRedis( + host=p.config.get('ARDB_Statistics', 'host'), + port=p.config.get('ARDB_Statistics', 'port'), + db=p.config.get('ARDB_Statistics', 'db'), + decode_responses=True) + # Sent to the logging a description of the module publisher.info("Tags module started") @@ -67,4 +74,6 @@ if __name__ == '__main__': print(" tagged: {}".format(tag)) server_metadata.sadd('tag:'+path, tag) + curr_date = datetime.date.today() + serv_statistics.hincrby(curr_date.strftime("%Y%m%d"),'paste_tagged:'+tag, 1) p.populate_set_out(message, 'MISP_The_Hive_feeder') diff --git a/bin/Tokenize.py b/bin/Tokenize.py index fdefeb6a..698b4fbc 100755 --- a/bin/Tokenize.py +++ b/bin/Tokenize.py @@ -60,8 +60,9 @@ if __name__ == "__main__": msg = '{} {} {}'.format(paste.p_path, word, score) p.populate_set_out(msg) except TimeoutException: - print ("{0} processing timeout".format(paste.p_path)) - continue + p.incr_module_timeout_statistic() + print ("{0} processing timeout".format(paste.p_path)) + continue else: signal.alarm(0) else: diff --git a/bin/Web.py b/bin/Web.py index 45e5bfbe..3d53e306 100755 --- a/bin/Web.py +++ b/bin/Web.py @@ -124,6 +124,7 @@ if __name__ == "__main__": except ipaddress.AddressValueError: continue cc = getattr(l, 'cc') + asn = '' if getattr(l, 'asn') is not None: asn = getattr(l, 'asn')[2:] #remobe b' diff --git a/bin/feeder/pystemon-feeder.py b/bin/feeder/pystemon-feeder.py index 50ffaeba..a59a0a5b 100755 --- a/bin/feeder/pystemon-feeder.py +++ b/bin/feeder/pystemon-feeder.py @@ -62,12 +62,13 @@ while True: print(paste) if paste is None: continue - socket.send("%d %s" % (topic, paste)) + socket.send_string("%d %s" % (topic, paste)) topic = 102 try: - messagedata = open(pystemonpath+paste).read() - socket.send("%d %s %s" % (topic, paste, base64.b64encode(messagedata))) - sleep_inc = sleep_inc-0.01 if sleep_inc-0.01 > 0 else 0 + with open(pystemonpath+paste, 'rb') as f: #.read() + messagedata = f.read() + socket.send_string("%d %s %s" % (topic, paste, base64.b64encode(messagedata).decode())) + sleep_inc = sleep_inc-0.01 if sleep_inc-0.01 > 0 else 0 except IOError as e: # file not found, could be a buffering issue -> increase sleeping time print('IOError: Increasing sleep time') diff --git a/bin/packages/Paste.py b/bin/packages/Paste.py index d1e3f0d3..6942cb31 100755 --- a/bin/packages/Paste.py +++ b/bin/packages/Paste.py @@ -52,7 +52,7 @@ class Paste(object): :Example: - PST = Paste("/home/2013/ZEeGaez5.gz") + PST = Paste("/home/2013/01/12/ZEeGaez5.gz") """ diff --git a/bin/packages/config.cfg.sample b/bin/packages/config.cfg.sample index 2ed662c1..eead0357 100644 --- a/bin/packages/config.cfg.sample +++ b/bin/packages/config.cfg.sample @@ -2,6 +2,7 @@ bloomfilters = Blooms dicofilters = Dicos pastes = PASTES +hash = HASHS base64 = BASE64 wordtrending_csv = var/www/static/csv/wordstrendingdata @@ -31,6 +32,8 @@ sender_port = 1337 ##### Flask ##### [Flask] +#Number of logs to display in the dashboard +max_dashboard_logs = 15 #Maximum number of character to display in the toolip max_preview_char = 250 #Maximum number of character to display in the modal @@ -65,6 +68,14 @@ max_execution_time = 90 path = Base64/ max_execution_time = 60 +[Binary] +path = Base64/ +max_execution_time = 60 + +[Hex] +path = Base64/ +max_execution_time = 60 + [Modules_Duplicates] 
#Number of month to look back maximum_month_range = 3 diff --git a/bin/packages/modules.cfg b/bin/packages/modules.cfg index 452850f7..8cd8f570 100644 --- a/bin/packages/modules.cfg +++ b/bin/packages/modules.cfg @@ -125,7 +125,7 @@ publish = Redis_Duplicate,Redis_alertHandler,Redis_Tags subscribe = Redis_ApiKey publish = Redis_Duplicate,Redis_alertHandler,Redis_Tags -[Base64] +[Decoder] subscribe = Redis_Global publish = Redis_Duplicate,Redis_alertHandler,Redis_Tags diff --git a/bin/submit_paste.py b/bin/submit_paste.py index 5f18fd6a..a999ec39 100755 --- a/bin/submit_paste.py +++ b/bin/submit_paste.py @@ -40,6 +40,9 @@ def create_paste(uuid, paste_content, ltags, ltagsgalaxies, name): relay_message = "{0} {1}".format(save_path, gzip64encoded) p.populate_set_out(relay_message, 'Mixer') + # increase nb of paste by feeder name + r_serv_log_submit.hincrby("mixer_cache:list_feeder", "submitted", 1) + # add tags add_tags(ltags, ltagsgalaxies, full_path) @@ -52,6 +55,9 @@ def create_paste(uuid, paste_content, ltags, ltagsgalaxies, name): print(' {} send to Global'.format(save_path)) r_serv_log_submit.sadd(uuid + ':paste_submit_link', full_path) + curr_date = datetime.date.today() + serv_statistics.hincrby(curr_date.strftime("%Y%m%d"),'submit_paste', 1) + return 0 def addError(uuid, errorMessage): @@ -64,6 +70,8 @@ def addError(uuid, errorMessage): def abord_file_submission(uuid, errorMessage): addError(uuid, errorMessage) r_serv_log_submit.set(uuid + ':end', 1) + curr_date = datetime.date.today() + serv_statistics.hincrby(curr_date.strftime("%Y%m%d"),'submit_abord', 1) remove_submit_uuid(uuid) @@ -158,6 +166,12 @@ if __name__ == "__main__": db=cfg.getint("ARDB_Metadata", "db"), decode_responses=True) + serv_statistics = redis.StrictRedis( + host=cfg.get('ARDB_Statistics', 'host'), + port=cfg.getint('ARDB_Statistics', 'port'), + db=cfg.getint('ARDB_Statistics', 'db'), + decode_responses=True) + expire_time = 120 MAX_FILE_SIZE = 1000000000 ALLOWED_EXTENSIONS = ['txt', 'sh', 'pdf'] diff --git a/doc/screenshots/ail-hashedfiles.png b/doc/screenshots/ail-hashedfiles.png new file mode 100644 index 00000000..4589d429 Binary files /dev/null and b/doc/screenshots/ail-hashedfiles.png differ diff --git a/doc/screenshots/hashedfile-graph.png b/doc/screenshots/hashedfile-graph.png new file mode 100644 index 00000000..01f365d8 Binary files /dev/null and b/doc/screenshots/hashedfile-graph.png differ diff --git a/var/www/modules/Flask_config.py b/var/www/modules/Flask_config.py index 2c3e736a..256ea3a8 100644 --- a/var/www/modules/Flask_config.py +++ b/var/www/modules/Flask_config.py @@ -145,3 +145,18 @@ bootstrap_label = ['primary', 'success', 'danger', 'warning', 'info'] UPLOAD_FOLDER = os.path.join(os.environ['AIL_FLASK'], 'submitted') max_dashboard_logs = int(cfg.get("Flask", "max_dashboard_logs")) + +# VT +try: + from virusTotalKEYS import vt_key + if vt_key != '': + vt_auth = vt_key + vt_enabled = True + print('VT submission is enabled') + else: + vt_enabled = False + print('VT submission is disabled') +except: + vt_auth = {'apikey': cfg.get("Flask", "max_preview_char")} + vt_enabled = False + print('VT submission is disabled') diff --git a/var/www/modules/PasteSubmit/templates/PasteSubmit.html b/var/www/modules/PasteSubmit/templates/PasteSubmit.html index ce1fb29f..a0636332 100644 --- a/var/www/modules/PasteSubmit/templates/PasteSubmit.html +++ b/var/www/modules/PasteSubmit/templates/PasteSubmit.html @@ -5,7 +5,8 @@ <meta charset="utf-8"> <meta name="viewport" content="width=device-width, 
initial-scale=1.0"> - <title>Analysis Information Leak framework Dashboard</title> + <title>Submit Paste - AIL</title> + <link rel="icon" href="{{ url_for('static', filename='image/ail-icon.png') }}"> <!-- Core CSS --> <link href="{{ url_for('static', filename='css/bootstrap.min.css') }}" rel="stylesheet"> @@ -31,6 +32,8 @@ <div class="row"> <!-- /.col-lg-12 --> + + <form action="/PasteSubmit/submit" id="pasteSubmitForm" method="post" enctype=multipart/form-data onsubmit="submitPaste()"> <input type="hidden" id="tags_taxonomies" name="tags_taxonomies" value="test"> diff --git a/var/www/modules/PasteSubmit/templates/edit_tag_export.html b/var/www/modules/PasteSubmit/templates/edit_tag_export.html index 01822042..d9e8a471 100644 --- a/var/www/modules/PasteSubmit/templates/edit_tag_export.html +++ b/var/www/modules/PasteSubmit/templates/edit_tag_export.html @@ -5,7 +5,8 @@ <meta charset="utf-8"> <meta name="viewport" content="width=device-width, initial-scale=1.0"> - <title>Analysis Information Leak framework</title> + <title>Tags Export - AIL</title> + <link rel="icon" href="{{ url_for('static', filename='image/ail-icon.png') }}"> <!-- Core CSS --> <link href="{{ url_for('static', filename='css/bootstrap.min.css') }}" rel="stylesheet"> diff --git a/var/www/modules/PasteSubmit/templates/header_PasteSubmit.html b/var/www/modules/PasteSubmit/templates/header_PasteSubmit.html index 9abd9029..4a6a3b77 100644 --- a/var/www/modules/PasteSubmit/templates/header_PasteSubmit.html +++ b/var/www/modules/PasteSubmit/templates/header_PasteSubmit.html @@ -1 +1 @@ -<li id='page-PasteSubmit'><a href="{{ url_for('PasteSubmit.PasteSubmit_page') }}"><i class="glyphicon glyphicon-new-window white"></i> PasteSubmit </a></li> +<li id='page-PasteSubmit'><a href="{{ url_for('PasteSubmit.PasteSubmit_page') }}"><i class="glyphicon glyphicon-new-window white"></i> Submit Paste </a></li> diff --git a/var/www/modules/PasteSubmit/templates/submiting.html b/var/www/modules/PasteSubmit/templates/submiting.html index b7ad78e3..6174742f 100644 --- a/var/www/modules/PasteSubmit/templates/submiting.html +++ b/var/www/modules/PasteSubmit/templates/submiting.html @@ -5,7 +5,8 @@ <meta charset="utf-8"> <meta name="viewport" content="width=device-width, initial-scale=1.0"> - <title>Analysis Information Leak framework Dashboard</title> + <title>Submit Paste - AIL</title> + <link rel="icon" href="{{ url_for('static', filename='image/ail-icon.png') }}"> <!-- Core CSS --> <link href="{{ url_for('static', filename='css/bootstrap.min.css') }}" rel="stylesheet"> diff --git a/var/www/modules/Tags/templates/Tags.html b/var/www/modules/Tags/templates/Tags.html index c9ecb06d..143fddb3 100644 --- a/var/www/modules/Tags/templates/Tags.html +++ b/var/www/modules/Tags/templates/Tags.html @@ -5,7 +5,8 @@ <meta charset="utf-8"> <meta name="viewport" content="width=device-width, initial-scale=1.0"> - <title>Analysis Information Leak framework Dashboard</title> + <title>Tags - AIL</title> + <link rel="icon" href="{{ url_for('static', filename='image/ail-icon.png') }}"> <!-- Core CSS --> <link href="{{ url_for('static', filename='css/bootstrap.min.css') }}" rel="stylesheet"> diff --git a/var/www/modules/Tags/templates/edit_galaxy.html b/var/www/modules/Tags/templates/edit_galaxy.html index c6e10f6c..0d1b1bf9 100644 --- a/var/www/modules/Tags/templates/edit_galaxy.html +++ b/var/www/modules/Tags/templates/edit_galaxy.html @@ -5,7 +5,8 @@ <meta charset="utf-8"> <meta name="viewport" content="width=device-width, initial-scale=1.0"> - <title>Analysis 
Information Leak framework Dashboard</title> + <title>Edit Galaxy - AIL</title> + <link rel="icon" href="{{ url_for('static', filename='image/ail-icon.png') }}"> <!-- Core CSS --> <link href="{{ url_for('static', filename='css/bootstrap.min.css') }}" rel="stylesheet"> diff --git a/var/www/modules/Tags/templates/edit_taxonomie.html b/var/www/modules/Tags/templates/edit_taxonomie.html index 74ea5b9c..45f44282 100644 --- a/var/www/modules/Tags/templates/edit_taxonomie.html +++ b/var/www/modules/Tags/templates/edit_taxonomie.html @@ -5,7 +5,8 @@ <meta charset="utf-8"> <meta name="viewport" content="width=device-width, initial-scale=1.0"> - <title>Analysis Information Leak framework Dashboard</title> + <title>Edit Taxonomie - AIL</title> + <link rel="icon" href="{{ url_for('static', filename='image/ail-icon.png') }}"> <!-- Core CSS --> <link href="{{ url_for('static', filename='css/bootstrap.min.css') }}" rel="stylesheet"> diff --git a/var/www/modules/Tags/templates/galaxies.html b/var/www/modules/Tags/templates/galaxies.html index 5013c356..2dc6d473 100644 --- a/var/www/modules/Tags/templates/galaxies.html +++ b/var/www/modules/Tags/templates/galaxies.html @@ -5,7 +5,8 @@ <meta charset="utf-8"> <meta name="viewport" content="width=device-width, initial-scale=1.0"> - <title>Analysis Information Leak framework Dashboard</title> + <title>Galaxies - AIL</title> + <link rel="icon" href="{{ url_for('static', filename='image/ail-icon.png') }}"> <!-- Core CSS --> <link href="{{ url_for('static', filename='css/bootstrap.min.css') }}" rel="stylesheet"> diff --git a/var/www/modules/Tags/templates/tag_galaxy_info.html b/var/www/modules/Tags/templates/tag_galaxy_info.html index a1544fcd..9ee05049 100644 --- a/var/www/modules/Tags/templates/tag_galaxy_info.html +++ b/var/www/modules/Tags/templates/tag_galaxy_info.html @@ -5,7 +5,8 @@ <meta charset="utf-8"> <meta name="viewport" content="width=device-width, initial-scale=1.0"> - <title>Analysis Information Leak framework Dashboard</title> + <title>Galaxy Tag Info - AIL</title> + <link rel="icon" href="{{ url_for('static', filename='image/ail-icon.png') }}"> <!-- Core CSS --> <link href="{{ url_for('static', filename='css/bootstrap.min.css') }}" rel="stylesheet"> diff --git a/var/www/modules/Tags/templates/tagged.html b/var/www/modules/Tags/templates/tagged.html index 1053041f..c0628eb7 100644 --- a/var/www/modules/Tags/templates/tagged.html +++ b/var/www/modules/Tags/templates/tagged.html @@ -5,7 +5,8 @@ <meta charset="utf-8"> <meta name="viewport" content="width=device-width, initial-scale=1.0"> - <title>Analysis Information Leak framework Dashboard</title> + <title>Tags - AIL</title> + <link rel="icon" href="{{ url_for('static', filename='image/ail-icon.png') }}"> <!-- Core CSS --> <link href="/static//css/bootstrap.min.css" rel="stylesheet"> diff --git a/var/www/modules/Tags/templates/taxonomies.html b/var/www/modules/Tags/templates/taxonomies.html index 74feda6f..83d8a930 100644 --- a/var/www/modules/Tags/templates/taxonomies.html +++ b/var/www/modules/Tags/templates/taxonomies.html @@ -5,7 +5,8 @@ <meta charset="utf-8"> <meta name="viewport" content="width=device-width, initial-scale=1.0"> - <title>Analysis Information Leak framework Dashboard</title> + <title>Taxonomies - AIL</title> + <link rel="icon" href="{{ url_for('static', filename='image/ail-icon.png') }}"> <!-- Core CSS --> <link href="{{ url_for('static', filename='css/bootstrap.min.css') }}" rel="stylesheet"> diff --git a/var/www/modules/browsepastes/templates/browse_important_paste.html 
b/var/www/modules/browsepastes/templates/browse_important_paste.html index faa7ed3d..a68e0f48 100644 --- a/var/www/modules/browsepastes/templates/browse_important_paste.html +++ b/var/www/modules/browsepastes/templates/browse_important_paste.html @@ -5,7 +5,8 @@ <meta charset="utf-8"> <meta name="viewport" content="width=device-width, initial-scale=1.0"> - <title>Analysis Information Leak framework Dashboard</title> + <title>Browse Important Paste - AIL</title> + <link rel="icon" href="{{ url_for('static', filename='image/ail-icon.png') }}"> <!-- Core CSS --> <link href="{{ url_for('static', filename='css/bootstrap.min.css') }}" rel="stylesheet"> diff --git a/var/www/modules/dashboard/templates/index.html b/var/www/modules/dashboard/templates/index.html index 33ba2781..e7331b6f 100644 --- a/var/www/modules/dashboard/templates/index.html +++ b/var/www/modules/dashboard/templates/index.html @@ -6,6 +6,7 @@ <meta name="viewport" content="width=device-width, initial-scale=1.0"> <title>Analysis Information Leak framework Dashboard</title> + <link rel="icon" href="{{ url_for('static', filename='image/ail-icon.png') }}"> <!-- Core CSS --> <link href="{{ url_for('static', filename='css/bootstrap.min.css') }}" rel="stylesheet"> diff --git a/var/www/modules/hashDecoded/Flask_hashDecoded.py b/var/www/modules/hashDecoded/Flask_hashDecoded.py new file mode 100644 index 00000000..a40a5a00 --- /dev/null +++ b/var/www/modules/hashDecoded/Flask_hashDecoded.py @@ -0,0 +1,629 @@ +#!/usr/bin/env python3 +# -*-coding:UTF-8 -* + +''' + Flask functions and routes for the trending modules page +''' +import redis +import os +import datetime +import json +from Date import Date + +from io import BytesIO +import zipfile + +import requests +from flask import Flask, render_template, jsonify, request, Blueprint, redirect, url_for, send_file + +# ============ VARIABLES ============ +import Flask_config + +app = Flask_config.app +cfg = Flask_config.cfg +r_serv_metadata = Flask_config.r_serv_metadata +vt_enabled = Flask_config.vt_enabled +vt_auth = Flask_config.vt_auth + +hashDecoded = Blueprint('hashDecoded', __name__, template_folder='templates') + +# ============ FUNCTIONS ============ + +def get_date_range(num_day): + curr_date = datetime.date.today() + date = Date(str(curr_date.year)+str(curr_date.month).zfill(2)+str(curr_date.day).zfill(2)) + date_list = [] + + for i in range(0, num_day+1): + date_list.append(date.substract_day(i)) + + return list(reversed(date_list)) + +def substract_date(date_from, date_to): + date_from = datetime.date(int(date_from[0:4]), int(date_from[4:6]), int(date_from[6:8])) + date_to = datetime.date(int(date_to[0:4]), int(date_to[4:6]), int(date_to[6:8])) + delta = date_to - date_from # timedelta + l_date = [] + for i in range(delta.days + 1): + date = date_from + datetime.timedelta(i) + l_date.append( date.strftime('%Y%m%d') ) + return l_date + +def list_sparkline_values(date_range_sparkline, hash): + sparklines_value = [] + for date_day in date_range_sparkline: + nb_seen_this_day = r_serv_metadata.zscore('hash_date:'+date_day, hash) + if nb_seen_this_day is None: + nb_seen_this_day = 0 + sparklines_value.append(int(nb_seen_this_day)) + return sparklines_value + +def get_file_icon(estimated_type): + file_type = estimated_type.split('/')[0] + # set file icon + if file_type == 'application': + file_icon = 'fa-file ' + elif file_type == 'audio': + file_icon = 'fa-file-video-o ' + elif file_type == 'image': + file_icon = 'fa-file-image-o' + elif file_type == 'text': + file_icon = 
'fa-file-text-o' + else: + file_icon = 'fa-file-o' + + return file_icon + +def get_file_icon_text(estimated_type): + file_type = estimated_type.split('/')[0] + # set file icon + if file_type == 'application': + file_icon_text = '\uf15b' + elif file_type == 'audio': + file_icon_text = '\uf1c7' + elif file_type == 'image': + file_icon_text = '\uf1c5' + elif file_type == 'text': + file_icon_text = '\uf15c' + else: + file_icon_text = '\uf15b' + + return file_icon_text + +def one(): + return 1 + +# ============= ROUTES ============== +@hashDecoded.route("/hashDecoded/all_hash_search", methods=['POST']) +def all_hash_search(): + date_from = request.form.get('date_from') + date_to = request.form.get('date_to') + type = request.form.get('type') + encoding = request.form.get('encoding') + return redirect(url_for('hashDecoded.hashDecoded_page', date_from=date_from, date_to=date_to, type=type, encoding=encoding)) + +@hashDecoded.route("/hashDecoded/", methods=['GET']) +def hashDecoded_page(): + date_from = request.args.get('date_from') + date_to = request.args.get('date_to') + type = request.args.get('type') + encoding = request.args.get('encoding') + + if type == 'All types': + type = None + + if encoding == 'All encoding': + encoding = None + + #date_from = '20180628' or date_from = '2018-06-28' + #date_to = '20180628' or date_to = '2018-06-28' + + # verify file type input + if type is not None: + #retrieve + char + type = type.replace(' ', '+') + if type not in r_serv_metadata.smembers('hash_all_type'): + type = None + + all_encoding = r_serv_metadata.smembers('all_decoder') + # verify encoding input + if encoding is not None: + if encoding not in all_encoding: + encoding = None + + date_range = [] + if date_from is not None and date_to is not None: + #change format + try: + if len(date_from) != 8: + date_from = date_from[0:4] + date_from[5:7] + date_from[8:10] + date_to = date_to[0:4] + date_to[5:7] + date_to[8:10] + date_range = substract_date(date_from, date_to) + except: + pass + + if not date_range: + date_range.append(datetime.date.today().strftime("%Y%m%d")) + date_from = date_range[0][0:4] + '-' + date_range[0][4:6] + '-' + date_range[0][6:8] + date_to = date_from + + else: + date_from = date_from[0:4] + '-' + date_from[4:6] + '-' + date_from[6:8] + date_to = date_to[0:4] + '-' + date_to[4:6] + '-' + date_to[6:8] + + # display day type bar chart + if len(date_range) == 1 and type is None: + daily_type_chart = True + daily_date = date_range[0] + else: + daily_type_chart = False + daily_date = None + + l_64 = set() + for date in date_range: + if encoding is None: + l_hash = r_serv_metadata.zrange('hash_date:' +date, 0, -1) + else: + l_hash = r_serv_metadata.zrange(encoding+'_date:' +date, 0, -1) + if l_hash: + for hash in l_hash: + l_64.add(hash) + + num_day_sparkline = 6 + date_range_sparkline = get_date_range(num_day_sparkline) + + b64_metadata = [] + l_64 = list(l_64) + for hash in l_64: + # select requested base 64 type + estimated_type = r_serv_metadata.hget('metadata_hash:'+hash, 'estimated_type') + if type is not None: + if estimated_type is not None: + if estimated_type != type: + continue + + first_seen = r_serv_metadata.hget('metadata_hash:'+hash, 'first_seen') + last_seen = r_serv_metadata.hget('metadata_hash:'+hash, 'last_seen') + nb_seen_in_paste = r_serv_metadata.hget('metadata_hash:'+hash, 'nb_seen_in_all_pastes') + size = r_serv_metadata.hget('metadata_hash:'+hash, 'size') + + if hash is not None and first_seen is not None and \ + last_seen is not None and \ + 
nb_seen_in_paste is not None and \ + size is not None: + + file_icon = get_file_icon(estimated_type) + + if r_serv_metadata.hexists('metadata_hash:'+hash, 'vt_link'): + b64_vt = True + b64_vt_link = r_serv_metadata.hget('metadata_hash:'+hash, 'vt_link') + b64_vt_report = r_serv_metadata.hget('metadata_hash:'+hash, 'vt_report') + else: + b64_vt = False + b64_vt_link = '' + b64_vt_report = r_serv_metadata.hget('metadata_hash:'+hash, 'vt_report') + # hash never refreshed + if b64_vt_report is None: + b64_vt_report = '' + + sparklines_value = list_sparkline_values(date_range_sparkline, hash) + + b64_metadata.append( (file_icon, estimated_type, hash, nb_seen_in_paste, size, first_seen, last_seen, b64_vt, b64_vt_link, b64_vt_report, sparklines_value) ) + + l_type = r_serv_metadata.smembers('hash_all_type') + + return render_template("hashDecoded.html", l_64=b64_metadata, vt_enabled=vt_enabled, l_type=l_type, type=type, daily_type_chart=daily_type_chart, daily_date=daily_date, + encoding=encoding, all_encoding=all_encoding, date_from=date_from, date_to=date_to) + +@hashDecoded.route('/hashDecoded/hash_by_type') +def hash_by_type(): + type = request.args.get('type') + type = 'text/plain' + return render_template('hash_type.html',type = type) + +@hashDecoded.route('/hashDecoded/hash_hash') +def hash_hash(): + hash = request.args.get('hash') + return render_template('hash_hash.html') + +@hashDecoded.route('/hashDecoded/showHash') +def showHash(): + hash = request.args.get('hash') + #hash = 'e02055d3efaad5d656345f6a8b1b6be4fe8cb5ea' + + # TODO FIXME show error + if hash is None: + return hashDecoded_page() + + estimated_type = r_serv_metadata.hget('metadata_hash:'+hash, 'estimated_type') + # hash not found + # TODO FIXME show error + if estimated_type is None: + return hashDecoded_page() + + else: + file_icon = get_file_icon(estimated_type) + size = r_serv_metadata.hget('metadata_hash:'+hash, 'size') + first_seen = r_serv_metadata.hget('metadata_hash:'+hash, 'first_seen') + last_seen = r_serv_metadata.hget('metadata_hash:'+hash, 'last_seen') + nb_seen_in_all_pastes = r_serv_metadata.hget('metadata_hash:'+hash, 'nb_seen_in_all_pastes') + + # get all encoding for this hash + list_hash_decoder = [] + list_decoder = r_serv_metadata.smembers('all_decoder') + for decoder in list_decoder: + encoding = r_serv_metadata.hget('metadata_hash:'+hash, decoder+'_decoder') + if encoding is not None: + list_hash_decoder.append({'encoding': decoder, 'nb_seen': encoding}) + + num_day_type = 6 + date_range_sparkline = get_date_range(num_day_type) + sparkline_values = list_sparkline_values(date_range_sparkline, hash) + + if r_serv_metadata.hexists('metadata_hash:'+hash, 'vt_link'): + b64_vt = True + b64_vt_link = r_serv_metadata.hget('metadata_hash:'+hash, 'vt_link') + b64_vt_report = r_serv_metadata.hget('metadata_hash:'+hash, 'vt_report') + else: + b64_vt = False + b64_vt_link = '' + b64_vt_report = r_serv_metadata.hget('metadata_hash:'+hash, 'vt_report') + # hash never refreshed + if b64_vt_report is None: + b64_vt_report = '' + + return render_template('showHash.html', hash=hash, vt_enabled=vt_enabled, b64_vt=b64_vt, b64_vt_link=b64_vt_link, + b64_vt_report=b64_vt_report, + size=size, estimated_type=estimated_type, file_icon=file_icon, + first_seen=first_seen, list_hash_decoder=list_hash_decoder, + last_seen=last_seen, nb_seen_in_all_pastes=nb_seen_in_all_pastes, sparkline_values=sparkline_values) + +@app.route('/hashDecoded/downloadHash') +def downloadHash(): + hash = request.args.get('hash') + # sanitize hash + 
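+    # keeping only the part before any '/' restricts the user-supplied value to a single
+    # segment before it is reused as a Redis key, as the entry name in the zip and as the
+    # download filename below.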
hash = hash.split('/')[0] + + # hash exist + if r_serv_metadata.hget('metadata_hash:'+hash, 'estimated_type') is not None: + + b64_path = r_serv_metadata.hget('metadata_hash:'+hash, 'saved_path') + b64_full_path = os.path.join(os.environ['AIL_HOME'], b64_path) + hash_content = '' + try: + with open(b64_full_path, 'rb') as f: + hash_content = f.read() + + # zip buffer + result = BytesIO() + temp = BytesIO() + temp.write(hash_content) + + with zipfile.ZipFile(result, "w") as zf: + #zf.setpassword(b"infected") + zf.writestr( hash, temp.getvalue()) + + filename = hash + '.zip' + result.seek(0) + + return send_file(result, attachment_filename=filename, as_attachment=True) + except Exception as e: + print(e) + return 'Server Error' + else: + return 'hash: ' + hash + " don't exist" + +@hashDecoded.route('/hashDecoded/hash_by_type_json') +def hash_by_type_json(): + type = request.args.get('type') + + #retrieve + char + type = type.replace(' ', '+') + + num_day_type = 30 + date_range = get_date_range(num_day_type) + + #verify input + if type in r_serv_metadata.smembers('hash_all_type'): + type_value = [] + all_decoder = r_serv_metadata.smembers('all_decoder') + + range_decoder = [] + for date in date_range: + day_decoder = {} + day_decoder['date']= date[0:4] + '-' + date[4:6] + '-' + date[6:8] + for decoder in all_decoder: + num_day_decoder = r_serv_metadata.zscore(decoder+'_type:'+type, date) + if num_day_decoder is None: + num_day_decoder = 0 + day_decoder[decoder]= num_day_decoder + range_decoder.append(day_decoder) + + + + return jsonify(range_decoder) + else: + return jsonify() + +@hashDecoded.route('/hashDecoded/decoder_type_json') +def decoder_type_json(): + date_from = request.args.get('date_from') + date_to = request.args.get('date_to') + + typ = request.args.get('type') + + if typ == 'All types': + typ = None + + # verify file type input + if typ is not None: + #retrieve + char + typ = typ.replace(' ', '+') + if typ not in r_serv_metadata.smembers('hash_all_type'): + typ = None + + all_decoder = r_serv_metadata.smembers('all_decoder') + # sort DESC decoder for color + all_decoder = sorted(all_decoder) + + date_range = [] + if date_from is not None and date_to is not None: + #change format + try: + if len(date_from) != 8: + date_from = date_from[0:4] + date_from[5:7] + date_from[8:10] + date_to = date_to[0:4] + date_to[5:7] + date_to[8:10] + date_range = substract_date(date_from, date_to) + except: + pass + + if not date_range: + date_range.append(datetime.date.today().strftime("%Y%m%d")) + + nb_decoded = {} + for decoder in all_decoder: + nb_decoded[decoder] = 0 + + for date in date_range: + for decoder in all_decoder: + if typ is None: + nb_decod = r_serv_metadata.get(decoder+'_decoded:'+date) + else: + nb_decod = r_serv_metadata.zscore(decoder+'_type:'+typ, date) + + if nb_decod is not None: + nb_decoded[decoder] = nb_decoded[decoder] + int(nb_decod) + + to_json = [] + for decoder in all_decoder: + to_json.append({'name': decoder, 'value': nb_decoded[decoder]}) + return jsonify(to_json) + + +@hashDecoded.route('/hashDecoded/daily_type_json') +def daily_type_json(): + date = request.args.get('date') + + daily_type = set() + l_b64 = r_serv_metadata.zrange('hash_date:' +date, 0, -1) + for hash in l_b64: + estimated_type = r_serv_metadata.hget('metadata_hash:'+hash, 'estimated_type') + if estimated_type is not None: + daily_type.add(estimated_type) + + type_value = [] + for day_type in daily_type: + num_day_type = r_serv_metadata.zscore('hash_type:'+day_type, date) + type_value.append({ 
'date' : day_type, 'value' : int( num_day_type )}) + + return jsonify(type_value) + +@hashDecoded.route('/hashDecoded/range_type_json') +def range_type_json(): + date_from = request.args.get('date_from') + date_to = request.args.get('date_to') + + date_range = [] + if date_from is not None and date_to is not None: + #change format + if len(date_from) != 8: + date_from = date_from[0:4] + date_from[5:7] + date_from[8:10] + date_to = date_to[0:4] + date_to[5:7] + date_to[8:10] + date_range = substract_date(date_from, date_to) + + if not date_range: + date_range.append(datetime.date.today().strftime("%Y%m%d")) + + all_type = set() + for date in date_range: + l_hash = r_serv_metadata.zrange('hash_date:' +date, 0, -1) + if l_hash: + for hash in l_hash: + estimated_type = r_serv_metadata.hget('metadata_hash:'+hash, 'estimated_type') + all_type.add(estimated_type) + + range_type = [] + + for date in date_range: + if len(date_range) == 1: + if date==date_from and date==date_to: + for type in all_type: + day_type = {} + day_type['date']= type + list_decoder = r_serv_metadata.smembers('all_decoder') + for decoder in list_decoder: + num_day_decoder = r_serv_metadata.zscore(decoder+'_type:'+type, date) + if num_day_decoder is None: + num_day_decoder = 0 + day_type[decoder]= num_day_decoder + range_type.append(day_type) + else: + range_type = '' + else: + day_type = {} + day_type['date']= date[0:4] + '-' + date[4:6] + '-' + date[6:8] + for type in all_type: + num_day_type = r_serv_metadata.zscore('hash_type:'+type, date) + if num_day_type is None: + num_day_type = 0 + day_type[type]= num_day_type + range_type.append(day_type) + + return jsonify(range_type) + +@hashDecoded.route('/hashDecoded/hash_graph_line_json') +def hash_graph_line_json(): + hash = request.args.get('hash') + date_from = request.args.get('date_from') + date_to = request.args.get('date_to') + + #hash = '9c748d28d78a64aef99e7ba866a433eb635c6d7a' + + if date_from is None or date_to is None: + nb_days_seen_in_pastes = 30 + else: + # # TODO: # FIXME: + nb_days_seen_in_pastes = 30 + + date_range_seen_in_pastes = get_date_range(nb_days_seen_in_pastes) + + #verify input + if r_serv_metadata.hget('metadata_hash:'+hash, 'estimated_type') is not None: + json_seen_in_paste = [] + for date in date_range_seen_in_pastes: + nb_seen_this_day = r_serv_metadata.zscore('hash_date:'+date, hash) + if nb_seen_this_day is None: + nb_seen_this_day = 0 + date = date[0:4] + '-' + date[4:6] + '-' + date[6:8] + json_seen_in_paste.append({ 'date' : date, 'value' : int( nb_seen_this_day )}) + + return jsonify(json_seen_in_paste) + else: + return jsonify() + + +@hashDecoded.route('/hashDecoded/hash_graph_node_json') +def hash_graph_node_json(): + hash = request.args.get('hash') + + estimated_type = r_serv_metadata.hget('metadata_hash:'+hash, 'estimated_type') + + if hash is not None and estimated_type is not None: + + nodes_set_hash = set() + nodes_set_paste = set() + links_set = set() + + url = hash + first_seen = r_serv_metadata.hget('metadata_hash:'+hash, 'first_seen') + last_seen = r_serv_metadata.hget('metadata_hash:'+hash, 'last_seen') + nb_seen_in_paste = r_serv_metadata.hget('metadata_hash:'+hash, 'nb_seen_in_all_pastes') + size = r_serv_metadata.hget('metadata_hash:'+hash, 'size') + + nodes_set_hash.add((hash, 1, first_seen, last_seen, estimated_type, nb_seen_in_paste, size, url)) + + #get related paste + l_pastes = r_serv_metadata.zrange('nb_seen_hash:'+hash, 0, -1) + for paste in l_pastes: + url = paste + #nb_seen_in_this_paste = nb_in_file = 
int(r_serv_metadata.zscore('nb_seen_hash:'+hash, paste)) + nb_hash_in_paste = r_serv_metadata.scard('hash_paste:'+paste) + + nodes_set_paste.add((paste, 2,nb_hash_in_paste,url)) + links_set.add((hash, paste)) + + l_hash = r_serv_metadata.smembers('hash_paste:'+paste) + for child_hash in l_hash: + if child_hash != hash: + url = child_hash + first_seen = r_serv_metadata.hget('metadata_hash:'+child_hash, 'first_seen') + last_seen = r_serv_metadata.hget('metadata_hash:'+child_hash, 'last_seen') + nb_seen_in_paste = r_serv_metadata.hget('metadata_hash:'+child_hash, 'nb_seen_in_all_pastes') + size = r_serv_metadata.hget('metadata_hash:'+child_hash, 'size') + estimated_type = r_serv_metadata.hget('metadata_hash:'+child_hash, 'estimated_type') + + nodes_set_hash.add((child_hash, 3, first_seen, last_seen, estimated_type, nb_seen_in_paste, size, url)) + links_set.add((child_hash, paste)) + + #l_pastes_child = r_serv_metadata.zrange('nb_seen_hash:'+child_hash, 0, -1) + #for child_paste in l_pastes_child: + + nodes = [] + for node in nodes_set_hash: + nodes.append({"id": node[0], "group": node[1], "first_seen": node[2], "last_seen": node[3], 'estimated_type': node[4], "nb_seen_in_paste": node[5], "size": node[6], 'icon': get_file_icon_text(node[4]),"url": url_for('hashDecoded.showHash', hash=node[7]), 'hash': True}) + for node in nodes_set_paste: + nodes.append({"id": node[0], "group": node[1], "nb_seen_in_paste": node[2],"url": url_for('showsavedpastes.showsavedpaste', paste=node[3]), 'hash': False}) + links = [] + for link in links_set: + links.append({"source": link[0], "target": link[1]}) + json = {"nodes": nodes, "links": links} + return jsonify(json) + + else: + return jsonify({}) + +@hashDecoded.route('/hashDecoded/hash_types') +def hash_types(): + date_from = 20180701 + date_to = 20180706 + return render_template('hash_types.html', date_from=date_from, date_to=date_to) + +@hashDecoded.route('/hashDecoded/send_file_to_vt_js') +def send_file_to_vt_js(): + hash = request.args.get('hash') + + b64_path = r_serv_metadata.hget('metadata_hash:'+hash, 'saved_path') + b64_full_path = os.path.join(os.environ['AIL_HOME'], b64_path) + b64_content = '' + with open(b64_full_path, 'rb') as f: + b64_content = f.read() + + files = {'file': (hash, b64_content)} + response = requests.post('https://www.virustotal.com/vtapi/v2/file/scan', files=files, params= {'apikey': vt_auth}) + json_response = response.json() + #print(json_response) + + vt_link = json_response['permalink'].split('analysis')[0] + 'analysis/' + r_serv_metadata.hset('metadata_hash:'+hash, 'vt_link', vt_link) + vt_report = 'Please Refresh' + r_serv_metadata.hset('metadata_hash:'+hash, 'vt_report', vt_report) + + return jsonify({'vt_link': vt_link, 'vt_report': vt_report}) + + +@hashDecoded.route('/hashDecoded/update_vt_result') +def update_vt_result(): + hash = request.args.get('hash') + + params = {'apikey': vt_auth, 'resource': hash} + response = requests.get('https://www.virustotal.com/vtapi/v2/file/report',params=params) + if response.status_code == 200: + json_response = response.json() + response_code = json_response['response_code'] + # report exist + if response_code == 1: + total = json_response['total'] + positive = json_response['positives'] + + b64_vt_report = 'Detection {}/{}'.format(positive,total) + # no report found + elif response_code == 0: + b64_vt_report = 'No report found' + pass + # file in queue + elif response_code == -2: + b64_vt_report = 'File in queue' + pass + + r_serv_metadata.hset('metadata_hash:'+hash, 'vt_report', 
b64_vt_report) + return jsonify(hash=hash, report_vt=b64_vt_report) + elif response.status_code == 403: + Flask_config.vt_enabled = False + print('VT is disabled') + return jsonify() + else: + # TODO FIXME make json response + return jsonify() + +# ========= REGISTRATION ========= +app.register_blueprint(hashDecoded) diff --git a/var/www/modules/hashDecoded/templates/hashDecoded.html b/var/www/modules/hashDecoded/templates/hashDecoded.html new file mode 100644 index 00000000..f65f647b --- /dev/null +++ b/var/www/modules/hashDecoded/templates/hashDecoded.html @@ -0,0 +1,680 @@ +<!DOCTYPE html> +<html> + + <head> + <meta charset="utf-8"> + <meta name="viewport" content="width=device-width, initial-scale=1.0"> + + <title>HashesDecoded - AIL</title> + + <!-- Core CSS --> + <link href="{{ url_for('static', filename='css/bootstrap.min.css') }}" rel="stylesheet"> + <link href="{{ url_for('static', filename='font-awesome/css/font-awesome.css') }}" rel="stylesheet"> + <link href="{{ url_for('static', filename='css/sb-admin-2.css') }}" rel="stylesheet"> + <link href="{{ url_for('static', filename='css/dataTables.bootstrap.css') }}" rel="stylesheet" type="text/css" /> + <link href="{{ url_for('static', filename='css/daterangepicker.min.css') }}" rel="stylesheet" type="text/css" /> + <!-- JS --> + <script language="javascript" src="{{ url_for('static', filename='js/jquery.js')}}"></script> + <script src="{{ url_for('static', filename='js/bootstrap.min.js') }}"></script> + <script src="{{ url_for('static', filename='js/jquery.dataTables.min.js') }}"></script> + <script src="{{ url_for('static', filename='js/dataTables.bootstrap.js') }}"></script> + <script src="{{ url_for('static', filename='js/jquery.flot.js') }}"></script> + <script src="{{ url_for('static', filename='js/jquery.flot.time.js') }}"></script> + <script src="{{ url_for('static', filename='js/jquery.flot.stack.js') }}"></script> + <script language="javascript" src="{{ url_for('static', filename='js/moment.min.js') }}"></script> + <script language="javascript" src="{{ url_for('static', filename='js/jquery.daterangepicker.min.js') }}"></script> + <script language="javascript" src="{{ url_for('static', filename='js/d3.min.js') }}"></script> + <style> + .input-group .form-control { + position: unset; + } + .red_table thead{ + background: #d91f2d; + color: #fff; + } + .line { + fill: none; + stroke: #000; + stroke-width: 2.0px; + } + .bar { + fill: steelblue; + } + .bar:hover{ + fill: brown; + cursor: pointer; + } + .bar_stack:hover{ + cursor: pointer; + } + .pie_path:hover{ + cursor: pointer; + } + .svgText { + pointer-events: none; + } + div.tooltip { + position: absolute; + text-align: center; + padding: 2px; + font: 12px sans-serif; + background: #ebf4fb; + border: 2px solid #b7ddf2; + border-radius: 8px; + pointer-events: none; + color: #000000; + } + </style> + </head> + <body> + + {% include 'navbar.html' %} + + <div id="page-wrapper"> + <div class="row"> + <div class="col-lg-12"> + <h1 class="page-header" data-page="page-termsfrequency" >Hashed Files</h1> + <div> + + </div> + </div> + <!-- /.col-lg-12 --> + + <div class="row"> + <div class="col-md-10"> + <div id="barchart_type"> + </div> + </div> + + <div class="col-md-2"> + <div class="panel panel-info" style="text-align:center;"> + <div class="panel-heading"> + Select a date range : + <form action="/hashDecoded/all_hash_search" id="hash_selector_form" method='post'> + <div class="input-group"> + <span class="input-group-addon"><i class="fa fa-calendar fa" 
aria-hidden="true"></i></span>
+                  <input class="form-control" id="date-range-from" placeholder="yyyy-mm-dd" value="{{ date_from }}" name="date_from">
+                </div>
+                <div class="input-group">
+                  <span class="input-group-addon"><i class="fa fa-calendar fa" aria-hidden="true"></i></span>
+                  <input class="form-control" id="date-range-to" placeholder="yyyy-mm-dd" value="{{ date_to }}" name="date_to">
+                </div>
+                Encoding :
+                <select class="form-control" name="encoding" style="width:100%;">
+                  <option>All encoding</option>
+                  {% for encod in all_encoding %}
+                    {% if encoding|string() == encod|string() %}
+                      <option selected>{{ encod }}</option>
+                    {% else %}
+                      <option>{{ encod }}</option>
+                    {% endif %}
+                  {% endfor %}
+                </select>
+                File Type :
+                <select class="form-control" name="type" style="width:100%;">
+                  <option>All types</option>
+                  {% for typ in l_type %}
+                    {% if type|string() == typ|string() %}
+                      <option selected>{{ typ }}</option>
+                    {% else %}
+                      <option>{{ typ }}</option>
+                    {% endif %}
+                  {% endfor %}
+                </select>
+                <br>
+                <button class="btn btn-primary" style="text-align:center;">
+                  <i class="fa fa-files-o"></i> Search
+                </button>
+              </form>
+            </div>
+          </div>
+          <div id="pie_chart_encoded">
+          </div>
+        </div>
+      </div>
+    </div>
+
+    <!-- /#page-wrapper -->
+    {% if l_64|length != 0 %}
+      {% if date_from|string == date_to|string %}
+        <h3> {{ date_from }} Hashed files: </h3>
+      {% else %}
+        <h3> {{ date_from }} to {{ date_to }} Hashed files: </h3>
+      {% endif %}
+      <table id="tableb64" class="red_table table table-striped table-bordered">
+        <thead>
+          <tr>
+            <th>estimated type</th>
+            <th>hash</th>
+            <th>first seen</th>
+            <th>last seen</th>
+            <th>nb paste</th>
+            <th>size</th>
+            <th>Virus Total</th>
+            <th>Sparkline</th>
+          </tr>
+        </thead>
+        <tbody>
+          {% for b64 in l_64 %}
+            <tr>
+              <td><i class="fa {{ b64[0] }}"></i> {{ b64[1] }}</td>
+              <td><a target="_blank" href="{{ url_for('hashDecoded.showHash') }}?hash={{ b64[2] }}">{{ b64[2] }}</a></td>
+              <td>{{ b64[5] }}</td>
+              <td>{{ b64[6] }}</td>
+              <td>{{ b64[3] }}</td>
+              <td>{{ b64[4] }}</td>
+              <td style="text-align:center;max-width:150px;">
+                {% if vt_enabled %}
+                  {% if not b64[7] %}
+                    <darkbutton_{{ b64[2] }}>
+                      <button id="submit_vt_{{ b64[2] }}" class="btn btn-primary" onclick="sendFileToVT('{{ b64[2] }}')">
+                        <i class="fa fa-paper-plane"></i> Send this file to VT
+                      </button>
+                    </darkbutton_{{ b64[2] }}>
+                  {% else %}
+                    <a class="btn btn-primary" target="_blank" href="{{ b64[8] }}"><i class="fa fa-link"> VT Report</i></a>
+                  {% endif %}
+                  <button class="btn btn-default" onclick="updateVTReport('{{ b64[2] }}')">
+                    <div id="report_vt_{{ b64[2] }}"><span class="glyphicon glyphicon-refresh"></span> {{ b64[9] }}</div>
+                  </button>
+                {% else %}
+                  Virus Total submission is disabled
+                {% endif %}
+
+              </td>
+              <td id="sparklines_{{ b64[2] }}" style="text-align:center;">
+              </td>
+            </tr>
+          {% endfor %}
+        </tbody>
+      </table>
+    {% else %}
+      {% if date_from|string == date_to|string %}
+        <h3> {{ date_from }}, No Hashes</h3>
+      {% else %}
+        <h3> {{ date_from }} to {{ date_to }}, No Hashes</h3>
+      {% endif %}
+    {% endif %}
+  </div>
+
+  </div>
+  <!-- /.row -->
+
+  <script>
+  var chart = {};
+  $(document).ready(function(){
+    activePage = "page-hashDecoded";
+    $("#"+activePage).addClass("active");
+
+    $('#date-range-from').dateRangePicker({
+      separator : ' to ',
+      getValue: function()
+      {
+        if ($('#date-range-from').val() && $('#date-range-to').val() )
+          return $('#date-range-from').val() + ' to ' + $('#date-range-to').val();
+        else
+          return '';
+      },
+      setValue: function(s,s1,s2)
+      {
$('#date-range-from').val(s1); + $('#date-range-to').val(s2); + } + }); + $('#date-range-to').dateRangePicker({ + separator : ' to ', + getValue: function() + { + if ($('#date-range-from').val() && $('#date-range-to').val() ) + return $('#date-range-from').val() + ' to ' + $('#date-range-to').val(); + else + return ''; + }, + setValue: function(s,s1,s2) + { + $('#date-range-from').val(s1); + $('#date-range-to').val(s2); + } + }); + + $('#tableb64').DataTable({ + "aLengthMenu": [[5, 10, 15, -1], [5, 10, 15, "All"]], + "iDisplayLength": 10, + "order": [[ 3, "desc" ]] + }); + + {% if type %} + chart.stackBarChart =barchart_type_stack('/hashDecoded/hash_by_type_json?type={{type}}', 'id'); + {% elif daily_type_chart %} + chart.stackBarChart =barchart_type_stack('/hashDecoded/range_type_json?date_from={{daily_date}}&date_to={{daily_date}}', 'id'); + {% else %} + chart.stackBarChart = barchart_type_stack("/hashDecoded/range_type_json?date_from={{date_from}}&date_to={{date_to}}", 'id') + {% endif %} + + chart.onResize(); + $(window).on("resize", function() { + chart.onResize(); + }); + }); + </script> +<script> + function updateVTReport(hash) { + //updateReport + $.getJSON('/hashDecoded/update_vt_result?hash='+hash, + function(data) { + content = '<span class="glyphicon glyphicon-refresh"></span> ' +data['report_vt'] + $( "#report_vt_"+hash ).html(content); + }); + } + + function sendFileToVT(hash) { + //send file to vt + $.getJSON('/hashDecoded/send_file_to_vt_js?hash='+hash, + function(data) { + var content = '<a id="submit_vt_'+hash+'" class="btn btn-primary" target="_blank" href="'+ data['vt_link'] +'"><i class="fa fa-link"> '+ ' VT Report' +'</i></a>'; + $('#submit_vt_'+hash).remove(); + $('darkbutton_'+hash).append(content); + }); + } +</script> +<script> +//var data = [6,3,3,2,5,3,9]; + +// a sparklines plot +function sparklines(id, points) { + var width = 100, height = 60; + + var data = [] + for (i = 0; i < points.length; i++) { + data[i] = { + 'x': i, + 'y': +points[i] + } + } + + var x = d3.scaleLinear() + .range([0, width - 10]) + .domain([0,5]); + + var y = d3.scaleLinear() + .range([height, 0]) + .domain([0,10]); + + var line = d3.line() + .x(function(d) {return x(d.x)}) + .y(function(d) {return y(d.y)}); + + d3.select("#"+id).append('svg') + .attr('width', width) + .attr('height', height) + .append('path') + .attr('class','line') + .datum(data) + .attr('d', line); + +} +</script> +<script> + {% for b64 in l_64 %} + sparklines("sparklines_{{ b64[2] }}", {{ b64[10] }}) + {% endfor %} +</script> + +<script> +var margin = {top: 20, right: 100, bottom: 55, left: 45}, + width = 1000 - margin.left - margin.right, + height = 500 - margin.top - margin.bottom; +var x = d3.scaleBand().rangeRound([0, width]).padding(0.1); + +var y = d3.scaleLinear().rangeRound([height, 0]); + +var xAxis = d3.axisBottom(x); + +var yAxis = d3.axisLeft(y); + +var color = d3.scaleOrdinal(d3.schemeSet3); + +var svg = d3.select("#barchart_type").append("svg") + .attr("id", "thesvg") + .attr("viewBox", "0 0 1000 500") + .attr("width", width + margin.left + margin.right) + .attr("height", height + margin.top + margin.bottom) + .append("g") + .attr("transform", "translate(" + margin.left + "," + margin.top + ")"); + +function barchart_type_stack(url, id) { + + d3.json(url) + .then(function(data){ + + var labelVar = 'date'; //A + var varNames = d3.keys(data[0]) + .filter(function (key) { return key !== labelVar;}); //B + + data.forEach(function (d) { //D + var y0 = 0; + d.mapping = varNames.map(function (name) { + return 
{ + name: name, + label: d[labelVar], + y0: y0, + y1: y0 += +d[name] + }; + }); + d.total = d.mapping[d.mapping.length - 1].y1; + }); + + x.domain(data.map(function (d) { return (d.date); })); //E + y.domain([0, d3.max(data, function (d) { return d.total; })]); + + svg.append("g") + .attr("class", "x axis") + .attr("transform", "translate(0," + height + ")") + .call(xAxis) + .selectAll("text") + .attr("class", "bar") + {% if date_from|string == date_to|string and type is none %} + .on("click", function (d) { window.location.href = '/hashDecoded/?date_from={{date_from}}&date_to={{date_to}}&type='+d }) + .attr("transform", "rotate(-18)" ) + {% elif date_from|string == date_to|string and type is not none %} + .on("click", function (d) { window.location.href = '/hashDecoded/?date_from='+d+'&date_to='+d }) + .attr("transform", "rotate(-18)" ) + {% else %} + .on("click", function (d) { window.location.href = '/hashDecoded/?date_from='+d+'&date_to='+d }) + .attr("transform", "rotate(-40)" ) + {% endif %} + .style("text-anchor", "end"); + + svg.append("g") + .attr("class", "y axis") + .call(yAxis) + .append("text") + .attr("transform", "rotate(-90)") + .attr("y", 6) + .attr("dy", ".71em") + .style("text-anchor", "end"); + + var selection = svg.selectAll(".series") + .data(data) + .enter().append("g") + .attr("class", "series") + .attr("transform", function (d) { return "translate(" + x((d.date)) + ",0)"; }); + + selection.selectAll("rect") + .data(function (d) { return d.mapping; }) + .enter().append("rect") + .attr("class", "bar_stack") + .attr("width", x.bandwidth()) + .attr("y", function (d) { return y(d.y1); }) + .attr("height", function (d) { return y(d.y0) - y(d.y1); }) + .style("fill", function (d) { return color(d.name); }) + .style("stroke", "grey") + .on("mouseover", function (d) { showPopover.call(this, d); }) + .on("mouseout", function (d) { removePopovers(); }) + {% if date_from|string == date_to|string and type is none %} + .on("click", function(d){ window.location.href = "/hashDecoded/" +'?date_from={{date_from}}&date_to={{date_to}}&type='+d.label+'&encoding='+d.name; }); + {% elif date_from|string == date_to|string and type is not none %} + .on("click", function(d){ window.location.href = "/hashDecoded/" +'?type={{type}}&date_from='+d.label+'&date_to='+d.label+'&encoding='+d.name; }); + {% else %} + .on("click", function(d){ window.location.href = "/hashDecoded/" +'?type='+ d.name +'&date_from='+d.label+'&date_to='+d.label; }); + {% endif %} + + data.forEach(function(d) { + if(d.total != 0){ + svg.append("text") + .attr("class", "bar") + .attr("dy", "-.35em") + //.on("click", (window.location.href = "/hashDecoded/"+'?date_from='+d.date) ) + .attr('x', x(d.date) + x.bandwidth()/2) + .attr('y', y(d.total)) + {% if date_from|string == date_to|string and type is none %} + .on("click", function () {window.location.href = "/hashDecoded/"+'?date_from={{date_from}}&date_to={{date_to}}&type='+d.date }) + {% elif date_from|string == date_to|string and type is not none %} + .on("click", function () {window.location.href = '/hashDecoded/?type={{type}}&date_from='+d.date+'&date_to='+d.date }) + {% else %} + .on("click", function () {window.location.href = "/hashDecoded/"+'?date_from='+d.date+'&date_to='+d.date }) + {% endif %} + .style("text-anchor", "middle") + .text(d.total); + } + }); + + drawLegend(varNames); + }); + +} + +function drawLegend (varNames) { + var legend = svg.selectAll(".legend") + .data(varNames.slice().reverse()) + .enter().append("g") + .attr("class", "legend") + 
.attr("transform", function (d, i) { return "translate(0," + i * 20 + ")"; }); + + legend.append("rect") + .attr("x", 943) + .attr("width", 10) + .attr("height", 10) + .style("fill", color) + .style("stroke", "grey"); + + legend.append("text") + .attr("class", "svgText") + .attr("x", 941) + .attr("y", 6) + .attr("dy", ".35em") + .style("text-anchor", "end") + .text(function (d) { return d; }); +} + +function removePopovers () { + $('.popover').each(function() { + $(this).remove(); + }); + } + +function showPopover (d) { + $(this).popover({ + title: d.name, + placement: 'auto top', + container: 'body', + trigger: 'manual', + html : true, + content: function() { + return d.label + + "<br/>num: " + d3.format(",")(d.value ? d.value: d.y1 - d.y0); } + }); + $(this).popover('show') +} + +chart.onResize = function () { + var aspect = 1000 / 500, chart = $("#thesvg"); + var targetWidth = chart.parent().width(); + chart.attr("width", targetWidth); + chart.attr("height", targetWidth / aspect); + } + +window.chart = chart; + +</script> + +<script> + +var width_pie = 200; +var height_pie = 200; +var padding_pie = 10; +var opacity_pie = .8; + +var radius_pie = Math.min(width_pie - padding_pie, height_pie - padding_pie) / 2; +//var color_pie = d3.scaleOrdinal(d3.schemeCategory10); +var color_pie = d3.scaleOrdinal(d3.schemeSet3); + +var div_pie = d3.select("body").append("div") + .attr("class", "tooltip") + .style("opacity", 0); + +var svg_pie = d3.select("#pie_chart_encoded") + .append('svg') + .attr("width", '100%') + .attr("height", '100%') + .attr('viewBox','0 0 '+Math.min(width_pie,height_pie) +' '+Math.min(width_pie,height_pie) ) + .attr('preserveAspectRatio','xMinYMin') + + +var g_pie = svg_pie.append('g') + .attr('transform', 'translate(' + (width_pie/2) + ',' + (height_pie/2) + ')'); + +var arc_pie = d3.arc() + .innerRadius(0) + .outerRadius(radius_pie); + +d3.json("/hashDecoded/decoder_type_json?date_from={{date_from}}&date_to={{date_to}}&type={{type}}") + .then(function(data){ + + var pie_pie = d3.pie() + .value(function(d) { return d.value; }) + .sort(null); + + var path_pie = g_pie.selectAll('path') + .data(pie_pie(data)) + .enter() + .append("g") + .append('path') + .attr('d', arc_pie) + .attr('fill', (d,i) => color_pie(i)) + .attr('class', 'pie_path') + .on("mouseover", mouseovered_pie) + .on("mouseout", mouseouted_pie) + .on("click", function (d) {window.location.href = '/hashDecoded/?date_from={{date_from}}&date_to={{date_to}}&type={{type}}&encoding='+d.data.name }) + .style('opacity', opacity_pie) + .style('stroke', 'white'); + }); + +function mouseovered_pie(d) { + + // tooltip + var content; + + content = "<b>"+d.data.name+"</b>"+"<br/>"+ + "<br/>"+ + "<i>Decoded</i>: "+d.data.value+"<br/>" + + div_pie.transition() + .duration(200) + .style("opacity", .9); + div_pie.html(content) + .style("left", (d3.event.pageX) + "px") + .style("top", (d3.event.pageY - 28) + "px"); +} + +function mouseouted_pie() { + div_pie.transition() + .duration(500) + .style("opacity", 0); +} +</script> + + + +<script> +function barchart_type(url, id) { + + + var margin = {top: 20, right: 20, bottom: 70, left: 40}; + + var width = 960 - margin.left - margin.right; + var height = 500 - margin.top - margin.bottom; + + var x = d3.scaleBand().rangeRound([0, width]).padding(0.1); + var y = d3.scaleLinear().rangeRound([height, 0]); + + var xAxis = d3.axisBottom(x) + //.tickFormat(d3.time.format("%Y-%m")); + + var yAxis = d3.axisLeft(y) + .ticks(10); + +/*var svg = d3.select(id).append("svg") + .attr("width", width + 
margin.left + margin.right) + .attr("height", height + margin.top + margin.bottom) + .attr("id", "thesvg") + .append("g") + .attr("transform", + "translate(" + margin.left + "," + margin.top + ")");*/ + + + d3.json(url) + .then(function(data){ + + data.forEach(function(d) { + d.value = +d.value; + }); + + x.domain(data.map(function(d) { return d.date; })); + y.domain([0, d3.max(data, function(d) { return d.value; })]); + + var label = svg.append("g") + .attr("class", "x axis") + .attr("transform", "translate(0," + height + ")") + .call(xAxis) + .selectAll("text") + .style("text-anchor", "end") + .attr("dx", "-.8em") + .attr("dy", "-.55em") + {% if daily_type_chart %} + .attr("transform", "rotate(-20)" ); + {% else %} + .attr("transform", "rotate(-70)" ) + .attr("class", "bar") + .on("click", function (d) { window.location.href = "/hashDecoded/"+'?date_from='+d+'&date_to='+d }); + {% endif %} + + svg.append("g") + .attr("class", "y axis") + .call(yAxis) + .append("text") + .attr("transform", "rotate(-90)") + .attr("y", 6) + .attr("dy", ".71em") + .style("text-anchor", "end") + .text("Value ($)"); + + var bar = svg.selectAll("bar") + .data(data) + .enter().append("rect") + .attr("class", "bar") + //.style("fill", "steelblue") + .attr("x", function(d) { return x(d.date); }) + .attr("width", x.bandwidth()) + .attr("y", function(d) { return y(d.value); }) + .attr("height", function(d) { return height - y(d.value); }) + {% if type %} + .on("click", function(d){ window.location.href = "/hashDecoded/" +'?type={{type}}&date_from='+ d.date +'&date_to='+ d.date; }); + {% endif %} + {% if daily_type_chart %} + .on("click", function(d){ window.location.href = "/hashDecoded/" +'?type='+d.date+'&date_from={{ daily_date }}&date_to={{ daily_date }}'; }); + {% endif %} + + + data.forEach(function(d) { + if(d.value != 0){ + svg.append("text") + .attr("class", "bar") + .attr("dy", "-.35em") + //.text(function(d) { return d.value; }); + .text(d.value) + .style("text-anchor", "middle") + .attr('x', x(d.date) + x.bandwidth()/2) + .attr('y', y(d.value)); + } + }); + + }); + +} +</script> + + + </body> + +</html> diff --git a/var/www/modules/hashDecoded/templates/header_hashDecoded.html b/var/www/modules/hashDecoded/templates/header_hashDecoded.html new file mode 100644 index 00000000..69fb9da9 --- /dev/null +++ b/var/www/modules/hashDecoded/templates/header_hashDecoded.html @@ -0,0 +1 @@ +<li id='page-hashDecoded'><a href="{{ url_for('hashDecoded.hashDecoded_page') }}"><i class="fa fa-files-o"></i> hashesDecoded </a></li> diff --git a/var/www/modules/hashDecoded/templates/showHash.html b/var/www/modules/hashDecoded/templates/showHash.html new file mode 100644 index 00000000..458c4c92 --- /dev/null +++ b/var/www/modules/hashDecoded/templates/showHash.html @@ -0,0 +1,611 @@ +<!DOCTYPE html> +<html> + + <head> + <meta charset="utf-8"> + <meta name="viewport" content="width=device-width, initial-scale=1.0"> + + <title>Hash Information - AIL</title> + + <!-- Core CSS --> + <link href="{{ url_for('static', filename='css/bootstrap.min.css') }}" rel="stylesheet"> + <link href="{{ url_for('static', filename='font-awesome/css/font-awesome.css') }}" rel="stylesheet"> + <link href="{{ url_for('static', filename='css/sb-admin-2.css') }}" rel="stylesheet"> + <link href="{{ url_for('static', filename='css/dataTables.bootstrap.css') }}" rel="stylesheet" type="text/css" /> + <link href="{{ url_for('static', filename='css/daterangepicker.min.css') }}" rel="stylesheet" type="text/css" /> + <!-- JS --> + <script 
language="javascript" src="{{ url_for('static', filename='js/jquery.js')}}"></script> + <script src="{{ url_for('static', filename='js/bootstrap.min.js') }}"></script> + <script src="{{ url_for('static', filename='js/jquery.dataTables.min.js') }}"></script> + <script src="{{ url_for('static', filename='js/dataTables.bootstrap.js') }}"></script> + <script src="{{ url_for('static', filename='js/jquery.flot.js') }}"></script> + <script src="{{ url_for('static', filename='js/jquery.flot.time.js') }}"></script> + <script src="{{ url_for('static', filename='js/jquery.flot.stack.js') }}"></script> + <script language="javascript" src="{{ url_for('static', filename='js/moment.min.js') }}"></script> + <script language="javascript" src="{{ url_for('static', filename='js/jquery.daterangepicker.min.js') }}"></script> + <script language="javascript" src="{{ url_for('static', filename='js/d3.min.js') }}"></script> + <style> + line.link { + stroke: #666; + } + line.link:hover{ + stroke: red; + stroke-width: 2px + } + .line_sparkline { + fill: none; + stroke: #000; + stroke-width: 2.0px; + } + .node { + pointer-events: all; + } + + circle { + stroke: none; + } + + .graph_text_node { + font: 8px sans-serif; + pointer-events: none; + } + + .graph_node_icon { + pointer-events: none; + } + + .node text { + font: 8px sans-serif; + pointer-events: auto; + } + + div.tooltip { + position: absolute; + text-align: center; + padding: 2px; + font: 12px sans-serif; + background: #ebf4fb; + border: 2px solid #b7ddf2; + border-radius: 8px; + pointer-events: none; + color: #000000; + } + + .graph_panel { + padding: unset; + } + + .line_graph { + fill: none; + stroke: steelblue; + stroke-width: 2px; + stroke-linejoin: round; + stroke-linecap: round; + stroke-width: 1.5; + /*attr('stroke', '#bcbd22').*/ + } + </style> + </head> + <body> + + {% include 'navbar.html' %} + + <div id="page-wrapper"> + <div class="row"> + + + </div> + + <!-- /#page-wrapper --> + <div class="panel panel-info"> + <div class="panel-heading panelText"> + <h3>{{ hash }} :</h3> + <span class="pull-right"> </span> + <span class="badge pull-right">6 / 26</span> + <ul class="list-group"><li class="list-group-item"> + + <div class="row"> + <div class="col-md-10"> + + <table class="table table-condensed"> + <thead> + <tr> + <th>Estimated type</th> + <th>First_seen</th> + <th>Last_seen</th> + <th>Size (Kb)</th> + <th>Nb seen</th> + </tr> + </thead> + <tbody> + <tr> + <td class="panelText"><i class="fa {{ file_icon }}"></i> {{ estimated_type }}</td> + <td class="panelText">{{ first_seen }}</td> + <td class="panelText">{{ last_seen }}</td> + <td class="panelText">{{ size }}</td> + <td class="panelText">{{ nb_seen_in_all_pastes }}</td> + </tr> + </tbody> + </table> + </div> + <div class="col-md-1"> + <div id="sparkline"></div> + </div> + </div> + </li></ul> + + {% if vt_enabled %} + {% if not b64_vt %} + <darkbutton> + <button id="submit_vt_b" class="btn btn-primary" onclick="sendFileToVT('{{ hash }}')"> + <i class="fa fa-paper-plane"></i> Send this file to VT + </button> + </darkbutton> + {% else %} + <a class="btn btn-primary" target="_blank" href="{{ b64_vt_link }}"><i class="fa fa-link"> VT Report</i></a> + {% endif %} + <button class="btn btn-default" onclick="updateVTReport('{{ hash }}')"> + <div id="report_vt_b"><span class="glyphicon glyphicon-refresh"></span> {{ b64_vt_report }}</div> + </button> + {% else %} + Virus Total submission is disabled + {% endif %} + + <a href="/hashDecoded/downloadHash?hash={{hash}}" target="blank"> + <button class='btn 
btn-info pull-right'><i id="flash-tld" class="glyphicon glyphicon-download-alt " flash-tld=""></i> Download Hashed file + </button> + </a> + </div></div> + + <div class="row"> + <div class="col-md-10"> + + <div class="panel panel-default"> + <div class="panel-heading"> + <i id="flash-tld" class="glyphicon glyphicon-flash " flash-tld=""></i> Graph + </div> + <div class="panel-body graph_panel"> + <div id="graph"> + </div> + </div> + </div> + </div> + + <div class="col-md-2"> + + <div class="panel panel-info"> + <div class="panel-heading"> + <i class="fa fa-unlock-alt" aria-hidden="true"></i> Encoding + </div> + <div class="panel-body" style="text-align:center;"> + {% for encoding in list_hash_decoder %} + <button id="" class="btn btn-default"> + {{encoding['encoding']}} <span class="badge">{{encoding['nb_seen']}}</span> + </button> + {% endfor %} + </div> + </div> + + <div class="panel panel-default"> + <div class="panel-heading"> + <i id="flash-tld" class="glyphicon glyphicon-flash " flash-tld=""></i> Graph + </div> + <div class="panel-body" style="text-align:center;"> + <button class="btn btn-primary" onclick="resize_graph();"> + <span class="glyphicon glyphicon-refresh"></span> Resize Graph</div> + </button> + + + <ul class="list-group"> + <li class="list-group-item list-group-item-info" style="text-align:center;"><i class="fa fa-info-circle fa-2x"></i></li> + <li class="list-group-item"> + <p>Double click on a node to open Hash/Paste<br><br> + <svg height="12" width="12"><g class="nodes"><circle cx="6" cy="6" r="6" fill="orange"></circle></g></svg> + Current Hash<br> + <svg height="12" width="12"><g class="nodes"><circle cx="6" cy="6" r="6" fill="rgb(141, 211, 199)"></circle></g></svg> + Hashes<br> + <svg height="12" width="12"><g class="nodes"><circle cx="6" cy="6" r="6" fill="#1f77b4"></circle></g></svg> + Pastes + </p> + </li> + <li class="list-group-item list-group-item-info"> + Hash Types: + </li> + <li class="list-group-item"> + <i class="fa fa-file"></i> Application<br> + <i class="fa fa-file-video-o"></i> Audio<br> + <i class="fa fa-file-image-o"></i> Image<br> + <i class="fa fa-file-text-o"></i> Text<br> + <i class="fa fa-file-o"></i> Other + </li> + </ul> + </div> + </div> + </div> + + <div class="panel panel-default"> + <div class="panel-heading"> + <i id="flash-tld" class="glyphicon glyphicon-stats" flash-tld=""></i> Graph + </div> + <div class="panel-body "> + <div id="graph_line"> + </div> + </div> + </div> + + </div> + + </div> + + </div> + <!-- /.row --> + + <script> + var all_graph = {}; + $(document).ready(function(){ + sparklines("sparkline", {{ sparkline_values }}) + + all_graph.node_graph = create_graph('/hashDecoded/hash_graph_node_json?hash={{hash}}'); + all_graph.line_chart = create_line_chart('graph_line', '/hashDecoded/hash_graph_line_json?hash={{hash}}'); + all_graph.onResize(); + }); + + $(window).on("resize", function() { + all_graph.onResize(); + }); + </script> +<script> + function sendFileToVT(hash) { + //send file to vt + $.getJSON('/hashDecoded/send_file_to_vt_js?hash='+hash, + function(data) { + var content = '<a id="submit_vt_b" class="btn btn-primary" target="_blank" href="'+ data['vt_link'] +'"><i class="fa fa-link"> '+ ' VT Report' +'</i></a>'; + $('#submit_vt_b').remove(); + $('darkbutton').append(content); + }); + } + + function updateVTReport(hash) { + //updateReport + $.getJSON('/hashDecoded/update_vt_result?hash='+hash, + function(data) { + var content = '<span class="glyphicon glyphicon-refresh"></span> ' +data['report_vt']; + $( 
"#report_vt_b" ).html(content); + }); + } + +</script> +<script> + function resize_graph() { + zoom.translateTo(svg_node, 200, 200); + zoom.scaleTo(svg_node, 2); + } + +</script> +<script> +//var data = [6,3,3,2,5,3,9]; + +// a sparklines plot +function sparklines(id, points) { + var width_spark = 100, height_spark = 60; + + var data = [] + for (i = 0; i < points.length; i++) { + data[i] = { + 'x': i, + 'y': +points[i] + } + } + + var x = d3.scaleLinear() + .range([0, width_spark - 10]) + .domain([0,5]); + + var y = d3.scaleLinear() + .range([height_spark, 0]) + .domain([0,10]); + + var line = d3.line() + .x(function(d) {return x(d.x)}) + .y(function(d) {return y(d.y)}); + + d3.select("#"+id).append('svg') + .attr('width', width_spark) + .attr('height', height_spark) + .append('path') + .attr('class','line_sparkline') + .datum(data) + .attr('d', line); + +} +</script> + +<script> +var width = 400, + height = 400; + +var link; + +var zoom = d3.zoom() + .scaleExtent([.2, 10]) + .on("zoom", zoomed); + +//var transform = d3.zoomIdentity; + +var color = d3.scaleOrdinal(d3.schemeCategory10); + +var div = d3.select("body").append("div") + .attr("class", "tooltip") + .style("opacity", 0); + +var simulation = d3.forceSimulation() + .force("link", d3.forceLink().id(function(d) { return d.id; })) + .force("charge", d3.forceManyBody()) + .force("center", d3.forceCenter(width / 2, height / 2)); + //.on("tick", ticked); + +var svg_node = d3.select("#graph").append("svg") + .attr("id", "graph_div") + .attr("width", width) + .attr("height", height) + .call(d3.zoom().scaleExtent([1, 8]).on("zoom", zoomed)) + .on("dblclick.zoom", null) + +var container_graph = svg_node.append("g"); + //.attr("transform", "translate(40,0)") + //.attr("transform", "scale(2)"); + +function create_graph(url){ + +d3.json(url) +.then(function(data){ + + link = container_graph.append("g") + .selectAll("line") + .data(data.links) + .enter().append("line") + .attr("class", "link"); + //.attr("stroke-width", function(d) { return Math.sqrt(d.value); }) + + var node = container_graph.selectAll(".node") + .data(data.nodes) + .enter().append("g") + .attr("class", "nodes") + .on("dblclick", doubleclick) + .on("click", click) + .on("mouseover", mouseovered) + .on("mouseout", mouseouted) + .call(d3.drag() + .on("start", drag_start) + .on("drag", dragged) + .on("end", drag_end)); + + + node.append("circle") + .attr("r", function(d) { + return (d.hash) ? 6 : 5; }) + .attr("fill", function(d) { + if(!d.hash){ return color(d.group);} + if(d.group == 1){ return "orange";} + return "rgb(141, 211, 199)"; }); + + node.append('text') + .attr('text-anchor', 'middle') + .attr('dominant-baseline', 'central') + .attr("class", "graph_node_icon") + .attr('font-family', 'FontAwesome') + .attr('font-size', '8px' ) + .attr('pointer-events', 'none') + .text(function(d) { + if(d.hash){ + return d.icon + } }); + + zoom.translateTo(svg_node, 200, 200); + zoom.scaleTo(svg_node, 2); + +/* node.append("text") + .attr("dy", 3) + .attr("dx", 7) + .attr("class", "graph_text_node") + //.style("text-anchor", function(d) { return d.children ? 
"end" : "start"; }) + .text(function(d) { return d.id; });*/ + + simulation + .nodes(data.nodes) + .on("tick", ticked); + + simulation.force("link") + .links(data.links); + + function ticked() { + link + .attr("x1", function(d) { return d.source.x; }) + .attr("y1", function(d) { return d.source.y; }) + .attr("x2", function(d) { return d.target.x; }) + .attr("y2", function(d) { return d.target.y; }); + + /*node + .attr("cx", function(d) { return d.x; }) + .attr("cy", function(d) { return d.y; });*/ + node.attr("transform", function(d) { return "translate(" + d.x + "," + d.y + ")"; }); + } + +}); +} + +function zoomed() { + container_graph.attr("transform", d3.event.transform); +} + +function doubleclick (d) { +window.open(d.url, '_blank'); +} + +function click (d) { +console.log('clicked') +} + +function drag_start(d) { +if (!d3.event.active) simulation.alphaTarget(0.3).restart(); +d.fx = d.x; +d.fy = d.y; +} + +function dragged(d) { +d.fx = d3.event.x; +d.fy = d3.event.y; +} + +function drag_end(d) { +if (!d3.event.active) simulation.alphaTarget(0); +d.fx = d.x; +d.fy = d.y; +} + +function mouseovered(d) { + +// tooltip +var content; + +if(d.hash == true){ + content = "<b>"+d.id+"</b>"+"<br/>"+ + "<br/>"+ + "<i>First seen</i>: "+d.first_seen+"<br/>"+ + "<i>Last seen</i>: "+d.last_seen+"<br/>"+ + "<i>nb_seen_in_paste</i>: "+d.nb_seen_in_paste+"<br/>"+ + "<i>Size (kb)</i>: "+d.size+"<br/>"+ + "<br/>"+ + "<i>Estimated type</i>: "+d.estimated_type; +} else { + content = "<b>"+d.id+"</b>"+"<br/>"; +} + + div.transition() + .duration(200) + .style("opacity", .9); + div.html(content) + .style("left", (d3.event.pageX) + "px") + .style("top", (d3.event.pageY - 28) + "px"); + + //links + /*link.style("stroke-opacity", function(o) { + return o.source === d || o.target === d ? 1 : opacity; + });*/ + link.style("stroke", function(o){ + return o.source === d || o.target === d ? 
"#666" : "#ddd"; + }); +} + +function mouseouted() { + div.transition() + .duration(500) + .style("opacity", 0); + + link.style("stroke", "#666"); +} + +all_graph.onResize = function () { + var aspect = 1000 / 500, all_graph = $("#graph_div"); + var targetWidth = all_graph.parent().width(); + all_graph.attr("width", targetWidth); + all_graph.attr("height", targetWidth / aspect); +} + +window.all_graph = all_graph; +</script> + +<script> +function create_line_chart(id, url){ + + var width = 900; + var height = Math.round(width / 4); + + var margin = {top: 20, right: 55, bottom: 50, left: 40}; + + var x = d3.scaleTime().range([0, width]); + var y = d3.scaleLinear().rangeRound([height, 0]); + + var xAxis = d3.axisBottom(x); + var yAxis = d3.axisLeft(y); + + var parseTime = d3.timeParse("%Y-%m-%d"); + + var line = d3.line() + .x(function(d) { + return x(d.date); + }).y(function(d) { + return y(d.value); + }); + + var svg_line = d3.select('#'+id).append('svg') + .attr("id", "graph_div") + .attr("width", width + margin.left + margin.right) + .attr("height", height + margin.top + margin.bottom) + .append('g') + .attr('transform', "translate("+ margin.left +","+ margin.top +")"); + + var div = d3.select('body').append('div') + .attr('class', 'tooltip') + .style('opacity', 0); + + //add div tooltip + +d3.json(url) + .then(function(data){ + + data.forEach(function(d) { + d.date_label = d.date; + d.date = parseTime(d.date); + d.value = +d.value; + }); + + // fit the data + x.domain(d3.extent(data, function(d) { return d.date; })); + //x.domain(data.map(function (d) { return d.date; })); //E + y.domain([0, d3.max(data, function(d){ return d.value ; })]); + + //line + svg_line.append("path") + .data([data]) + .attr("class", "line_graph") + .attr("d", line); + + // add X axis + svg_line.append("g") + .attr("transform", "translate(0," + height + ")") + .call(d3.axisBottom(x)) + .selectAll("text") + .style("text-anchor", "end") + .attr("transform", "rotate(-45)" ); + + // Add the Y Axis + svg_line.append("g") + .call(d3.axisLeft(y)); + + //add a dot circle + svg_line.selectAll('dot') + .data(data).enter() + .append('circle') + .attr('r', 2) + .attr('cx', function(d) { return x(d.date); }) + .attr('cy', function(d) { return y(d.value); }) + + .on('mouseover', function(d) { + div.transition().style('opacity', .9); + div.html('' + d.date_label+ '<br/>' + d.value).style('left', (d3.event.pageX) + 'px') + .style("left", (d3.event.pageX) + "px") + .style("top", (d3.event.pageY - 28) + "px"); + }) + .on('mouseout', function(d) + { + div.transition().style('opacity', 0); + }); + + }); +} +</script> + + </body> + +</html> diff --git a/var/www/modules/search/templates/search.html b/var/www/modules/search/templates/search.html index 3c7e3472..adc1b555 100644 --- a/var/www/modules/search/templates/search.html +++ b/var/www/modules/search/templates/search.html @@ -5,7 +5,8 @@ <meta charset="utf-8"> <meta name="viewport" content="width=device-width, initial-scale=1.0"> - <title>Analysis Information Leak framework Dashboard</title> + <title>Search - AIL</title> + <link rel="icon" href="{{ url_for('static', filename='image/ail-icon.png') }}"> <!-- Core CSS --> <link href="{{ url_for('static', filename='css/bootstrap.min.css') }}" rel="stylesheet"> diff --git a/var/www/modules/sentiment/templates/header_sentiment.html b/var/www/modules/sentiment/templates/header_sentiment.html index 650a7717..7e757604 100644 --- a/var/www/modules/sentiment/templates/header_sentiment.html +++ 
b/var/www/modules/sentiment/templates/header_sentiment.html @@ -1,4 +1,4 @@ -<li id='page-sentiment'><a class="dropdown-toggle" data-toggle="dropdown" href="#"><i class="fa fa-heart"></i> Sentiment Analysis +<li id='page-sentiment'><a class="dropdown-toggle" data-toggle="dropdown" href="{{ url_for('sentiments.sentiment_analysis_trending') }}"><i class="fa fa-heart"></i> Sentiment Analysis <span class="caret"></span></a> <ul class="dropdown-menu"> <li><a href="{{ url_for('sentiments.sentiment_analysis_trending') }}"><i class="fa fa-bar-chart-o"> </i> Sentiment trending</a></li> diff --git a/var/www/modules/sentiment/templates/sentiment_analysis_plot_tool.html b/var/www/modules/sentiment/templates/sentiment_analysis_plot_tool.html index 5d79addc..c8077e3b 100644 --- a/var/www/modules/sentiment/templates/sentiment_analysis_plot_tool.html +++ b/var/www/modules/sentiment/templates/sentiment_analysis_plot_tool.html @@ -5,7 +5,8 @@ <meta charset="utf-8"> <meta name="viewport" content="width=device-width, initial-scale=1.0"> - <title>Analysis Information Leak framework Dashboard</title> + <title>Sentiment Plot Tool - AIL</title> + <link rel="icon" href="{{ url_for('static', filename='image/ail-icon.png') }}"> <!-- Core CSS --> <link href="{{ url_for('static', filename='css/bootstrap.min.css') }}" rel="stylesheet"> @@ -68,7 +69,7 @@ <ul id="providerList2"> </ul> </div> - </div> + </div> </div> <!-- right column --> <div class="col-lg-3"> @@ -90,7 +91,7 @@ <!-- /.panel --> </div> </div> - + <!-- Panel PLOT --> <div class="row"> <div class="col-lg-12"> diff --git a/var/www/modules/sentiment/templates/sentiment_analysis_trending.html b/var/www/modules/sentiment/templates/sentiment_analysis_trending.html index 4895bba1..f0c12790 100644 --- a/var/www/modules/sentiment/templates/sentiment_analysis_trending.html +++ b/var/www/modules/sentiment/templates/sentiment_analysis_trending.html @@ -5,7 +5,8 @@ <meta charset="utf-8"> <meta name="viewport" content="width=device-width, initial-scale=1.0"> - <title>Analysis Information Leak framework Dashboard</title> + <title>Sentiment Trending - AIL</title> + <link rel="icon" href="{{ url_for('static', filename='image/ail-icon.png') }}"> <!-- Core CSS --> <link href="{{ url_for('static', filename='css/bootstrap.min.css') }}" rel="stylesheet"> @@ -36,7 +37,7 @@ strong { font-size: 16px; } - + .table { margin-bottom: 0px; } @@ -45,7 +46,7 @@ padding-left:0; list-style:none } - + .sparkLineStats { position: relative; margin-bottom: -4px; @@ -64,7 +65,7 @@ .sparkLineStats ul li div:first-child { margin-right: 5px; } - + .panelInside { padding: 5px; } @@ -119,7 +120,7 @@ <div id="today_divr" class="sparkLineStats"> </div> - </div> + </div> </div> <!-- right column --> <div class="col-lg-3"> @@ -167,9 +168,9 @@ </div> <div class="col-lg-6"> <div id="week_divr" class="sparkLineStats"> - + </div> - </div> + </div> </div> <!-- right column --> <div class="col-lg-3"> @@ -221,7 +222,7 @@ </div> </div> - + <!-- /.row --> </div> <!-- /#page-wrapper --> @@ -236,7 +237,7 @@ $(document).ready(function(){ activePage = $('h1.page-header').attr('data-page'); $("#"+activePage).addClass("active"); - $('[data-toggle="tooltip"]').tooltip(); + $('[data-toggle="tooltip"]').tooltip(); $("#LoadAll").click(function(){ draw_page("True"); }); draw_page("False"); diff --git a/var/www/modules/showpaste/Flask_showpaste.py b/var/www/modules/showpaste/Flask_showpaste.py index 13c2cc45..25e60279 100644 --- a/var/www/modules/showpaste/Flask_showpaste.py +++ 
b/var/www/modules/showpaste/Flask_showpaste.py @@ -6,12 +6,14 @@ ''' import redis import json +import os import flask -from flask import Flask, render_template, jsonify, request, Blueprint, make_response +from flask import Flask, render_template, jsonify, request, Blueprint, make_response, redirect, url_for, Response import difflib import ssdeep import Paste +import requests # ============ VARIABLES ============ import Flask_config @@ -28,13 +30,15 @@ DiffMaxLineLength = Flask_config.DiffMaxLineLength bootstrap_label = Flask_config.bootstrap_label misp_event_url = Flask_config.misp_event_url hive_case_url = Flask_config.hive_case_url +vt_enabled = Flask_config.vt_enabled showsavedpastes = Blueprint('showsavedpastes', __name__, template_folder='templates') # ============ FUNCTIONS ============ -def showpaste(content_range): - requested_path = request.args.get('paste', '') +def showpaste(content_range, requested_path): + vt_enabled = Flask_config.vt_enabled + paste = Paste.Paste(requested_path) p_date = str(paste._get_p_date()) p_date = p_date[6:]+'/'+p_date[4:6]+'/'+p_date[0:4] @@ -118,7 +122,6 @@ def showpaste(content_range): else: automatic = False - tag_hash = ssdeep.hash(tag) if r_serv_statistics.sismember('tp:'+tag, requested_path): tag_status_tp = True else: @@ -130,6 +133,40 @@ def showpaste(content_range): list_tags.append( (tag, automatic, tag_status_tp, tag_status_fp) ) + l_64 = [] + # load hash files + if r_serv_metadata.scard('hash_paste:'+requested_path) > 0: + set_b64 = r_serv_metadata.smembers('hash_paste:'+requested_path) + for hash in set_b64: + nb_in_file = int(r_serv_metadata.zscore('nb_seen_hash:'+hash, requested_path)) + estimated_type = r_serv_metadata.hget('metadata_hash:'+hash, 'estimated_type') + file_type = estimated_type.split('/')[0] + # set file icon + if file_type == 'application': + file_icon = 'fa-file-o ' + elif file_type == 'audio': + file_icon = 'fa-file-video-o ' + elif file_type == 'image': + file_icon = 'fa-file-image-o' + elif file_type == 'text': + file_icon = 'fa-file-text-o' + else: + file_icon = 'fa-file' + saved_path = r_serv_metadata.hget('metadata_hash:'+hash, 'saved_path') + if r_serv_metadata.hexists('metadata_hash:'+hash, 'vt_link'): + b64_vt = True + b64_vt_link = r_serv_metadata.hget('metadata_hash:'+hash, 'vt_link') + b64_vt_report = r_serv_metadata.hget('metadata_hash:'+hash, 'vt_report') + else: + b64_vt = False + b64_vt_link = '' + b64_vt_report = r_serv_metadata.hget('metadata_hash:'+hash, 'vt_report') + # hash never refreshed + if b64_vt_report is None: + b64_vt_report = '' + + l_64.append( (file_icon, estimated_type, hash, saved_path, nb_in_file, b64_vt, b64_vt_link, b64_vt_report) ) + if Flask_config.pymisp is False: misp = False else: @@ -157,25 +194,28 @@ def showpaste(content_range): hive_url = hive_case_url.replace('id_here', hive_case) return render_template("show_saved_paste.html", date=p_date, bootstrap_label=bootstrap_label, active_taxonomies=active_taxonomies, active_galaxies=active_galaxies, list_tags=list_tags, source=p_source, encoding=p_encoding, language=p_language, size=p_size, mime=p_mime, lineinfo=p_lineinfo, content=p_content, initsize=len(p_content), duplicate_list = p_duplicate_list, simil_list = p_simil_list, hashtype_list = p_hashtype_list, date_list=p_date_list, - misp=misp, hive=hive, misp_eventid=misp_eventid, misp_url=misp_url, hive_caseid=hive_caseid, hive_url=hive_url) + l_64=l_64, vt_enabled=vt_enabled, misp=misp, hive=hive, misp_eventid=misp_eventid, misp_url=misp_url, hive_caseid=hive_caseid, 
hive_url=hive_url) # ============ ROUTES ============ @showsavedpastes.route("/showsavedpaste/") #completely shows the paste in a new tab def showsavedpaste(): - return showpaste(0) + requested_path = request.args.get('paste', '') + print(requested_path) + return showpaste(0, requested_path) @showsavedpastes.route("/showsavedrawpaste/") #shows raw def showsavedrawpaste(): requested_path = request.args.get('paste', '') paste = Paste.Paste(requested_path) content = paste.get_p_content() - return content, 200, {'Content-Type': 'text/plain'} + return Response(content, mimetype='text/plain') @showsavedpastes.route("/showpreviewpaste/") def showpreviewpaste(): num = request.args.get('num', '') - return "|num|"+num+"|num|"+showpaste(max_preview_modal) + requested_path = request.args.get('paste', '') + return "|num|"+num+"|num|"+showpaste(max_preview_modal, requested_path) @showsavedpastes.route("/getmoredata/") @@ -202,5 +242,26 @@ def showDiff(): the_html = htmlD.make_file(lines1, lines2) return the_html +@showsavedpastes.route('/send_file_to_vt/', methods=['POST']) +def send_file_to_vt(): + b64_path = request.form['b64_path'] + paste = request.form['paste'] + hash = request.form['hash'] + + b64_full_path = os.path.join(os.environ['AIL_HOME'], b64_path) + b64_content = '' + with open(b64_full_path, 'rb') as f: + b64_content = f.read() + + files = {'file': (hash, b64_content)} + response = requests.post('https://www.virustotal.com/vtapi/v2/file/scan', files=files, params=vt_auth) + json_response = response.json() + print(json_response) + + vt_b64_link = json_response['permalink'].split('analysis')[0] + 'analysis/' + r_serv_metadata.hset('metadata_hash:'+hash, 'vt_link', vt_b64_link) + + return redirect(url_for('showsavedpastes.showsavedpaste', paste=paste)) + # ========= REGISTRATION ========= app.register_blueprint(showsavedpastes) diff --git a/var/www/modules/showpaste/templates/show_saved_paste.html b/var/www/modules/showpaste/templates/show_saved_paste.html index 1340f471..afcd7249 100644 --- a/var/www/modules/showpaste/templates/show_saved_paste.html +++ b/var/www/modules/showpaste/templates/show_saved_paste.html @@ -1,7 +1,8 @@ <!DOCTYPE html> <html lang="en"> <head> - <title>Paste information</title> + <title>Paste information - AIL</title> + <link rel="icon" href="{{ url_for('static', filename='image/ail-icon.png') }}"> <meta charset="utf-8"> <meta name="viewport" content="width=device-width, initial-scale=1"> @@ -27,6 +28,11 @@ overflow-x: hidden; width:100%; } + + .red_table thead{ + background: #d91f2d; + color: #fff; + } </style> </head> @@ -372,6 +378,48 @@ </tbody> </table> {% endif %} + + {% if l_64|length != 0 %} + <h3> Hash files: </h3> + <table id="tableb64" class="red_table table table-striped table-bordered"> + <thead> + <tr> + <th>estimated type</th> + <th>hash</th> + <th>saved_path</th> + <th>Virus Total</th> + </tr> + </thead> + <tbody> + {% for b64 in l_64 %} + <tr> + <td><i class="fa {{ b64[0] }}"></i> {{ b64[1] }}</td> + <td><a target="_blank" href="{{ url_for('hashDecoded.showHash') }}?hash={{ b64[2] }}">{{ b64[2] }}</a> ({{ b64[4] }})</td> + <td>{{ b64[3] }}</td> + <td style="text-align:center;"> + {% if vt_enabled %} + {% if not b64[5] %} + <darkbutton_{{ b64[2] }}> + <button id="submit_vt_{{ b64[2] }}" class="btn btn-primary" onclick="sendFileToVT('{{ b64[2] }}')"> + <i class="fa fa-paper-plane"></i> Send this file to VT + </button> + </darkbutton_{{ b64[2] }}> + {% else %} + <a class="btn btn-primary" target="_blank" href="{{ b64[6] }}"><i class="fa fa-link"> VT 
Report</i></a>
+          {% endif %}
+          <button class="btn btn-default" onclick="updateVTReport('{{ b64[2] }}')">
+            <div id="report_vt_{{ b64[2] }}"><span class="glyphicon glyphicon-refresh"></span> {{ b64[7] }}</div>
+          </button>
+        {% else %}
+          Virus Total submission is disabled
+        {% endif %}
+        </td>
+      </tr>
+      {% endfor %}
+    </tbody>
+  </table>
+  {% endif %}
+
       <h3> Content: </h3>
       <a href="{{ url_for('showsavedpastes.showsavedrawpaste') }}?paste={{ request.args.get('paste') }}" id='raw_paste' > [Raw content] </a>
       <p data-initsize="{{ initsize }}"> <pre id="paste-holder">{{ content }}</pre></p>
@@ -406,9 +454,36 @@
       });
 
       $('#tableDup').DataTable();
+      $('#tableb64').DataTable({
+        "aLengthMenu": [[5, 10, 15, -1], [5, 10, 15, "All"]],
+        "iDisplayLength": 5,
+        "order": [[ 1, "asc" ]]
+      });
     });
   </script>
+<script>
+  function updateVTReport(hash) {
+    //updateReport
+    $.getJSON('/hashDecoded/update_vt_result?hash='+hash,
+      function(data) {
+        content = '<span class="glyphicon glyphicon-refresh"></span> ' +data['report_vt']
+        $( "#report_vt_"+hash ).html(content);
+      });
+  }
+
+  function sendFileToVT(hash) {
+    //send file to vt
+    $.getJSON('/hashDecoded/send_file_to_vt_js?hash='+hash,
+      function(data) {
+        var content = '<a id="submit_vt_'+hash+'" class="btn btn-primary" target="_blank" href="'+ data['vt_link'] +'"><i class="fa fa-link"> '+ ' VT Report' +'</i></a>';
+        $('#submit_vt_'+hash).remove();
+        $('darkbutton_'+hash).append(content);
+      });
+  }
+
+</script>
+
   <script>
     jQuery("#all-tags-taxonomies").click(function(e){
       //change input tags list
diff --git a/var/www/modules/terms/templates/credentials_tracker.html b/var/www/modules/terms/templates/credentials_tracker.html
index e95e7db2..4d91e6f5 100644
--- a/var/www/modules/terms/templates/credentials_tracker.html
+++ b/var/www/modules/terms/templates/credentials_tracker.html
@@ -4,9 +4,10 @@
 <head>
   <meta charset="utf-8">
   <meta name="viewport" content="width=device-width, initial-scale=1.0">
-
-  <title>Analysis Information Leak framework Dashboard</title>
-
+
+  <title>Credentials Tracker - AIL</title>
+  <link rel="icon" href="{{ url_for('static', filename='image/ail-icon.png') }}">
+
   <!-- Core CSS -->
   <link href="{{ url_for('static', filename='css/bootstrap.min.css') }}" rel="stylesheet">
   <link href="{{ url_for('static', filename='font-awesome/css/font-awesome.css') }}" rel="stylesheet">
@@ -24,19 +25,19 @@
   <script src="{{ url_for('static', filename='js/jquery.flot.js') }}"></script>
   <script src="{{ url_for('static', filename='js/jquery.flot.time.js') }}"></script>
   <script src="{{ url_for('static', filename='js/jquery.flot.stack.js') }}"></script>
-
+
   <style>
     .btn-link {
       color: #000000
     }
   </style>
-
+
 </head>
 <body>
   <!-- Modal -->
   <div id="mymodal" class="modal fade" role="dialog">
     <div class="modal-dialog modal-lg">
-
+
       <!-- Modal content-->
       <div id="mymodalcontent" class="modal-content">
         <div id="mymodalbody" class="modal-body" max-width="8500px">
@@ -51,7 +52,7 @@
         </div>
       </div>
   {% include 'navbar.html' %}
-
+
   <div id="page-wrapper">
     <div class="row">
       <div class="col-lg-12">
@@ -110,7 +111,7 @@
       $('[data-toggle="tooltip"]').tooltip();
 
       table_track = $('#myTable').DataTable({ "order": [[ 1, "dec" ]] });
-
+
       table_track.on( 'draw.dt', function () {
         perform_binding();
       });
@@ -146,7 +147,7 @@
       var url = "{{ url_for('terms.credentials_management_query_paste') }}?cred=" + encodeURIComponent($(this).attr('data-term'));
       $.ajax({
         type: 'POST',
-        url: url, 
+        url: url,
         dataType: "json",
         data: JSON.stringify({ 'allPath': JSON.parse($(this).attr('data-path')) }),
         contentType: "application/json"
@@ -190,10 +191,10 @@
         }
       )});
     }
-
+
     function perform_operation(){
       var curr_section = $(this).attr('data-section');
-      var curr_action = $(this).attr('data-action'); 
+      var curr_action = $(this).attr('data-action');
       if (curr_action == "add") {
         var curr_term = $('#'+curr_section+'Input').val();
       } else if (curr_action == "seek") {
@@ -202,13 +203,13 @@
         var curr_term = $(this).attr('data-content');
       }
       var data_to_send = { section: curr_section, action:curr_action, term: curr_term, extensive: $("#extensive").is(":checked")};
-
+
       if (curr_term != "") {
         //console.log(data_to_send);
         $.get("{{ url_for('terms.cred_management_action') }}", data_to_send, function(data, status){
           if(status == "success") {
             var json = data;
-
+
             if(json.action == "add") {
               //not used for the moment
@@ -231,16 +232,16 @@
               $( "#nodata" ).text(curr_term);
               $( "#nodata" ).fadeIn( "fast");
               toAdd = "</button><span data-toggle=\"modal\" data-target=\"#mymodal\" data-term=\""+rep.usr[i]+"\" data-path=\"["+rep.path[i]+"]\" ><button class=\"btn-link\" data-toggle=\"tooltip\" data-placement=\"right\" title=\"Show concerned paste(s)\"><span class=\"glyphicon glyphicon-info-sign\"></span></button></span>";
-              table_track.row.add( [ 
-                  rep.usr[i], 
-                  rep.simil[i], 
-                  rep.numPaste[i], 
+              table_track.row.add( [
+                  rep.usr[i],
+                  rep.simil[i],
+                  rep.numPaste[i],
                   toAdd+action_button ] ).draw( false );
             }
             perform_binding();
             perform_modal_binding();
           }
-        } 
+        }
         }
       });
     }
diff --git a/var/www/modules/terms/templates/header_terms.html b/var/www/modules/terms/templates/header_terms.html
index 8fa38bc5..a19290a5 100644
--- a/var/www/modules/terms/templates/header_terms.html
+++ b/var/www/modules/terms/templates/header_terms.html
@@ -1,4 +1,4 @@
-<li id='page-termsfrequency'><a class="dropdown-toggle" data-toggle="dropdown" href="#"><i class="fa fa-eye"></i> Terms frequency
+<li id='page-termsfrequency'><a class="dropdown-toggle" data-toggle="dropdown" href="{{ url_for('terms.terms_management') }}"><i class="fa fa-eye"></i> Terms frequency
     <span class="caret"></span></a>
     <ul class="dropdown-menu">
         <li><a href="{{ url_for('terms.terms_management') }}"><i class="fa fa-gear "> </i> Terms managements</a></li>
diff --git a/var/www/modules/terms/templates/terms_management.html b/var/www/modules/terms/templates/terms_management.html
index 98ac9df7..0efda575 100644
--- a/var/www/modules/terms/templates/terms_management.html
+++ b/var/www/modules/terms/templates/terms_management.html
@@ -5,7 +5,8 @@
   <meta charset="utf-8">
   <meta name="viewport" content="width=device-width, initial-scale=1.0">
 
-  <title>Analysis Information Leak framework Dashboard</title>
+  <title>Terms Management</title>
+  <link rel="icon" href="{{ url_for('static', filename='image/ail-icon.png') }}">
 
   <!-- Core CSS -->
   <link href="{{ url_for('static', filename='css/bootstrap.min.css') }}" rel="stylesheet">
diff --git a/var/www/modules/terms/templates/terms_plot_tool.html b/var/www/modules/terms/templates/terms_plot_tool.html
index 6337acce..29930781 100644
--- a/var/www/modules/terms/templates/terms_plot_tool.html
+++ b/var/www/modules/terms/templates/terms_plot_tool.html
@@ -5,7 +5,8 @@
   <meta charset="utf-8">
   <meta name="viewport" content="width=device-width, initial-scale=1.0">
 
-  <title>Analysis Information Leak framework Dashboard</title>
+  <title>Terms Plot Tool - AIL</title>
+  <link rel="icon" href="{{ url_for('static', filename='image/ail-icon.png') }}">
 
   <!-- Core CSS -->
   <link href="{{ url_for('static', filename='css/bootstrap.min.css') }}" rel="stylesheet">
@@ -82,7 +83,7 @@
       <!-- /.panel -->
         </div>
       </div>
-
+
       <!-- Panel PLOT -->
       <div class="row">
         <div class="col-lg-12">
@@ -138,9 +139,9 @@
       $( "#amount" ).val( new Date($( ".sliderRange" ).slider( "values", 0 )).toLocaleDateString() +
         " - " + new Date($( ".sliderRange" ).slider( "values", 1 )).toLocaleDateString() );
 
-
-      $('#plot-btn').click(plotData); 
-      $('#plot-btn-add').click(addData); 
+
+      $('#plot-btn').click(plotData);
+      $('#plot-btn-add').click(addData);
       $("#TermInput").val($("#TermInput").attr("data-init-plot"));
 
       if($("#TermInput").attr("data-init-plot") != "") {
@@ -163,16 +164,16 @@ var graph_data = [];
 var plotted_terms = [];
 var graph_options = {
       series: {
-        lines: { 
+        lines: {
           show: true,
           lineWidth: 2
         },
         bars: {show: false, barWidth: 60*60*1000},
         shadowSize: 0
       },
-      grid: { 
-        hoverable: true, 
-        clickable: true, 
+      grid: {
+        hoverable: true,
+        clickable: true,
         tickColor: "#f9f9f9",
         borderWidth: 0
       },
@@ -203,7 +204,7 @@ function plotData() {
       for(i=1; i<data.length; i++) {
         curr_data.push([data[i][0]*1000, data[i][1]]);
       }
-      to_plot.push({ data: curr_data, label: term}); 
+      to_plot.push({ data: curr_data, label: term});
       graph_data.push({ data: curr_data, label: term});
 
       plot = $.plot($("#graph"), to_plot, graph_options);
@@ -212,7 +213,7 @@ function plotData() {
         var date = new Date(item.datapoint[0]);
         var x = parseInt(date.getUTCMonth())+1 + "/" + date.getUTCDate();
         var y = item.datapoint[1];
-
+
         $("#tooltip").html(item.series.label + " for "+x + " = " + y)
           .css({top: item.pageY-15, left: item.pageX+5})
           .fadeIn(200);
@@ -264,13 +265,11 @@ function replot() {
       $("#TermInput").val("");
     }))
   }
-
+
   $.when.apply($, promises).done( function () {
     plot = $.plot($("#graph"), graph_data, graph_options);
   });
-
+
 }
 </script>
-
-
diff --git a/var/www/modules/terms/templates/terms_plot_top.html b/var/www/modules/terms/templates/terms_plot_top.html
index bf5add76..f08f1f8d 100644
--- a/var/www/modules/terms/templates/terms_plot_top.html
+++ b/var/www/modules/terms/templates/terms_plot_top.html
@@ -5,7 +5,8 @@
   <meta charset="utf-8">
   <meta name="viewport" content="width=device-width, initial-scale=1.0">
 
-  <title>Analysis Information Leak framework Dashboard</title>
+  <title>Terms Plot Top - AIL</title>
+  <link rel="icon" href="{{ url_for('static', filename='image/ail-icon.png') }}">
 
   <!-- Core CSS -->
   <link href="{{ url_for('static', filename='css/bootstrap.min.css') }}" rel="stylesheet">
diff --git a/var/www/modules/trendingcharts/templates/Trending.html b/var/www/modules/trendingcharts/templates/Trending.html
index 4c7b5981..1eaf0696 100644
--- a/var/www/modules/trendingcharts/templates/Trending.html
+++ b/var/www/modules/trendingcharts/templates/Trending.html
@@ -9,7 +9,8 @@
   <meta http-equiv="Pragma" content="no-cache" />
   <meta http-equiv="Expires" content="0" />
 
-  <title>Analysis Information Leak framework Dashboard</title>
+  <title>Trending Charts - AIL</title>
+  <link rel="icon" href="{{ url_for('static', filename='image/ail-icon.png') }}">
 
   <!-- Core CSS -->
   <link href="{{ url_for('static', filename='css/bootstrap.min.css') }}" rel="stylesheet">
@@ -22,7 +23,7 @@
   <script src="{{ url_for('static', filename='js/jquery.flot.js') }}"></script>
   <script src="{{ url_for('static', filename='js/jquery.flot.pie.js') }}"></script>
   <script src="{{ url_for('static', filename='js/jquery.flot.time.js') }}"></script>
-  <script> 
+  <script>
     var default_display = {{ default_display }};
     var current_displayed_graph;
   </script>
@@ -50,7 +51,7 @@
       <li><a data-toggle="tab" href="#words-tab" data-pannel="WordTrending" data-path="../static//csv/wordstrendingdata.csv">Words</a></li>
     </ul>
     </br>
-
+
     <script>
       var chart_1_num_day = 5;
       var chart_2_num_day = 15;
@@ -59,7 +60,7 @@ $SCRIPT_ROOT = {{ request.script_root|tojson|safe }};
     <script type="text/javascript" src="{{ url_for('static', filename='js/trendingchart.js')}}"></script>
 
-    <div class="tab-content"> 
+    <div class="tab-content">
       <div class="col-lg-12 tab-pane fade in active" id="tld-tab" >
         {% include 'trending_graphs/Tldstrending.html' %}
       </div>
@@ -71,7 +72,7 @@ $SCRIPT_ROOT = {{ request.script_root|tojson|safe }};
       </div>
       <div class="col-lg-12 tab-pane fade" id="words-tab">
         {% include 'trending_graphs/Wordstrending.html' %}
-      </div> 
+      </div>
     </div> <!-- tab-content -->
     <!-- /.row -->
   </div>
@@ -93,7 +94,7 @@ $SCRIPT_ROOT = {{ request.script_root|tojson|safe }};
       $("[flash-"+attr_name+"]").css('color', '#fece00');
       setTimeout(function() { $("[flash-"+attr_name+"]").css('color', 'black'); }, 1000);
       refresh_top_chart(attr_name, false);
-      if (active_tab_name == attr_name) 
+      if (active_tab_name == attr_name)
         plot_top_graph(attr_name, false);
     }, refresh_interval);
   }
@@ -115,7 +116,7 @@ $SCRIPT_ROOT = {{ request.script_root|tojson|safe }};
 
     // When a pannel is shown, create_and_plot.
     $('.nav-tabs a').on('shown.bs.tab', function(event){
-      create_and_plot($(event.target).attr('data-pannel'), $(event.target).attr('data-path')); 
+      create_and_plot($(event.target).attr('data-pannel'), $(event.target).attr('data-path'));
       active_tab_name = $(event.target).attr('data-attribute-name')
       //Top progression chart
       if(launched_refresher.indexOf($(event.target).attr('data-attribute-name')) == -1){
@@ -133,7 +134,7 @@ $SCRIPT_ROOT = {{ request.script_root|tojson|safe }};
     $("[align]").css({padding: "2px", width: 'auto', 'background': "rgba(102, 102, 102, 0.15)" , 'border': "3px solid rgb(102, 102, 102)"})
 
     // Create the graph when the page has just loaded
-    create_and_plot("TldTrending", '../static//csv/tldstrendingdata.csv') 
+    create_and_plot("TldTrending", '../static//csv/tldstrendingdata.csv')
     //Top progression chart
     refresh_top_chart("tld", true);
   });
diff --git a/var/www/modules/trendingmodules/templates/Moduletrending.html b/var/www/modules/trendingmodules/templates/Moduletrending.html
index bfc0455f..e7ac8232 100644
--- a/var/www/modules/trendingmodules/templates/Moduletrending.html
+++ b/var/www/modules/trendingmodules/templates/Moduletrending.html
@@ -5,7 +5,8 @@
   <meta charset="utf-8">
   <meta name="viewport" content="width=device-width, initial-scale=1.0">
 
-  <title>Analysis Information Leak framework Dashboard</title>
+  <title>Modules Statistics - AIL</title>
+  <link rel="icon" href="{{ url_for('static', filename='image/ail-icon.png') }}">
 
   <!-- Core CSS -->
   <link href="{{ url_for('static', filename='css/bootstrap.min.css') }}" rel="stylesheet">
@@ -44,7 +45,7 @@
 
     $("[align]").css({padding: "2px", width: 'auto', 'background': "rgba(102, 102, 102, 0.15)" , 'border': "3px solid rgb(102, 102, 102)"})
 
-    refreshPlot(true); 
+    refreshPlot(true);
   });
 
   function refreshPlot(init){
@@ -63,7 +64,7 @@
     setTimeout(function() { $("[flash]").css('color', 'black'); }, 1000);
   }
 </script>
-
+
 </div>
 <script src="{{ url_for('static', filename='js/bootstrap.min.js') }}"></script>
 </body>
diff --git a/var/www/static/css/tags.css b/var/www/static/css/tags.css
index baa0f673..779ce4ff 100644
--- a/var/www/static/css/tags.css
+++ b/var/www/static/css/tags.css
@@ -47,7 +47,6 @@
   -webkit-transition: none;
   outline: none;
   display: block;
-  padding: 4px 6px;
   line-height: normal;
   overflow: hidden;
   height: auto;
diff --git a/var/www/static/image/ail-icon.png b/var/www/static/image/ail-icon.png
new file mode 100644
index 00000000..e79686ff
Binary files /dev/null and b/var/www/static/image/ail-icon.png differ
diff --git a/var/www/update_thirdparty.sh b/var/www/update_thirdparty.sh
index 9084b99d..76ae9174 100755
--- a/var/www/update_thirdparty.sh
+++ b/var/www/update_thirdparty.sh
@@ -6,6 +6,7 @@ wget http://dygraphs.com/dygraph-combined.js -O ./static/js/dygraph-combined.js
 
 SBADMIN_VERSION='3.3.7'
 FONT_AWESOME_VERSION='4.7.0'
+D3_JS_VERSION='5.5.0'
 
 rm -rf temp
 mkdir temp
@@ -13,9 +14,21 @@ mkdir temp
 wget https://github.com/BlackrockDigital/startbootstrap-sb-admin/archive/v${SBADMIN_VERSION}.zip -O temp/${SBADMIN_VERSION}.zip
 wget https://github.com/BlackrockDigital/startbootstrap-sb-admin-2/archive/v${SBADMIN_VERSION}.zip -O temp/${SBADMIN_VERSION}-2.zip
 wget https://github.com/FortAwesome/Font-Awesome/archive/v${FONT_AWESOME_VERSION}.zip -O temp/FONT_AWESOME_${FONT_AWESOME_VERSION}.zip
+wget https://github.com/d3/d3/releases/download/v${D3_JS_VERSION}/d3.zip -O temp/d3_${D3_JS_VERSION}.zip
+
+# dateRangePicker
+wget https://github.com/moment/moment/archive/2.22.2.zip -O temp/moment_2.22.2.zip
+wget https://github.com/longbill/jquery-date-range-picker/archive/v0.18.0.zip -O temp/daterangepicker_v0.18.0.zip
+
+
 unzip temp/${SBADMIN_VERSION}.zip -d temp/
 unzip temp/${SBADMIN_VERSION}-2.zip -d temp/
 unzip temp/FONT_AWESOME_${FONT_AWESOME_VERSION}.zip -d temp/
+unzip temp/d3_${D3_JS_VERSION}.zip -d temp/
+
+unzip temp/moment_2.22.2.zip -d temp/
+unzip temp/daterangepicker_v0.18.0.zip -d temp/
+
 mv temp/startbootstrap-sb-admin-${SBADMIN_VERSION} temp/sb-admin
 mv temp/startbootstrap-sb-admin-2-${SBADMIN_VERSION} temp/sb-admin-2
 mv temp/Font-Awesome-${FONT_AWESOME_VERSION} temp/font-awesome
@@ -30,6 +43,11 @@
 mv temp/font-awesome/ ./static/
 rm -rf ./static/css/plugins/
 mv temp/sb-admin/css/* ./static/css/
 mv temp/sb-admin-2/dist/css/* ./static/css/
+mv temp/jquery-date-range-picker-0.18.0/dist/daterangepicker.min.css ./static/css/
+
+mv temp/d3.min.js ./static/js/
+mv temp/moment-2.22.2/min/moment.min.js ./static/js/
+mv temp/jquery-date-range-picker-0.18.0/dist/jquery.daterangepicker.min.js ./static/js/
 
 rm -rf temp