diff --git a/bin/Cve.py b/bin/Cve.py new file mode 100755 index 00000000..7323ee5a --- /dev/null +++ b/bin/Cve.py @@ -0,0 +1,57 @@ +#!/usr/bin/env python2 +# -*-coding:UTF-8 -* +""" + The CVE Module: detects CVE identifiers in pastes and reports them +""" + +import time +import re
from pubsublogger import publisher +from packages import Paste +from Helper import Process + + +def search_cve(message): + filepath, count = message.split() + paste = Paste.Paste(filepath) + content = paste.get_p_content() + # regex to find CVE + reg_cve = re.compile(r'(CVE-)[1-2]\d{1,4}-\d{1,5}') + # list of the regex results in the Paste, may be null + results = set(reg_cve.findall(content)) + + # if at least one CVE reference was found, report the Paste + if len(results) > 0: + print('{} contains CVEs'.format(paste.p_name)) + publisher.warning('{} contains CVEs'.format(paste.p_name)) + +if __name__ == '__main__': + # If you wish to use another port or channel, do not forget to run a subscriber accordingly (see launch_logs.sh) + # Port of the redis instance used by pubsublogger + publisher.port = 6380 + # Script is the default channel used for the modules. 
+ publisher.channel = 'Script' + + # Section name in bin/packages/modules.cfg + config_section = 'Cve' + + # Setup the I/O queues + p = Process(config_section) + + # Send a description of the module to the logger + publisher.info("Run CVE module") + + # Endless loop getting messages from the input queue + while True: + # Get one message from the input queue + message = p.get_from_set() + if message is None: + publisher.debug("{} queue is empty, waiting".format(config_section)) + time.sleep(1) + continue + + # Do something with the message from the queue + search_cve(message) + + # (Optional) Send that thing to the next queue + #p.populate_set_out(something_has_been_done) diff --git a/bin/WebStats.py b/bin/WebStats.py new file mode 100755 index 00000000..15508e52 --- /dev/null +++ b/bin/WebStats.py @@ -0,0 +1,95 @@ +#!/usr/bin/env python2 +# -*-coding:UTF-8 -* +""" + The WebStats Module: computes statistics about the protocols of valid URLs +""" + +import time +import re +import redis +import os +from pubsublogger import publisher +from packages import Paste +from Helper import Process + + + +if __name__ == '__main__': + # If you wish to use another port or channel, do not forget to run a subscriber accordingly (see launch_logs.sh) + # Port of the redis instance used by pubsublogger + publisher.port = 6380 + # Script is the default channel used for the modules. 
+ publisher.channel = 'Script' + + # Section name in bin/packages/modules.cfg + config_section = 'WebStats' + + # Setup the I/O queues + p = Process(config_section) + + # Send a description of the module to the logger + publisher.info("Makes statistics about valid URL") + + # REDIS # + r_serv1 = redis.StrictRedis( + host=p.config.get("Redis_Level_DB", "host"), + port=p.config.get("Redis_Level_DB", "port"), + db=p.config.get("Redis_Level_DB", "db")) + + # FILE CURVE SECTION # + csv_path = os.path.join(os.environ['AIL_HOME'], + p.config.get("Directories", "protocolstrending_csv")) + protocolsfile_path = os.path.join(os.environ['AIL_HOME'], + p.config.get("Directories", "protocolsfile")) + + # Endless loop getting messages from the input queue (NOTE(review): generate_new_graph is reset to False every iteration, so the graph-building branch below can never run) + while True: + # Get one message from the input queue + message = p.get_from_set() + generate_new_graph = False + + if message is None: + if generate_new_graph: + generate_new_graph = False + print 'Building graph' + today = datetime.date.today() + year = today.year + month = today.month + lib_words.create_curve_with_word_file(r_serv1, csv_path, + protocolsfile_path, year, + month) + + publisher.debug("{} queue is empty, waiting".format(config_section)) + time.sleep(1) + continue + + else: + generate_new_graph = True + # Split the URL fields out of the message (NOTE(review): 'score' used below is never defined — NameError at runtime; presumably one of the split fields was intended) + scheme, credential, subdomain, domain, host, tld, \ + port, resource_path, query_string, f1, f2, f3, \ + f4 , date= message.split() + + prev_score = r_serv1.hget(scheme, date) + if prev_score is not None: + r_serv1.hset(scheme, date, int(prev_score) + int(score)) + else: + r_serv1.hset(scheme, date, score) + + + + + + + + + + + + + + + + + + diff --git a/bin/empty_queue.py b/bin/empty_queue.py new file mode 100755 index 00000000..6f82af90 --- /dev/null +++ b/bin/empty_queue.py @@ -0,0 +1,35 @@ +#!/usr/bin/env python2 +# -*-coding:UTF-8 -* + +""" +The empty_queue script +====================== + +This script drains the input queue of every configured module. 
+ +Requirements: +------------- + + +""" +import redis +import os +import time +from packages import Paste +from pubsublogger import publisher +from Helper import Process + +if __name__ == "__main__": + publisher.port = 6380 + publisher.channel = "Script" + + config_section = ['Global', 'Duplicates', 'Indexer', 'Attributes', 'Lines', 'DomClassifier', 'Tokenize', 'Curve', 'Categ', 'CreditCards', 'Mail', 'Onion', 'DumpValidOnion', 'Web', 'WebStats', 'Release', 'Credential', 'Cve', 'Phone', 'SourceCode', 'Keys'] + + for queue in config_section: + print 'dropping: ' + queue + p = Process(queue) + while True: + message = p.get_from_set() + if message is None: + break + diff --git a/files/Cve b/files/Cve new file mode 100644 index 00000000..1d7b65c5 --- /dev/null +++ b/files/Cve @@ -0,0 +1 @@ +CVE diff --git a/files/protocolsfile b/files/protocolsfile new file mode 100644 index 00000000..f36a40f5 --- /dev/null +++ b/files/protocolsfile @@ -0,0 +1,3 @@ +FTP +HTTP +HTTPS diff --git a/var/www/templates/Protocolstrending.html b/var/www/templates/Protocolstrending.html new file mode 100644 index 00000000..795ca0ab --- /dev/null +++ b/var/www/templates/Protocolstrending.html @@ -0,0 +1,196 @@ + + + + + + + + Analysis Information Leak framework Dashboard + + + + + + + + + + + + + + +
+ +
+
+
+

ProtocolsTrendings

+
+ +
+ +
+
+
+
+ Protocols Trend +
+
+ + +
+
+
+ +
+ +
+
+ +
+
+ +
+ + +
+ + + +