#!/usr/bin/env python3
# -*-coding:UTF-8 -*

"""
The Credential Module
=====================

This module consumes the Redis list created by the Categ module.

It applies credential regexes on item content and warns if the number of credentials found is above a threshold.

It also splits the username and stores it in Redis for searching purposes.

Redis organization:
    uniqNumForUsername: unique number attached to a unique username
    uniqNumForPath: unique number attached to a unique path
        -> uniqNums are used to avoid string duplication
    AllCredentials: hashed set where keys are usernames and values are their uniq number
    AllCredentialsRev: the opposite of AllCredentials, uniqNum -> username
    AllPath: hashed set where keys are paths and values are their uniq number
    AllPathRev: the opposite of AllPath, uniqNum -> path
    CredToPathMapping_uniqNumForUsername -> (set) -> uniqNumForPath
"""

##################################
# Import External packages
##################################
import os
import sys
import time
import re
import redis
from datetime import datetime
from pyfaup.faup import Faup

sys.path.append(os.environ['AIL_BIN'])
##################################
# Import Project packages
##################################
from modules.abstract_module import AbstractModule
from packages.Item import Item
from lib import ConfigLoader
from lib import regex_helper


class Credential(AbstractModule):
    """
    Credential module for AIL framework
    """

    # Split the username on special characters and case changes: lowercase runs, UPPERCASE runs, Capitalized words and digit runs
    REGEX_CRED = "[a-z]+|[A-Z]{3,}|[A-Z]{1,2}[a-z]+|[0-9]+"
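    # For example (illustrative): re.findall(REGEX_CRED, 'JohnDoe_1985') gives
    # ['John', 'Doe', '1985'] and re.findall(REGEX_CRED, 'admin.backup42') gives
    # ['admin', 'backup', '42'].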
    REDIS_KEY_NUM_USERNAME = 'uniqNumForUsername'
    REDIS_KEY_NUM_PATH = 'uniqNumForPath'
    REDIS_KEY_ALL_CRED_SET = 'AllCredentials'
    REDIS_KEY_ALL_CRED_SET_REV = 'AllCredentialsRev'
    REDIS_KEY_ALL_PATH_SET = 'AllPath'
    REDIS_KEY_ALL_PATH_SET_REV = 'AllPathRev'
    REDIS_KEY_MAP_CRED_TO_PATH = 'CredToPathMapping'

    def __init__(self):
        super(Credential, self).__init__()

        self.faup = Faup()

        self.regex_web = r"((?:https?:\/\/)[\.-_0-9a-zA-Z]+\.[0-9a-zA-Z]+)"
        self.regex_cred = r"[a-zA-Z0-9\._-]+@[a-zA-Z0-9\.-]+\.[a-zA-Z]{2,6}[\r\n :\_\-]{1,10}[a-zA-Z0-9\_\-]+"
        self.regex_site_for_stats = r"@[a-zA-Z0-9.-]+\.[a-zA-Z]{2,6}:"
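        # Illustrative match for regex_cred: 'user@example.com:hunter2'
        # (mail-style username, a short separator such as ':' or whitespace, then the password).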

        self.redis_cache_key = regex_helper.generate_redis_cache_key(self.module_name)

        # Database
        config_loader = ConfigLoader.ConfigLoader()
        self.server_cred = config_loader.get_redis_conn("ARDB_TermCred")
        self.server_statistics = config_loader.get_redis_conn("ARDB_Statistics")

        # Config values
        self.minimumLengthThreshold = config_loader.get_config_int("Credential", "minimumLengthThreshold")
        self.criticalNumberToAlert = config_loader.get_config_int("Credential", "criticalNumberToAlert")

        self.max_execution_time = 30

        # Waiting time in seconds between two processed messages
        self.pending_seconds = 10

        # Send module state to logs
        self.redis_logger.info(f"Module {self.module_name} initialized")

    def compute(self, message):
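        # The incoming message is assumed to carry two whitespace-separated fields
        # (the item id and a match count) as produced by the Categ module queue.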
        item_id, count = message.split()
        item = Item(item_id)

        item_content = item.get_content()

        # Extract all credentials
        all_credentials = regex_helper.regex_findall(self.module_name, self.redis_cache_key, self.regex_cred, item.get_id(), item_content, max_time=self.max_execution_time)

        if all_credentials:
            nb_cred = len(all_credentials)
            message = f'Checked {nb_cred} credentials found.'

            all_sites = regex_helper.regex_findall(self.module_name, self.redis_cache_key, self.regex_web, item.get_id(), item_content, max_time=self.max_execution_time)
            if all_sites:
                discovered_sites = ', '.join(all_sites)
                message += f' Related websites: {discovered_sites}'

            print(message)

            to_print = f'Credential;{item.get_source()};{item.get_date()};{item.get_basename()};{message};{item.get_id()}'

            # Number of credentials is above the alert threshold: publish an alert
            if nb_cred > self.criticalNumberToAlert:
                print(f"========> Found more than {self.criticalNumberToAlert} credentials in this file : {item.get_id()}")
                self.redis_logger.warning(to_print)

                # Send to duplicate
                self.send_message_to_queue(item.get_id(), 'Duplicate')

                msg = f'infoleak:automatic-detection="credential";{item.get_id()}'
                self.send_message_to_queue(msg, 'Tags')

                site_occurence = regex_helper.regex_findall(self.module_name, self.redis_cache_key, self.regex_site_for_stats, item.get_id(), item_content, max_time=self.max_execution_time, r_set=False)

                creds_sites = {}
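
                # Count how many credentials reference each domain. regex_site_for_stats
                # captures '@domain.tld:', so site[1:-1] strips the leading '@' and trailing ':';
                # r_set=False presumably keeps duplicate matches so occurrences can be counted
                # (assumption about the regex_helper API).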
                for site in site_occurence:
                    site_domain = site[1:-1].lower()
                    if site_domain in creds_sites.keys():
                        creds_sites[site_domain] += 1
                    else:
                        creds_sites[site_domain] = 1

                for url in all_sites:
                    self.faup.decode(url)
                    domain = self.faup.get()['domain']
                    ## TODO: # FIXME: remove me, check faup version
                    try:
                        domain = domain.decode()
                    except:
                        pass
                    if domain in creds_sites.keys():
                        creds_sites[domain] += 1
                    else:
                        creds_sites[domain] = 1

                for site, num in creds_sites.items():  # Send each distinct site to ModuleStats
                    mssg = f'credential;{num};{site};{item.get_date()}'
                    print(mssg)
                    self.send_message_to_queue(mssg, 'ModuleStats')

                if all_sites:
                    discovered_sites = ', '.join(all_sites)
                    print(f"=======> Probably on : {discovered_sites}")

                date = datetime.now().strftime("%Y%m")
                for cred in all_credentials:
                    maildomains = re.findall(r"@[a-zA-Z0-9.-]+\.[a-zA-Z]{2,20}", cred.lower())[0]
                    self.faup.decode(maildomains)
                    tld = self.faup.get()['tld']
                    ## TODO: # FIXME: remove me
                    try:
                        tld = tld.decode()
                    except:
                        pass
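                    # Per-month TLD statistics go into one Redis hash per month, e.g.
                    # 'credential_by_tld:202106' -> {'com': 12, 'net': 3} (illustrative values)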
                    self.server_statistics.hincrby('credential_by_tld:'+date, tld, 1)
            else:
                self.redis_logger.info(to_print)
                print(f'found {nb_cred} credentials')

            # For searching credentials in termFreq
            for cred in all_credentials:
                cred = cred.split('@')[0]  # Split to ignore the mail address

                # unique number attached to unique path
                uniq_num_path = self.server_cred.incr(Credential.REDIS_KEY_NUM_PATH)
                self.server_cred.hmset(Credential.REDIS_KEY_ALL_PATH_SET, {item.get_id(): uniq_num_path})
                self.server_cred.hmset(Credential.REDIS_KEY_ALL_PATH_SET_REV, {uniq_num_path: item.get_id()})

                # unique number attached to unique username
                uniq_num_cred = self.server_cred.hget(Credential.REDIS_KEY_ALL_CRED_SET, cred)
                if uniq_num_cred is None:
                    # cred does not exist yet, create new entries
                    uniq_num_cred = self.server_cred.incr(Credential.REDIS_KEY_NUM_USERNAME)
                    self.server_cred.hmset(Credential.REDIS_KEY_ALL_CRED_SET, {cred: uniq_num_cred})
                    self.server_cred.hmset(Credential.REDIS_KEY_ALL_CRED_SET_REV, {uniq_num_cred: cred})

                # Add the mapping between the credential and the path
                self.server_cred.sadd(Credential.REDIS_KEY_MAP_CRED_TO_PATH+'_'+str(uniq_num_cred), uniq_num_path)

                # Split the credential on capital letters, numbers, dots and so on
                # Add each part to Redis; each part points back to its credential's unique number
                splitedCred = re.findall(Credential.REGEX_CRED, cred)
                for partCred in splitedCred:
                    if len(partCred) > self.minimumLengthThreshold:
                        self.server_cred.sadd(partCred, uniq_num_cred)
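                        # Illustrative lookup (not executed here): a search for 'john' could do
                        # smembers('john') to get credential numbers, hget('AllCredentialsRev', num)
                        # to recover usernames, then smembers(f'CredToPathMapping_{num}') and
                        # hget('AllPathRev', path_num) to find the items they appeared in.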


if __name__ == '__main__':
    module = Credential()
    module.run()