#!/usr/bin/env python3
# -*-coding:UTF-8 -*
"""
The Mail Module
======================

This module consumes the Redis-list created by the Categ module.

It applies mail regexes to paste content and issues a warning when the number
of validated e-mail addresses exceeds a threshold.

"""
import redis
import time
import datetime
import dns.exception
from packages import Paste
from packages import lib_refine
from pubsublogger import publisher
from pyfaup.faup import Faup
from Helper import Process


if __name__ == "__main__":
    publisher.port = 6380
    publisher.channel = "Script"

    config_section = 'Mail'

    faup = Faup()

    p = Process(config_section)
    addr_dns = p.config.get("Mail", "dns")

    # REDIS #
    r_serv2 = redis.StrictRedis(
        host=p.config.get("Redis_Cache", "host"),
        port=p.config.getint("Redis_Cache", "port"),
        db=p.config.getint("Redis_Cache", "db"),
        decode_responses=True)

    # ARDB #
    server_statistics = redis.StrictRedis(
        host=p.config.get("ARDB_Statistics", "host"),
        port=p.config.getint("ARDB_Statistics", "port"),
        db=p.config.getint("ARDB_Statistics", "db"),
        decode_responses=True)

    # FUNCTIONS #
    publisher.info("Subscribed to channel mails_categ")

    # FIXME For retro compatibility
    channel = 'mails_categ'

    message = p.get_from_set()
    prec_filename = None

    # Warn if there are more than this number of valid e-mails
    is_critical = 10
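
    # Regex used to extract candidate e-mail addresses from paste content;
    # matches are then validated against MX records via lib_refine below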
    email_regex = r"[a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+\.[a-zA-Z]{2,6}"
    MX_values = None
    while True:
        if message is not None:
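            # message format: '<paste filename> <score>' (queued by the Categ module)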
            filename, score = message.split()

            if prec_filename is None or filename != prec_filename:
                PST = Paste.Paste(filename)
                MX_values = lib_refine.checking_MX_record(
                    r_serv2, PST.get_regex(email_regex), addr_dns)
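                # MX_values[0]: number of e-mail addresses with a valid MX record
                # MX_values[1]: mapping of each address to its occurrence count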

                if MX_values[0] >= 1:

                    PST.__setattr__(channel, MX_values)
                    PST.save_attribute_redis(channel, (MX_values[0],
                                                       list(MX_values[1])))

                    to_print = 'Mails;{};{};{};Checked {} e-mail(s);{}'.\
                        format(PST.p_source, PST.p_date, PST.p_name,
                               MX_values[0], PST.p_rel_path)
                    if MX_values[0] > is_critical:
                        publisher.warning(to_print)
                        # Send the paste to the Duplicate module
                        p.populate_set_out(filename, 'Duplicate')

                        msg = 'infoleak:automatic-detection="mail";{}'.format(filename)
                        p.populate_set_out(msg, 'Tags')

                        # Update per-TLD e-mail statistics for the current month
                        date = datetime.datetime.now().strftime("%Y%m")
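                        # For each validated address: export a ModuleStats entry and
                        # increment the monthly counter of its TLD (extracted with Faup)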
                        for mail in MX_values[1]:
                            print('mail;{};{};{}'.format(MX_values[1][mail], mail, PST.p_date))
                            p.populate_set_out('mail;{};{};{}'.format(MX_values[1][mail], mail, PST.p_date), 'ModuleStats')

                            faup.decode(mail)
                            tld = faup.get()['tld']
                            server_statistics.hincrby('mail_by_tld:'+date, tld, MX_values[1][mail])

                    else:
                        publisher.info(to_print)
                        # Below the critical threshold: only export ModuleStats entries
                        for mail in MX_values[1]:
                            print('mail;{};{};{}'.format(MX_values[1][mail], mail, PST.p_date))
                            p.populate_set_out('mail;{};{};{}'.format(MX_values[1][mail], mail, PST.p_date), 'ModuleStats')

            prec_filename = filename

        else:
            publisher.debug("Script Mails is Idling 10s")
            print('Sleeping')
            time.sleep(10)

        message = p.get_from_set()