Mirror of https://github.com/CIRCL/AIL-framework
commit 2606220c2b (parent a4c03b4ba4)
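This commit makes the bytes/str handling around pyfaup defensive: faup.get() may return bytes or str for fields such as 'domain' and 'tld' depending on the library version, so each call site wraps the conversion in a try/except. A minimal sketch of the pattern the hunks below repeat (the helper name force_str is illustrative, not part of the commit):

    def force_str(value, encoding='utf-8'):
        """Return value as str, decoding bytes if needed; pass anything else through."""
        if isinstance(value, bytes):
            return value.decode(encoding, errors='replace')
        return value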
@@ -124,6 +124,11 @@ if __name__ == "__main__":
             for url in sites:
                 faup.decode(url)
                 domain = faup.get()['domain']
+                ## TODO: # FIXME: remove me
+                try:
+                    domain = domain.decode()
+                except:
+                    pass
                 if domain in creds_sites.keys():
                     creds_sites[domain] += 1
                 else:
@@ -143,6 +148,11 @@ if __name__ == "__main__":
                 maildomains = re.findall("@[a-zA-Z0-9.-]+\.[a-zA-Z]{2,20}", cred.lower())[0]
                 faup.decode(maildomains)
                 tld = faup.get()['tld']
+                ## TODO: # FIXME: remove me
+                try:
+                    tld = tld.decode()
+                except:
+                    pass
                 server_statistics.hincrby('credential_by_tld:'+date, tld, 1)
             else:
                 publisher.info(to_print)
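With a helper like force_str above, the same update collapses to one conversion (hypothetical usage, not what the commit does):

    faup.decode(maildomains)
    tld = force_str(faup.get()['tld'])  # str whether faup returned bytes or str
    server_statistics.hincrby('credential_by_tld:' + date, tld, 1)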
@@ -29,8 +29,17 @@ def analyse(url, path):
     faup.decode(url)
     url_parsed = faup.get()
     pprint.pprint(url_parsed)
-    resource_path = url_parsed['resource_path']
-    query_string = url_parsed['query_string']
+    ## TODO: # FIXME: remove me
+    try:
+        resource_path = url_parsed['resource_path'].encode()
+    except:
+        resource_path = url_parsed['resource_path']
+
+    ## TODO: # FIXME: remove me
+    try:
+        query_string = url_parsed['query_string'].encode()
+    except:
+        query_string = url_parsed['query_string']
+
     result_path = {'sqli' : False}
     result_query = {'sqli' : False}

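Note that this hunk converts in the opposite direction: resource_path and query_string are .encode()d to bytes, and the 'sqli' result dicts suggest this is a pylibinjection-based module whose detector takes bytes. The mirrored sketch, under that assumption (force_bytes is illustrative, not part of the commit):

    def force_bytes(value, encoding='utf-8'):
        """Return value as bytes, encoding str if needed; pass anything else through."""
        if isinstance(value, str):
            return value.encode(encoding)
        return value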
@@ -56,7 +65,11 @@ def analyse(url, path):
         p.populate_set_out(msg, 'Tags')

         #statistics
-        tld = url_parsed['tld']
+        ## TODO: # FIXME: remove me
+        try:
+            tld = url_parsed['tld'].decode()
+        except:
+            tld = url_parsed['tld']
         if tld is not None:
             date = datetime.datetime.now().strftime("%Y%m")
             server_statistics.hincrby('SQLInjection_by_tld:'+date, tld, 1)
@@ -95,6 +95,10 @@ if __name__ == "__main__":

                 faup.decode(mail)
                 tld = faup.get()['tld']
+                try:
+                    tld = tld.decode()
+                except:
+                    pass
                 server_statistics.hincrby('mail_by_tld:'+date, tld, MX_values[1][mail])

             else:
@@ -68,10 +68,20 @@ def analyse(url, path):
     result_query = 0

     if resource_path is not None:
-        result_path = is_sql_injection(resource_path.decode('utf8'))
+        ## TODO: # FIXME: remove me
+        try:
+            resource_path = resource_path.decode()
+        except:
+            pass
+        result_path = is_sql_injection(resource_path)

     if query_string is not None:
-        result_query = is_sql_injection(query_string.decode('utf8'))
+        ## TODO: # FIXME: remove me
+        try:
+            query_string = query_string.decode()
+        except:
+            pass
+        result_query = is_sql_injection(query_string)

     if (result_path > 0) or (result_query > 0):
         paste = Paste.Paste(path)
@@ -89,6 +99,11 @@ def analyse(url, path):
         #statistics
         tld = url_parsed['tld']
         if tld is not None:
+            ## TODO: # FIXME: remove me
+            try:
+                tld = tld.decode()
+            except:
+                pass
             date = datetime.datetime.now().strftime("%Y%m")
             server_statistics.hincrby('SQLInjection_by_tld:'+date, tld, 1)

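Two caveats with the pattern as committed: .decode() with no arguments assumes UTF-8, and the bare except also swallows the AttributeError raised when the value is already a str (which is exactly what makes the same code work on both types). An explicit type check is the less surprising equivalent:

    tld = url_parsed['tld']
    if isinstance(tld, bytes):   # decode only when faup handed back bytes
        tld = tld.decode('utf-8')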
bin/Web.py (16 changed lines)
@@ -94,18 +94,22 @@ if __name__ == "__main__":
                 faup.decode(url)
                 domain = faup.get_domain()
                 subdomain = faup.get_subdomain()
                 f1 = None

                 publisher.debug('{} Published'.format(url))

                 if f1 == "onion":
                     print(domain)

                 if subdomain is not None:
-                    subdomain = subdomain.decode('utf8')
+                    ## TODO: # FIXME: remove me
+                    try:
+                        subdomain = subdomain.decode()
+                    except:
+                        pass

                 if domain is not None:
-                    domain = domain.decode('utf8')
+                    ## TODO: # FIXME: remove me
+                    try:
+                        domain = domain.decode()
+                    except:
+                        pass
                     domains_list.append(domain)

                 hostl = avoidNone(subdomain) + avoidNone(domain)
@@ -115,7 +115,12 @@ def get_type_domain(domain):
 def get_domain_from_url(url):
     faup.decode(url)
     unpack_url = faup.get()
-    domain = unpack_url['domain'].decode()
+    domain = unpack_url['domain']
+    ## TODO: FIXME remove me
+    try:
+        domain = domain.decode()
+    except:
+        pass
     return domain

 def get_last_domains_crawled(type):
@@ -418,8 +423,19 @@ def create_spider_splash():
     # get service_type
     faup.decode(url)
     unpack_url = faup.get()
-    domain = unpack_url['domain'].decode()
-    if unpack_url['tld'] == b'onion':
+    ## TODO: # FIXME: remove me
+    try:
+        domain = unpack_url['domain'].decode()
+    except:
+        domain = unpack_url['domain']
+
+    ## TODO: # FIXME: remove me
+    try:
+        tld = unpack_url['tld'].decode()
+    except:
+        tld = unpack_url['tld']
+
+    if tld == 'onion':
         service_type = 'onion'
     else:
         service_type = 'regular'
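The old check compared unpack_url['tld'] against the bytes literal b'onion', which fails silently once faup returns str: in Python 3, bytes and str never compare equal. Decoding first, as the new code does, avoids that. A quick illustration:

    >>> b'onion' == 'onion'
    False
    >>> b'onion'.decode() == 'onion'
    True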
@@ -694,10 +710,19 @@ def show_domain():
         port = request.args.get('port')
         faup.decode(domain)
         unpack_url = faup.get()
-        domain = unpack_url['domain'].decode()
+
+        ## TODO: # FIXME: remove me
+        try:
+            domain = unpack_url['domain'].decode()
+        except:
+            domain = unpack_url['domain']
+
         if not port:
             if unpack_url['port']:
-                port = unpack_url['port'].decode()
+                try:
+                    port = unpack_url['port'].decode()
+                except:
+                    port = unpack_url['port']
             else:
                 port = 80
         try:
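Since every call site repeats the same dance, normalizing the whole faup result in one pass would also work (a sketch building on the hypothetical force_str above, not part of the commit):

    faup.decode(domain)
    # coerce every field once, instead of try/except at each use site
    unpack_url = {k: force_str(v) for k, v in faup.get().items()}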