mirror of https://github.com/CIRCL/AIL-framework
parent a4c03b4ba4
commit 2606220c2b
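Every hunk in this commit applies the same workaround: a value pulled out of pyfaup's `faup.get()` dictionary is run through `.decode()` inside a `try`/`except`, because the field may arrive as `bytes` or already as `str` (the fallback branches below cover both cases). The `## TODO: # FIXME: remove me` comments mark the workaround as temporary. A minimal sketch of the idiom being inlined everywhere; the helper name `to_str` is not part of the codebase and only illustrates the two cases the bare `except` is meant to absorb:

```python
def to_str(value):
    """Return str for a field that may be bytes or already str."""
    try:
        return value.decode()       # bytes -> str (UTF-8 by default)
    except AttributeError:          # str has no .decode() in Python 3
        return value

print(to_str(b'example.com'))   # example.com
print(to_str('example.com'))    # example.com
```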
@@ -124,6 +124,11 @@ if __name__ == "__main__":
     for url in sites:
         faup.decode(url)
         domain = faup.get()['domain']
+        ## TODO: # FIXME: remove me
+        try:
+            domain = domain.decode()
+        except:
+            pass
         if domain in creds_sites.keys():
             creds_sites[domain] += 1
         else:
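The hunk above tallies credential hits per domain in a plain dict (`creds_sites`). Purely as an illustration of the same counting logic, not code from the repository, the block could be written with `collections.Counter`; `sites` is sample data here:

```python
from collections import Counter
from pyfaup.faup import Faup

faup = Faup()
creds_sites = Counter()
sites = ['https://example.com/login', 'https://admin.example.org/']   # sample data

for url in sites:
    faup.decode(url)
    domain = faup.get()['domain']
    if isinstance(domain, bytes):   # same bytes/str guard as the patch
        domain = domain.decode()
    creds_sites[domain] += 1        # Counter supplies the missing-key default
```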
@@ -143,6 +148,11 @@ if __name__ == "__main__":
     maildomains = re.findall("@[a-zA-Z0-9.-]+\.[a-zA-Z]{2,20}", cred.lower())[0]
     faup.decode(maildomains)
     tld = faup.get()['tld']
+    ## TODO: # FIXME: remove me
+    try:
+        tld = tld.decode()
+    except:
+        pass
     server_statistics.hincrby('credential_by_tld:'+date, tld, 1)
 else:
     publisher.info(to_print)
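Both hunks feed the module statistics: `server_statistics` is a Redis connection, and `hincrby` increments a field of a hash whose name embeds the current month (`credential_by_tld:YYYYMM`), i.e. one counter per TLD per month. A rough redis-py sketch of the same call; the connection parameters are placeholders, the real module takes them from the AIL configuration:

```python
import datetime
import redis

# Placeholder connection; AIL builds this from its config file.
server_statistics = redis.StrictRedis(host='localhost', port=6379, db=0)

date = datetime.datetime.now().strftime("%Y%m")            # e.g. '201903'
server_statistics.hincrby('credential_by_tld:' + date, 'com', 1)

# One hash per month, one field per TLD.
print(server_statistics.hgetall('credential_by_tld:' + date))
```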
@@ -29,7 +29,16 @@ def analyse(url, path):
     faup.decode(url)
     url_parsed = faup.get()
     pprint.pprint(url_parsed)
-    resource_path = url_parsed['resource_path']
-    query_string = url_parsed['query_string']
+    ## TODO: # FIXME: remove me
+    try:
+        resource_path = url_parsed['resource_path'].encode()
+    except:
+        resource_path = url_parsed['resource_path']
+
+    ## TODO: # FIXME: remove me
+    try:
+        query_string = url_parsed['query_string'].encode()
+    except:
+        query_string = url_parsed['query_string']
 
     result_path = {'sqli' : False}
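For context, `faup.get()` returns a dict of URL components; the fields touched by this commit are 'domain', 'tld', 'resource_path', 'query_string' and 'port'. Note that the hunk above normalises the two fields toward bytes with `.encode()`, while the second `analyse()` further down normalises toward str with `.decode()`. A small sketch of what the parser hands back (whether the values are bytes or str depends on the pyfaup build, which is exactly what the workarounds guard against):

```python
import pprint
from pyfaup.faup import Faup

faup = Faup()
faup.decode('http://sub.example.com:8080/index.php?id=1')
url_parsed = faup.get()
pprint.pprint(url_parsed)

# Fields used by the modules in this diff; each may be bytes or str.
for field in ('domain', 'tld', 'resource_path', 'query_string', 'port'):
    print(field, '=', url_parsed.get(field))
```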
@@ -56,6 +65,10 @@ def analyse(url, path):
     p.populate_set_out(msg, 'Tags')
 
     #statistics
-    tld = url_parsed['tld']
+    ## TODO: # FIXME: remove me
+    try:
+        tld = url_parsed['tld'].decode()
+    except:
+        tld = url_parsed['tld']
     if tld is not None:
         date = datetime.datetime.now().strftime("%Y%m")
@@ -95,6 +95,10 @@ if __name__ == "__main__":
 
     faup.decode(mail)
     tld = faup.get()['tld']
+    try:
+        tld = tld.decode()
+    except:
+        pass
     server_statistics.hincrby('mail_by_tld:'+date, tld, MX_values[1][mail])
 
 else:
@@ -68,10 +68,20 @@ def analyse(url, path):
     result_query = 0
 
     if resource_path is not None:
-        result_path = is_sql_injection(resource_path.decode('utf8'))
+        ## TODO: # FIXME: remove me
+        try:
+            resource_path = resource_path.decode()
+        except:
+            pass
+        result_path = is_sql_injection(resource_path)
 
     if query_string is not None:
-        result_query = is_sql_injection(query_string.decode('utf8'))
+        ## TODO: # FIXME: remove me
+        try:
+            query_string = query_string.decode()
+        except:
+            pass
+        result_query = is_sql_injection(query_string)
 
     if (result_path > 0) or (result_query > 0):
         paste = Paste.Paste(path)
@@ -89,6 +99,11 @@ def analyse(url, path):
     #statistics
     tld = url_parsed['tld']
     if tld is not None:
+        ## TODO: # FIXME: remove me
+        try:
+            tld = tld.decode()
+        except:
+            pass
         date = datetime.datetime.now().strftime("%Y%m")
         server_statistics.hincrby('SQLInjection_by_tld:'+date, tld, 1)
 
bin/Web.py (16 changed lines)
@@ -94,18 +94,22 @@ if __name__ == "__main__":
     faup.decode(url)
     domain = faup.get_domain()
     subdomain = faup.get_subdomain()
-    f1 = None
 
     publisher.debug('{} Published'.format(url))
 
-    if f1 == "onion":
-        print(domain)
-
     if subdomain is not None:
-        subdomain = subdomain.decode('utf8')
+        ## TODO: # FIXME: remove me
+        try:
+            subdomain = subdomain.decode()
+        except:
+            pass
 
     if domain is not None:
-        domain = domain.decode('utf8')
+        ## TODO: # FIXME: remove me
+        try:
+            domain = domain.decode()
+        except:
+            pass
         domains_list.append(domain)
 
     hostl = avoidNone(subdomain) + avoidNone(domain)
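`avoidNone` is defined elsewhere in bin/Web.py; judging from its use here it maps None to an empty string so the host can be rebuilt even when the subdomain is missing. A sketch of that assumed behaviour, not a copy of the real helper:

```python
def avoidNone(a_string):
    """Assumed behaviour: turn None into '' so concatenation cannot fail."""
    return '' if a_string is None else a_string

subdomain = None
domain = 'example.com'
hostl = avoidNone(subdomain) + avoidNone(domain)   # 'example.com', no TypeError
print(hostl)
```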
@@ -115,7 +115,12 @@ def get_type_domain(domain):
 def get_domain_from_url(url):
     faup.decode(url)
     unpack_url = faup.get()
-    domain = unpack_url['domain'].decode()
+    domain = unpack_url['domain']
+    ## TODO: FIXME remove me
+    try:
+        domain = domain.decode()
+    except:
+        pass
     return domain
 
 def get_last_domains_crawled(type):
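After this change `get_domain_from_url()` returns a str whether faup hands back bytes or str. A hypothetical call, with the expected results shown as comments (exact values depend on the faup build):

```python
# Hypothetical usage of the helper above.
print(get_domain_from_url('https://www.example.com/'))   # expected: example.com
print(get_domain_from_url('http://foo.example.onion/'))  # expected: example.onion
```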
@@ -418,8 +423,19 @@ def create_spider_splash():
     # get service_type
     faup.decode(url)
     unpack_url = faup.get()
-    domain = unpack_url['domain'].decode()
-    if unpack_url['tld'] == b'onion':
+    ## TODO: # FIXME: remove me
+    try:
+        domain = unpack_url['domain'].decode()
+    except:
+        domain = unpack_url['domain']
+
+    ## TODO: # FIXME: remove me
+    try:
+        tld = unpack_url['tld'].decode()
+    except:
+        tld = unpack_url['tld']
+
+    if tld == 'onion':
         service_type = 'onion'
     else:
         service_type = 'regular'
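The old code compared the raw field against `b'onion'`; after the change the TLD is decoded first and compared against the str `'onion'`. The distinction matters because bytes and str never compare equal in Python 3, which is the bug class this whole commit works around:

```python
# Python 3: bytes and str are distinct types and never compare equal.
print(b'onion' == 'onion')            # False
print(b'onion'.decode() == 'onion')   # True
```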
@@ -694,10 +710,19 @@ def show_domain():
     port = request.args.get('port')
     faup.decode(domain)
     unpack_url = faup.get()
-    domain = unpack_url['domain'].decode()
+
+    ## TODO: # FIXME: remove me
+    try:
+        domain = unpack_url['domain'].decode()
+    except:
+        domain = unpack_url['domain']
+
     if not port:
         if unpack_url['port']:
-            port = unpack_url['port'].decode()
+            try:
+                port = unpack_url['port'].decode()
+            except:
+                port = unpack_url['port']
         else:
             port = 80
     try:
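In `show_domain()` the port is now resolved from three places, in order: the `port` query parameter of the request, the port parsed out of the URL by faup, and finally a default of 80. Flask's `request.args.get('port')` returns None when the parameter is absent, which is what lets the `if not port:` fallback trigger. A condensed sketch of that resolution order; `resolve_port` and the sample values are hypothetical, not part of the codebase:

```python
def resolve_port(request_port, unpack_url):
    """Sketch of the fallback order used in show_domain()."""
    port = request_port                   # e.g. request.args.get('port')
    if not port:
        if unpack_url.get('port'):
            port = unpack_url['port']
            if isinstance(port, bytes):   # same bytes/str guard as the patch
                port = port.decode()
        else:
            port = 80
    return port

print(resolve_port(None, {'port': b'8443'}))    # '8443'
print(resolve_port(None, {'port': None}))       # 80
print(resolve_port('8080', {'port': b'443'}))   # '8080'
```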