2015-03-11 18:15:56 +01:00
|
|
|
#!/usr/bin/env python3
|
|
|
|
# -*- coding: utf-8 -*-
|
|
|
|
|
|
|
|
import json
|
|
|
|
import requests
|
|
|
|
import time
|
2019-01-11 14:00:25 +01:00
|
|
|
from urllib.parse import urljoin
|
2015-03-11 18:15:56 +01:00
|
|
|
|
|
|
|
|
|
|
|
class PyURLAbuse(object):
    """Client for the CIRCL URL Abuse web service.

    Every lookup helper submits an asynchronous job to the service and
    returns a job id (plain text); poll :meth:`get_result` with that id
    until the job finishes (the service answers HTTP 202 while running).
    """

    def __init__(self, url='https://www.circl.lu/urlabuse/'):
        """
        :param url: Base URL of the URL Abuse instance to query.
        """
        self.url = url
        self.session = requests.Session()
        # Every endpoint exchanges JSON payloads.
        self.session.headers.update({'content-type': 'application/json'})

    @property
    def is_up(self):
        """Whether the service answers a HEAD request with HTTP 200.

        Fix: the original read ``self.root_url``, an attribute that is
        never assigned anywhere in this class, so this property always
        raised AttributeError. The base URL is stored in ``self.url``.
        """
        r = self.session.head(self.url)
        return r.status_code == 200

    def get_result(self, job_id):
        """Fetch the result of a previously submitted job.

        :param job_id: Identifier returned by one of the submit helpers.
        :return: Decoded JSON result, or ``None`` while the job is still
                 being processed (the API answers HTTP 202 until done).
        """
        response = self.session.get(urljoin(self.url, '_result/{}'.format(job_id)))
        if response.status_code == 202:
            return None
        else:
            return response.json()

    def _async(self, path, query):
        """POST ``query`` (JSON-encoded) to ``path``; return the job id."""
        response = self.session.post(urljoin(self.url, path), data=json.dumps(query))
        return response.text

    def start(self, q):
        """Submit a full scan of URL ``q``. Returns a job id."""
        query = {'url': q}
        return self._async('start', query)

    def urls(self, q):
        """Submit a job expanding ``q`` into its redirect chain. Returns a job id."""
        query = {'url': q}
        return self._async('urls', query)

    def resolve(self, q):
        """Submit a DNS resolution job for URL ``q``. Returns a job id."""
        query = {'url': q}
        return self._async('resolve', query)

    def phishtank(self, q):
        """Submit a PhishTank lookup for ``q``. Returns a job id."""
        query = {'query': q}
        return self._async('phishtank', query)

    def virustotal(self, q):
        """Submit a VirusTotal report lookup for ``q``. Returns a job id."""
        query = {'query': q}
        return self._async('virustotal_report', query)

    def googlesafebrowsing(self, q):
        """Submit a Google Safe Browsing lookup for ``q``. Returns a job id."""
        query = {'query': q}
        return self._async('googlesafebrowsing', query)

    def urlquery(self, q):
        """Submit a urlquery lookup for ``q``. Returns a job id."""
        query = {'query': q}
        return self._async('urlquery', query)

    def ticket(self, q):
        """Submit a ticket-history lookup for ``q``. Returns a job id."""
        query = {'query': q}
        return self._async('ticket', query)

    def whoismail(self, q):
        """Submit a whois contact lookup for ``q``. Returns a job id."""
        query = {'query': q}
        return self._async('whois', query)

    def pdnscircl(self, q):
        """Submit a CIRCL Passive DNS lookup for ``q``. Returns a job id."""
        query = {'query': q}
        return self._async('pdnscircl', query)

    def bgpr(self, q):
        """Submit a BGP Ranking lookup for ``q``. Returns a job id."""
        query = {'query': q}
        return self._async('bgpranking', query)

    def sslcircl(self, q):
        """Submit a CIRCL Passive SSL lookup for ``q``. Returns a job id."""
        query = {'query': q}
        return self._async('psslcircl', query)

    def make_mail_template(self, results):
        """Render ``results`` into a plain-text report suitable for a mail.

        :param results: list of one-entry dicts mapping a URL to its
                        details (shape as returned by the cache endpoint).
        :return: the report as a single string (entries separated by
                 a blank line).
        """
        content = []
        for result in results:
            # Each entry is a single-pair mapping: {url: details}.
            url = list(result.keys())[0]
            details = list(result.values())[0]
            content.append(url)
            if 'googlesafebrowsing' in details:
                content.append('\tKnown as malicious on Google Safe Browsing: {}'.format(details.get('googlesafebrowsing')))
            if 'phishtank' in details:
                content.append('\tKnown as on PhishTank: {}'.format(details.get('phishtank')))
            if 'vt' in details and details.get('vt'):
                # vt entry layout assumed: [..., ..., positives, total]
                # — TODO confirm against the service's response format.
                vt_res = details.get('vt')
                if int(vt_res[2]) != 0:
                    content.append('\tVirusTotal positive detections: {} out of {}'.format(vt_res[2], vt_res[3]))
            # Per-IP section; skip entirely when nothing resolved.
            if 'dns' not in details:
                content.append('No DNS resolutions.')
                continue
            for ip_list in details['dns']:
                if not ip_list:
                    continue
                for ip in ip_list:
                    ip_details = details[ip]
                    content.append('\t' + ip)
                    if 'bgpranking' in ip_details:
                        content.append('\t\t is announced by {} ({}). Position {}/{}.'.format(
                            ip_details['bgpranking'][2], ip_details['bgpranking'][0], ip_details['bgpranking'][4],
                            ip_details['bgpranking'][5]))
                    if ip_details.get('virustotal'):
                        res = ip_details.get('virustotal')
                        if res[0] == 1 and int(res[1]) != 0:
                            content.append('\t\tVirusTotal positive detections: {} out of {}'.format(res[1], res[2]))
        return '\n\n '.join(content)

    def _submit_ip_lookups(self, ip, with_ssl=True):
        """Fire every per-IP third-party lookup (results land in the
        server-side cache; the job ids are intentionally discarded).

        :param with_ssl: also query CIRCL Passive SSL. The original code
            only did so for IPv4 addresses — NOTE(review): confirm the
            omission for IPv6 is intentional.
        """
        self.phishtank(ip)
        self.bgpr(ip)
        self.urlquery(ip)
        self.pdnscircl(ip)
        if with_ssl:
            self.sslcircl(ip)
        self.ticket(ip)
        self.whoismail(ip)

    def run_query(self, q, with_digest=False):
        """Run a complete investigation of ``q`` and return the cached
        result set.

        Blocks while polling the service: pending jobs are re-checked
        every 0.5 s. Returns immediately when a cached result exists.

        :param q: URL to investigate.
        :param with_digest: also request the server-side digest/summary.
        :return: the cache entry (a dict); ``'info'`` states whether it
                 was served from cache or freshly (partially) computed.
        """
        cached = self.get_cache(q, with_digest)
        if len(cached['result']) > 0:
            cached['info'] = 'Used cached content'
            return cached

        # Expand the submitted URL into its full redirect chain first.
        job_id = self.urls(q)
        all_urls = None
        while True:
            all_urls = self.get_result(job_id)
            if all_urls is None:
                time.sleep(.5)
            else:
                break

        # Fan out the per-URL lookups. Only the `resolve` job ids are
        # kept: the IPs they return drive the per-IP lookups below.
        res = {}
        for u in all_urls:
            res[u] = self.resolve(u)
            self.phishtank(u)
            self.virustotal(u)
            self.googlesafebrowsing(u)
            self.urlquery(u)
            self.ticket(u)
            self.whoismail(u)

        # Poll the resolution jobs; as each finishes, launch the per-IP
        # lookups for every address it returned.
        pending = list(res.values())
        while pending:
            still_running = []
            for job_id in pending:
                ips = self.get_result(job_id)
                if ips is None:
                    still_running.append(job_id)
                    continue
                v4, v6 = ips
                if v4 is not None:
                    for ip in v4:
                        self._submit_ip_lookups(ip, with_ssl=True)
                if v6 is not None:
                    for ip in v6:
                        self._submit_ip_lookups(ip, with_ssl=False)
            pending = still_running
            if pending:
                time.sleep(.5)
        # Give the last batch of lookups a moment to land in the cache.
        time.sleep(1)
        cached = self.get_cache(q, with_digest)
        cached['info'] = 'New query, all the details may not be available.'
        return cached

    def get_cache(self, q, digest=False):
        """Fetch the cached results for ``q``.

        :param q: URL previously investigated.
        :param digest: also request the server-side digest/summary.
        :return: decoded JSON cache entry.
        """
        query = {'query': q, 'digest': digest}
        response = self.session.post(urljoin(self.url, 'get_cache'), data=json.dumps(query))
        return response.json()