diff --git a/bin/Onion.py b/bin/Onion.py
index c14a5485..db814631 100755
--- a/bin/Onion.py
+++ b/bin/Onion.py
@@ -25,14 +25,49 @@
 import pprint
 import time
 from packages import Paste
 from pubsublogger import publisher
-
+import datetime
+import os
+import base64
+import subprocess
 from Helper import Process
+
+
+def fetch(p, urls, domains, path):
+    for url, domain in zip(urls, domains):
+        to_fetch = base64.standard_b64encode(url)
+        process = subprocess.Popen(["python", './tor_fetcher.py', to_fetch],
+                                   stdout=subprocess.PIPE)
+        while process.poll() is None:
+            time.sleep(1)
+
+        if process.returncode == 0:
+            tempfile = process.stdout.read().strip()
+            with open(tempfile, 'r') as f:
+                filename = path + domain
+                content = base64.standard_b64decode(f.read())
+                save_path = os.path.join(os.environ['AIL_HOME'],
+                                         p.config.get("Directories", "pastes"),
+                                         filename)
+                dirname = os.path.dirname(save_path)
+                if not os.path.exists(dirname):
+                    os.makedirs(dirname)
+                with open(save_path, 'w') as ff:
+                    ff.write(content)
+                p.populate_set_out(save_path)
+            os.unlink(tempfile)
+        else:
+            print 'Failed at downloading', url
+            print process.stdout.read()
+
+
 if __name__ == "__main__":
     publisher.port = 6380
     publisher.channel = "Script"
 
+    torclient_host = '127.0.0.1'
+    torclient_port = 9050
+
     config_section = 'Onion'
 
     p = Process(config_section)
@@ -49,7 +84,7 @@
 
     # Thanks to Faup project for this regex
     # https://github.com/stricaud/faup
-    url_regex = "([a-zA-Z0-9\.\-]+(\:[a-zA-Z0-9\.&%\$\-]+)*@)*((25[0-5]|2[0-4][0-9]|[0-1]{1}[0-9]{2}|[1-9]{1}[0-9]{1}|[1-9])\.(25[0-5]|2[0-4][0-9]|[0-1]{1}[0-9]{2}|[1-9]{1}[0-9]{1}|[1-9]|0)\.(25[0-5]|2[0-4][0-9]|[0-1]{1}[0-9]{2}|[1-9]{1}[0-9]{1}|[1-9]|0)\.(25[0-5]|2[0-4][0-9]|[0-1]{1}[0-9]{2}|[1-9]{1}[0-9]{1}|[0-9])|localhost|([a-zA-Z0-9\-]+\.)*[a-zA-Z0-9\-]+\.onion)(\:[0-9]+)*(/($|[a-zA-Z0-9\.\,\?\'\\\+&%\$#\=~_\-]+))*"
+    url_regex = "((http|https|ftp)\://([a-zA-Z0-9\.\-]+(\:[a-zA-Z0-9\.&%\$\-]+)*@)*((25[0-5]|2[0-4][0-9]|[0-1]{1}[0-9]{2}|[1-9]{1}[0-9]{1}|[1-9])\.(25[0-5]|2[0-4][0-9]|[0-1]{1}[0-9]{2}|[1-9]{1}[0-9]{1}|[1-9]|0)\.(25[0-5]|2[0-4][0-9]|[0-1]{1}[0-9]{2}|[1-9]{1}[0-9]{1}|[1-9]|0)\.(25[0-5]|2[0-4][0-9]|[0-1]{1}[0-9]{2}|[1-9]{1}[0-9]{1}|[0-9])|localhost|([a-zA-Z0-9\-]+\.)*[a-zA-Z0-9\-]+\.onion)(\:[0-9]+)*(/($|[a-zA-Z0-9\.\,\?\'\\\+&%\$#\=~_\-]+))*)"
 
     while True:
         if message is not None:
@@ -59,14 +94,16 @@
 
             # "For each new paste"
             if prec_filename is None or filename != prec_filename:
                 domains_list = []
+                urls = []
                 PST = Paste.Paste(filename)
 
                 for x in PST.get_regex(url_regex):
                     # Extracting url with regex
-                    credential, subdomain, domain, host, tld, port, \
+                    url, s, credential, subdomain, domain, host, port, \
                         resource_path, query_string, f1, f2, f3, f4 = x
                     domains_list.append(domain)
+                    urls.append(url)
 
                 # Saving the list of extracted onion domains.
                 PST.__setattr__(channel, domains_list)
@@ -76,16 +113,21 @@
                 to_print = 'Onion;{};{};{};'.format(PST.p_source, PST.p_date,
                                                     PST.p_name)
                 if len(domains_list) > 0:
+
                     publisher.warning('{}Detected {} .onion(s)'.format(
                         to_print, len(domains_list)))
+                    now = datetime.datetime.now()
+                    path = os.path.join('onions', str(now.year).zfill(4),
+                                        str(now.month).zfill(2),
+                                        str(now.day).zfill(2),
+                                        str(int(time.mktime(now.utctimetuple()))))
+                    fetch(p, urls, domains_list, path)
                 else:
                     publisher.info('{}Onion related'.format(to_print))
 
                 prec_filename = filename
-
             else:
                 publisher.debug("Script url is Idling 10s")
                 print 'Sleeping'
                 time.sleep(10)
-
             message = p.get_from_set()
diff --git a/bin/packages/modules.cfg b/bin/packages/modules.cfg
index b58e25f6..e6241f40 100644
--- a/bin/packages/modules.cfg
+++ b/bin/packages/modules.cfg
@@ -34,7 +34,7 @@
 subscribe = Redis_Mail
 
 [Onion]
 subscribe = Redis_Onion
-#publish = Redis_Global
+publish = Redis_Global
 
 [Web]
 subscribe = Redis_Web
diff --git a/bin/tor_fetcher.py b/bin/tor_fetcher.py
new file mode 100644
index 00000000..305cd3f5
--- /dev/null
+++ b/bin/tor_fetcher.py
@@ -0,0 +1,64 @@
+#!/usr/bin/env python2
+# -*-coding:UTF-8 -*
+
+import socks
+import socket
+import urllib2
+import StringIO
+import gzip
+import base64
+import sys
+import tempfile
+
+def create_connection(address, timeout=None, source_address=None):
+    sock = socks.socksocket()
+    sock.connect(address)
+    return sock
+
+
+def get_page(url, torclient_host='127.0.0.1', torclient_port=9050):
+
+    request = urllib2.Request(url)
+    # UA of the Tor browser bundle
+    request.add_header('User-Agent', 'Mozilla/5.0 (Windows NT 6.1; rv:24.0) Gecko/20100101 Firefox/24.0')
+    return urllib2.urlopen(request).read()
+
+
+def makegzip64(s):
+    out = StringIO.StringIO()
+    with gzip.GzipFile(fileobj=out, mode="w") as f:
+        f.write(s)
+    return base64.standard_b64encode(out.getvalue())
+
+
+if __name__ == "__main__":
+
+    if len(sys.argv) != 2:
+        print('usage:', 'tor_fetcher.py', 'URL (base64 encoded)')
+        exit(1)
+
+    try:
+        url = base64.standard_b64decode(sys.argv[1])
+    except:
+        print('unable to decode')
+        exit(1)
+
+    torclient_host = '127.0.0.1'
+    torclient_port = 9050
+    # Setup Proxy
+    socks.set_default_proxy(socks.SOCKS5, torclient_host, torclient_port, True)
+    socket.socket = socks.socksocket
+    socket.create_connection = create_connection
+
+    try:
+        page = get_page(url)
+    except:
+        print('unable to fetch')
+        exit(1)
+
+    to_write = makegzip64(page)
+    t, path = tempfile.mkstemp()
+    with open(path, 'w') as f:
+        f.write(to_write)
+    print path
+    exit(0)
diff --git a/pip_packages_requirement.txt b/pip_packages_requirement.txt
index 75d5d866..0fb95845 100644
--- a/pip_packages_requirement.txt
+++ b/pip_packages_requirement.txt
@@ -32,6 +32,9 @@
 whoosh
 ipaddress
 pycountry
 
+# To fetch Onion urls
+PySocks
+
 #ASN lookup requirements
 http://adns-python.googlecode.com/files/adns-python-1.2.1.tar.gz
 https://github.com/trolldbois/python-cymru-services/archive/master.zip