fix: [Onion] add timeout signal

pull/333/head
Terrtia 2019-01-29 09:46:03 +01:00
parent f4ba21e492
commit 535f44044d
2 changed files with 30 additions and 9 deletions

bin/Onion.py

@@ -29,10 +29,18 @@ import os
 import base64
 import subprocess
 import redis
+import signal
 import re
 
 from Helper import Process
 
+class TimeoutException(Exception):
+    pass
+
+def timeout_handler(signum, frame):
+    raise TimeoutException
+
+signal.signal(signal.SIGALRM, timeout_handler)
 
 def fetch(p, r_cache, urls, domains, path):
     failed = []
@@ -113,6 +121,8 @@ if __name__ == "__main__":
     message = p.get_from_set()
     prec_filename = None
 
+    max_execution_time = p.config.getint("Onion", "max_execution_time")
+
     # send to crawler:
     activate_crawler = p.config.get("Crawler", "activate_crawler")
     if activate_crawler == 'True':
@@ -140,16 +150,24 @@ if __name__ == "__main__":
                 urls = []
                 PST = Paste.Paste(filename)
 
-                for x in PST.get_regex(url_regex):
-                    print(x)
-                    # Extracting url with regex
-                    url, s, credential, subdomain, domain, host, port, \
-                        resource_path, query_string, f1, f2, f3, f4 = x
-
-                    if '.onion' in url:
-                        print(url)
-                        domains_list.append(domain)
-                        urls.append(url)
+                # max execution time on regex
+                signal.alarm(max_execution_time)
+                try:
+                    for x in PST.get_regex(url_regex):
+                        print(x)
+                        # Extracting url with regex
+                        url, s, credential, subdomain, domain, host, port, \
+                            resource_path, query_string, f1, f2, f3, f4 = x
+
+                        if '.onion' in url:
+                            print(url)
+                            domains_list.append(domain)
+                            urls.append(url)
+                except TimeoutException:
+                    encoded_list = []
+                    p.incr_module_timeout_statistic()
+                    print ("{0} processing timeout".format(PST.p_path))
+                    continue
 
                 '''
                 for x in PST.get_regex(i2p_regex):
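The change above is the standard Unix SIGALRM idiom: signal.alarm(n) schedules a SIGALRM n seconds out, the handler installed at import time turns the signal into a TimeoutException, and the try/except around the regex loop converts a runaway paste into a counted, skippable event. A minimal standalone sketch of the same idiom (slow_scan and the timings are hypothetical stand-ins, not part of the commit):

    import signal
    import time

    class TimeoutException(Exception):
        pass

    def timeout_handler(signum, frame):
        raise TimeoutException

    # Install the handler once, as the module does at import time.
    signal.signal(signal.SIGALRM, timeout_handler)

    def slow_scan():
        # Stand-in for a regex pass that can take arbitrarily long.
        time.sleep(5)

    signal.alarm(2)        # arm: SIGALRM is delivered in 2 seconds
    try:
        slow_scan()
    except TimeoutException:
        print("processing timeout")
    finally:
        signal.alarm(0)    # disarm, so a stale alarm cannot fire later

Two caveats: SIGALRM is Unix-only and must be handled on the main thread, and the diff never disarms the alarm after a successful pass, so an alarm armed for one paste can in principle fire outside the try block while a later paste is being processed.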

bin/packages/config.cfg.sample

@@ -68,6 +68,9 @@ minTopPassList=5
 [Curve]
 max_execution_time = 90
 
+[Onion]
+max_execution_time = 180
+
 [Base64]
 path = Base64/
 max_execution_time = 60
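The getint call in the Onion.py hunk is configparser's API, so the new key is read like any other sample-config value. A short sketch of the equivalent lookup, assuming only Python's standard configparser (AIL's Helper.Process wraps the same parser):

    import configparser

    SAMPLE = """
    [Onion]
    max_execution_time = 180
    """

    config = configparser.ConfigParser()
    config.read_string(SAMPLE)

    # Equivalent of p.config.getint("Onion", "max_execution_time")
    max_execution_time = config.getint("Onion", "max_execution_time")
    print(max_execution_time)   # -> 180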