diff --git a/bin/lib/crawlers.py b/bin/lib/crawlers.py
index 932938b2..db34b96d 100755
--- a/bin/lib/crawlers.py
+++ b/bin/lib/crawlers.py
@@ -713,7 +713,7 @@ def send_url_to_crawl_in_queue(crawler_mode, crawler_type, url):
         r_serv_onion.sadd(f'auto_crawler_url:{crawler_type}', url)
 
 def add_url_to_crawl_in_queue(url, crawler_mode='manual'): # crawler_type
-    print(f'{crawler_type}_crawler_priority_queue', f'{url};{crawler_mode}')
+    #print(f'{crawler_type}_crawler_priority_queue', f'{url};{crawler_mode}')
     r_serv_onion.sadd(f'{crawler_type}_crawler_priority_queue', f'{url};{crawler_mode}')
     # CURRENTLY DISABLED
     # # add auto crawled url for user UI
@@ -750,7 +750,6 @@ def api_add_crawler_task(data, user_id=None):
                 depth_limit = 0
         except ValueError:
             return ({'error':'invalid depth limit'}, 400)
 
-    print(url, screenshot, har, depth_limit)
     return create_crawler_task(url, screenshot=screenshot, har=har, depth_limit=depth_limit,
                                 crawler_type='onion')
diff --git a/var/www/modules/restApi/Flask_restApi.py b/var/www/modules/restApi/Flask_restApi.py
index 70600e66..d1ee2438 100644
--- a/var/www/modules/restApi/Flask_restApi.py
+++ b/var/www/modules/restApi/Flask_restApi.py
@@ -15,12 +15,12 @@ import datetime
 sys.path.append(os.path.join(os.environ['AIL_BIN'], 'lib/'))
 
 import Domain
+import crawlers
 import Import_helper
 import Cryptocurrency
 import Pgp
 import Item
-import Paste
 import Tag
 import Term
 import Tracker
@@ -568,7 +568,7 @@ def add_crawler_task():
         return create_json_response(res[0], res[1])
 
     dict_res = {'url': data['url']}
-    return create_json_response(dict_res, res[1])
+    return create_json_response(dict_res, 200)
 
 # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
 # # # # # # # # # # # # # # # # # IMPORT # # # # # # # # # # # # # # # # # #