fix: [api] fix crawler api response

pull/586/head
Terrtia 2022-09-14 10:27:17 +02:00
parent bacedfe643
commit 1372b1ef68
No known key found for this signature in database
GPG Key ID: 1E1B1F50D84613D0
2 changed files with 3 additions and 4 deletions

View File

@@ -713,7 +713,7 @@ def send_url_to_crawl_in_queue(crawler_mode, crawler_type, url):
r_serv_onion.sadd(f'auto_crawler_url:{crawler_type}', url)
def add_url_to_crawl_in_queue(url, crawler_mode='manual'): # crawler_type
print(f'{crawler_type}_crawler_priority_queue', f'{url};{crawler_mode}')
#print(f'{crawler_type}_crawler_priority_queue', f'{url};{crawler_mode}')
r_serv_onion.sadd(f'{crawler_type}_crawler_priority_queue', f'{url};{crawler_mode}')
# CURRENTLY DISABLED
# # add auto crawled url for user UI
@@ -750,7 +750,6 @@ def api_add_crawler_task(data, user_id=None):
depth_limit = 0
except ValueError:
return ({'error':'invalid depth limit'}, 400)
print(url, screenshot, har, depth_limit)
return create_crawler_task(url, screenshot=screenshot, har=har, depth_limit=depth_limit, crawler_type='onion')

View File

@@ -15,12 +15,12 @@ import datetime
sys.path.append(os.path.join(os.environ['AIL_BIN'], 'lib/'))
import Domain
import crawlers
import Import_helper
import Cryptocurrency
import Pgp
import Item
import Paste
import Tag
import Term
import Tracker
@@ -568,7 +568,7 @@ def add_crawler_task():
return create_json_response(res[0], res[1])
dict_res = {'url': data['url']}
return create_json_response(dict_res, res[1])
return create_json_response(dict_res, 200)
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # IMPORT # # # # # # # # # # # # # # # # # #