fix: [crawler] same capture uuid if a domain is already crawled

pull/604/head
Terrtia 2023-06-22 16:09:18 +02:00
parent 4567c9d400
commit 47e1343187
No known key found for this signature in database
GPG Key ID: 1E1B1F50D84613D0
1 changed file with 2 additions and 1 deletion


@@ -1239,7 +1239,8 @@ class CrawlerCapture:

     def create(self, task_uuid):
         if self.exists():
-            raise Exception(f'Error: Capture {self.uuid} already exists')
+            print(f'Capture {self.uuid} already exists') # TODO LOGS
+            return None
         launch_time = int(time.time())
         r_crawler.hset(f'crawler:task:{task_uuid}', 'capture', self.uuid)
         r_crawler.hset('crawler:captures:tasks', self.uuid, task_uuid)
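
For context, the patch makes create() return None instead of raising when a capture with the same UUID already exists, so a domain that is already being crawled no longer aborts the crawler. The snippet below is a minimal, self-contained sketch of that behaviour only; the FakeRedis stub, the exists() check and the return values are assumptions standing in for the real AIL framework objects, not the project's actual implementation.

    import time


    class FakeRedis:
        """Tiny dict-backed stand-in for the project's r_crawler Redis client (assumption)."""
        def __init__(self):
            self.hashes = {}

        def hset(self, key, field, value):
            self.hashes.setdefault(key, {})[field] = value

        def hexists(self, key, field):
            return field in self.hashes.get(key, {})


    r_crawler = FakeRedis()


    class CrawlerCapture:
        def __init__(self, capture_uuid):
            self.uuid = capture_uuid

        def exists(self):
            # The real code checks Redis; here we check the stub.
            return r_crawler.hexists('crawler:captures:tasks', self.uuid)

        def create(self, task_uuid):
            if self.exists():
                # Patched behaviour: log and bail out instead of raising,
                # so an already-captured domain is simply skipped.
                print(f'Capture {self.uuid} already exists')  # TODO LOGS
                return None
            launch_time = int(time.time())
            r_crawler.hset(f'crawler:task:{task_uuid}', 'capture', self.uuid)
            r_crawler.hset('crawler:captures:tasks', self.uuid, task_uuid)
            # ... the rest of the real method (launching the capture with launch_time)
            # is outside the diff and omitted here.


    # Usage: a second create() with the same UUID is now a no-op instead of an exception.
    capture = CrawlerCapture('abcd-1234')
    capture.create(task_uuid='task-1')
    assert capture.create(task_uuid='task-2') is None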