fix: Rename scrape -> capture in async

pull/122/head
Raphaël Vinot 2020-11-05 14:14:33 +01:00
parent c7abc5df23
commit ea052c7c12
3 changed files with 7 additions and 7 deletions

View File

@@ -13,7 +13,7 @@ logging.basicConfig(format='%(asctime)s %(name)s %(levelname)s:%(message)s',
level=logging.INFO, datefmt='%I:%M:%S')
-class AsyncScraper(AbstractManager):
+class AsyncCapture(AbstractManager):
def __init__(self, storage_directory: Optional[Path]=None, loglevel: int=logging.INFO):
super().__init__(loglevel)
@@ -22,16 +22,16 @@ class AsyncScraper(AbstractManager):
self.lookyloo = Lookyloo()
def _to_run_forever(self):
-set_running('async_scrape')
+set_running('async_capture')
while True:
-url = self.lookyloo.process_scrape_queue()
+url = self.lookyloo.process_capture_queue()
if url is None or shutdown_requested():
break
-unset_running('async_scrape')
+unset_running('async_capture')
def main():
-m = AsyncScraper()
+m = AsyncCapture()
m.run(sleep_in_sec=1)

View File

@@ -13,7 +13,7 @@ def main():
p.check_returncode()
print('done.')
print('Start asynchronous ingestor...')
-Popen(['async_scrape'])
+Popen(['async_capture'])
print('done.')
print('Start website...')
Popen(['start_website'])

View File

@@ -23,7 +23,7 @@ classifiers = [
start_website = "bin.start_website:main"
start = "bin.start:main"
run_backend = "bin.run_backend:main"
-async_scrape = "bin.async_scrape:main"
+async_capture = "bin.async_capture:main"
shutdown = "bin.shutdown:main"
stop = "bin.stop:main"
rebuild_caches = "bin.rebuild_caches:main"