diff --git a/bin/async_scrape.py b/bin/async_capture.py
similarity index 82%
rename from bin/async_scrape.py
rename to bin/async_capture.py
index 375cff2..72b9323 100755
--- a/bin/async_scrape.py
+++ b/bin/async_capture.py
@@ -13,7 +13,7 @@
 logging.basicConfig(format='%(asctime)s %(name)s %(levelname)s:%(message)s',
                     level=logging.INFO, datefmt='%I:%M:%S')

-class AsyncScraper(AbstractManager):
+class AsyncCapture(AbstractManager):

     def __init__(self, storage_directory: Optional[Path]=None, loglevel: int=logging.INFO):
         super().__init__(loglevel)
@@ -22,16 +22,16 @@ class AsyncScraper(AbstractManager):
         self.lookyloo = Lookyloo()

     def _to_run_forever(self):
-        set_running('async_scrape')
+        set_running('async_capture')
         while True:
-            url = self.lookyloo.process_scrape_queue()
+            url = self.lookyloo.process_capture_queue()
             if url is None or shutdown_requested():
                 break
-        unset_running('async_scrape')
+        unset_running('async_capture')


 def main():
-    m = AsyncScraper()
+    m = AsyncCapture()
     m.run(sleep_in_sec=1)


diff --git a/bin/start.py b/bin/start.py
index 68382de..7269091 100755
--- a/bin/start.py
+++ b/bin/start.py
@@ -13,7 +13,7 @@ def main():
     p.check_returncode()
     print('done.')
     print('Start asynchronous ingestor...')
-    Popen(['async_scrape'])
+    Popen(['async_capture'])
     print('done.')
     print('Start website...')
     Popen(['start_website'])
diff --git a/pyproject.toml b/pyproject.toml
index 8248237..b1d2cea 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -23,7 +23,7 @@ classifiers = [
 start_website = "bin.start_website:main"
 start = "bin.start:main"
 run_backend = "bin.run_backend:main"
-async_scrape = "bin.async_scrape:main"
+async_capture = "bin.async_capture:main"
 shutdown = "bin.shutdown:main"
 stop = "bin.stop:main"
 rebuild_caches = "bin.rebuild_caches:main"