#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import logging
from pathlib import Path
from typing import Optional

from lookyloo.abstractmanager import AbstractManager
from lookyloo.helpers import get_homedir, set_running, shutdown_requested, unset_running
from lookyloo.lookyloo import Lookyloo
logging.basicConfig(format='%(asctime)s %(name)s %(levelname)s:%(message)s',
level=logging.INFO, datefmt='%I:%M:%S')
# Set it to True if your instance is publicly available so users aren't able to scan your internal network
only_global_lookups = False
class AsyncScraper(AbstractManager):
    """Background worker that drains the Lookyloo scrape queue.

    Runs under AbstractManager.run(), which calls _to_run_forever() in a loop
    with a sleep between passes.
    """

    def __init__(self, storage_directory: Optional[Path]=None, loglevel: int=logging.INFO):
        """Initialize the scraper.

        :param storage_directory: where scraped captures are stored; defaults
            to ``<homedir>/scraped`` when not provided.
        :param loglevel: logging level forwarded to AbstractManager and Lookyloo.
        """
        super().__init__(loglevel)
        # Bug fix: the original only assigned self.storage_directory in the
        # default branch, silently discarding an explicitly passed directory.
        if not storage_directory:
            self.storage_directory = get_homedir() / 'scraped'
        else:
            self.storage_directory = storage_directory
        self.lookyloo = Lookyloo(loglevel=loglevel, only_global_lookups=only_global_lookups)

    def _to_run_forever(self):
        """Process queued scrape requests until the queue is empty or shutdown is requested."""
        set_running('async_scrape')
        try:
            while True:
                url = self.lookyloo.process_scrape_queue()
                if url is None or shutdown_requested():
                    break
        finally:
            # Bug fix: clear the running marker even if a scrape raises,
            # otherwise the flag leaks and the instance looks permanently busy.
            unset_running('async_scrape')
if __name__ == '__main__':
    # Entry point: run the scrape worker, sleeping one second between passes.
    scraper = AsyncScraper()
    scraper.run(sleep_in_sec=1)