2021-03-12 16:53:00 +01:00
|
|
|
#!/usr/bin/env python3

import logging
import os

from datetime import datetime, timedelta

from lookyloo.default import AbstractManager
from lookyloo.exceptions import MissingUUID, NoValidHarFile
from lookyloo.lookyloo import Lookyloo

# Default logging configuration for this script: timestamped messages at INFO level.
logging.basicConfig(format='%(asctime)s %(name)s %(levelname)s:%(message)s',
                    level=logging.INFO)
|
class BackgroundIndexer(AbstractManager):
    """Background worker that rebuilds missing capture pickles and keeps the
    capture indexes (URLs, body hashes, cookies) up-to-date.

    Captures that cannot be parsed (no valid HAR file) are removed from the
    redis ``lookup_dirs`` hash and moved to a ``discarded_captures`` directory
    next to the capture directory.
    """

    def __init__(self, loglevel: int=logging.INFO):
        super().__init__(loglevel)
        self.lookyloo = Lookyloo()
        self.script_name = 'background_indexer'
        # make sure discarded captures dir exists
        self.discarded_captures_dir = self.lookyloo.capture_dir.parent / 'discarded_captures'
        self.discarded_captures_dir.mkdir(parents=True, exist_ok=True)

    def _to_run_forever(self):
        """One iteration of the manager loop: rebuild pickles, repair indexes,
        then report liveness for this script."""
        self._build_missing_pickles()
        self._check_indexes()
        self.lookyloo.update_tree_cache_info(os.getpid(), self.script_name)

    def _build_missing_pickles(self):
        """Walk all captures on disk and build the tree pickle for every
        capture that doesn't have one yet.

        Captures currently locked by another process are skipped (stale locks
        older than 5 minutes are cleared). Captures without a valid HAR are
        de-indexed and moved to the discarded directory.
        """
        # reverse=True: process the most recent captures first
        # (capture directory names sort chronologically).
        for uuid_path in sorted(self.lookyloo.capture_dir.glob('**/uuid'), reverse=True):
            if (uuid_path.parent / 'tree.pickle').exists():
                # Pickle already built, nothing to do.
                continue
            lock_file = uuid_path.parent / 'lock'
            if lock_file.exists():
                # Another process owns (or owned) this capture; skip it this round.
                try:
                    with lock_file.open('r') as f:
                        lock_ts = datetime.fromisoformat(f.read())
                    if lock_ts < datetime.now() - timedelta(minutes=5):
                        # Clear old locks. They shouldn't be there, but it's gonna happen.
                        self.logger.info(f'Old lock found {lock_file}, removing it.')
                        lock_file.unlink(missing_ok=True)
                except Exception as e:
                    # Best effort: an unreadable lock is only logged, never fatal.
                    self.logger.info(f'Error while reading lock {lock_file}: {e}')
                continue

            with uuid_path.open() as f:
                # strip(): uuid files typically end with a newline; keeping it
                # would corrupt the redis keys and the log messages below.
                uuid = f.read().strip()
            if not self.lookyloo.redis.hexists('lookup_dirs', uuid):
                # The capture with this UUID exists, but it is for some reason missing in lookup_dirs
                self.lookyloo.redis.hset('lookup_dirs', uuid, str(uuid_path.parent))

            try:
                self.logger.info(f'Build pickle for {uuid}: {uuid_path.parent.name}')
                self.lookyloo.get_crawled_tree(uuid)
                self.lookyloo.trigger_modules(uuid, auto_trigger=True)
                self.logger.info(f'Pickle for {uuid} built.')
            except MissingUUID:
                self.logger.warning(f'Unable to find {uuid}. That should not happen.')
            except NoValidHarFile:
                self.logger.warning(f'Unable to build pickle for {uuid}: {uuid_path.parent.name}')
                # The capture is not working, moving it away.
                self.lookyloo.redis.hdel('lookup_dirs', uuid)
                uuid_path.parent.rename(self.discarded_captures_dir / uuid_path.parent.name)

    def _check_indexes(self):
        """Ensure every cached capture is present in the URL, body-hash and
        cookie indexes, rebuilding only the missing entries."""
        index_redis = self.lookyloo.indexing.redis
        for cache in self.lookyloo.sorted_capture_cache():
            if self.lookyloo.is_public_instance and cache.no_index:
                # Capture unindexed
                continue
            # One pipelined round-trip to check all three indexes at once.
            p = index_redis.pipeline()
            p.sismember('indexed_urls', cache.uuid)
            p.sismember('indexed_body_hashes', cache.uuid)
            p.sismember('indexed_cookies', cache.uuid)
            indexed = p.execute()
            if all(indexed):
                continue
            try:
                ct = self.lookyloo.get_crawled_tree(cache.uuid)
            except NoValidHarFile:
                # Cached capture with an unusable tree: drop the pickle so it
                # gets rebuilt (or discarded) by _build_missing_pickles.
                self.logger.warning(f'Broken pickle for {cache.uuid}')
                self.lookyloo.remove_pickle(cache.uuid)
                continue

            if not indexed[0]:
                self.logger.info(f'Indexing urls for {cache.uuid}')
                self.lookyloo.indexing.index_url_capture(ct)
            if not indexed[1]:
                self.logger.info(f'Indexing resources for {cache.uuid}')
                self.lookyloo.indexing.index_body_hashes_capture(ct)
            if not indexed[2]:
                self.logger.info(f'Indexing cookies for {cache.uuid}')
                self.lookyloo.indexing.index_cookies_capture(ct)
            # NOTE: categories aren't taken in account here, should be fixed(?)
            # see indexing.index_categories_capture(capture_uuid, categories)
|
|
|
|
|
|
|
def main():
    """Entry point: start the background indexer, waking up every 60 seconds."""
    indexer = BackgroundIndexer()
    indexer.run(sleep_in_sec=60)
|
|
|
|
|
|
# Allow running this module directly as a script.
if __name__ == '__main__':
    main()