#!/usr/bin/env python3
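"""Rebuild the redis cache (and optionally the capture pickles) for lookyloo."""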
import csv
import argparse
import logging

from lookyloo.lookyloo import Indexing, Lookyloo
from lookyloo.helpers import get_captures_dir

logging.basicConfig(format='%(asctime)s %(name)s %(levelname)s:%(message)s',
                    level=logging.INFO)


def main():
    parser = argparse.ArgumentParser(description='Rebuild the redis cache.')
    parser.add_argument('--rebuild_pickles', default=False, action='store_true',
                        help='Delete and rebuild the pickles. Count 20s/pickle, it can take a very long time.')
    args = parser.parse_args()

    lookyloo = Lookyloo()
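    # --rebuild_pickles deletes every capture pickle and regenerates it (slow);
    # the default mode only rebuilds the redis cache.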
    if args.rebuild_pickles:
        lookyloo.rebuild_all()
    else:
        lookyloo.rebuild_cache()
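
    # Drop the existing indexes so they can be rebuilt from the refreshed captures.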
    indexing = Indexing()
    indexing.clear_indexes()

    # Initialize the lookup_dirs redis hash: every 'index' file on disk is a CSV of
    # (capture UUID, directory name) rows pointing at the capture directories.
    for index in get_captures_dir().rglob('index'):
        with index.open('r') as _f:
            recent_uuids = {uuid: str(index.parent / dirname)
                            for uuid, dirname in csv.reader(_f)
                            if (index.parent / dirname).exists()}
        if recent_uuids:
            lookyloo.redis.hset('lookup_dirs', mapping=recent_uuids)

    # This call will rebuild all the caches as needed.
    lookyloo.sorted_capture_cache()


if __name__ == '__main__':
    main()