mirror of https://github.com/CIRCL/lookyloo
chg: Improve logging in archiver
parent 2b14d6e9e6
commit bcfaaec941
@@ -78,12 +78,16 @@ class Archiver(AbstractManager):
         # Recent captures
         directories_to_index = {capture_dir.parent.parent for capture_dir in get_captures_dir().rglob('uuid')}
         for directory_to_index in directories_to_index:
+            self.logger.info(f'Updating index for {directory_to_index}')
             self._update_index(directory_to_index)
+        self.logger.info('Recent indexes updated')

         # Archived captures
         directories_to_index = {capture_dir.parent.parent for capture_dir in self.archived_captures_dir.rglob('uuid')}
         for directory_to_index in directories_to_index:
+            self.logger.info(f'Updating index for {directory_to_index}')
             self._update_index(directory_to_index)
+        self.logger.info('Archived indexes updated')

     def _archive(self):
         archive_interval = timedelta(days=get_config('generic', 'archive'))
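For context, the index discovery in this hunk hinges on the capture layout: each capture directory holds a marker file named uuid, and the directory to index sits two levels above it. A minimal, self-contained sketch of the same comprehension (the temporary layout and all names below are invented for illustration, not part of the repository):

import tempfile
from pathlib import Path

# Hypothetical layout: <root>/<year>/<month>/<capture>/uuid
root = Path(tempfile.mkdtemp())
for month, capture in (('04', 'capture_a'), ('05', 'capture_b')):
    capture_dir = root / '2021' / month / capture
    capture_dir.mkdir(parents=True)
    (capture_dir / 'uuid').write_text(f'uuid-{capture[-1]}')

# Same pattern as the patch: rglob finds every 'uuid' marker file,
# parent.parent climbs from <capture>/uuid back to the <month> directory,
# and the set deduplicates months that hold several captures.
directories_to_index = {p.parent.parent for p in root.rglob('uuid')}
print(sorted(directories_to_index))  # each month directory, exactly once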
@@ -118,6 +122,7 @@ class Archiver(AbstractManager):
         self.logger.info('Archiving done.')

     def _compress_hars(self):
+        self.logger.info('Compressing archived captures')
         for index in self.archived_captures_dir.rglob('index'):
             with index.open('r') as _f:
                 for uuid, dirname in csv.reader(_f):
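The loop entered here reads two-column CSV index files that map a capture UUID to its directory name. A small sketch of that format, using an invented index file (an assumption for illustration, not the real on-disk data):

import csv
import tempfile
from pathlib import Path

# Invented two-column index: uuid,dirname — matching the unpacking
# 'for uuid, dirname in csv.reader(_f)' used in the hunk above.
index = Path(tempfile.mkdtemp()) / 'index'
index.write_text('uuid-a,capture_a\nuuid-b,capture_b\n')

with index.open('r') as _f:
    for uuid, dirname in csv.reader(_f):
        # The archiver resolves dirname relative to the index location.
        print(uuid, '->', index.parent / dirname)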
@@ -128,6 +133,7 @@ class Archiver(AbstractManager):
                             with gzip.open(f'{har}.gz', 'wb') as f_out:
                                 shutil.copyfileobj(f_in, f_out)
                         har.unlink()
+        self.logger.info('Archived captures compressed')

     def _load_indexes(self):
         # Initialize archives
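The compression step this hunk closes out follows the usual stream-compress-then-delete pattern. A standalone sketch with a throwaway file (the path and content are assumptions):

import gzip
import shutil
import tempfile
from pathlib import Path

har = Path(tempfile.mkdtemp()) / 'example.har'
har.write_text('{"log": {"entries": []}}')  # stand-in HAR content

with har.open('rb') as f_in:
    with gzip.open(f'{har}.gz', 'wb') as f_out:
        shutil.copyfileobj(f_in, f_out)  # streams in chunks, no full read into memory
har.unlink()  # the original is only removed after the .gz is fully written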
@@ -138,6 +144,7 @@ class Archiver(AbstractManager):
                 self.redis.hset('lookup_dirs', mapping=recent_uuids)
             else:
                 index.unlink()
+        self.logger.info('Recent indexes loaded')

         # Initialize archives
         for index in self.archived_captures_dir.rglob('index'):
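Loading an index means bulk-writing its uuid-to-directory mapping into a Redis hash. A sketch assuming a local Redis and the redis-py client (the connection details and sample mapping are assumptions):

from redis import Redis

redis = Redis(host='localhost', port=6379, decode_responses=True)
recent_uuids = {'uuid-a': '/captures/2021/04/capture_a'}  # invented sample

# hset with mapping= writes all fields in one round trip, as in the hunk.
redis.hset('lookup_dirs', mapping=recent_uuids)
print(redis.hget('lookup_dirs', 'uuid-a'))  # '/captures/2021/04/capture_a'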
@@ -145,8 +152,10 @@ class Archiver(AbstractManager):
                 archived_uuids: Mapping = {uuid: str(index.parent / dirname) for uuid, dirname in csv.reader(_f) if (index.parent / dirname).exists()}
                 if archived_uuids:
                     self.redis.hset('lookup_dirs_archived', mapping=archived_uuids)
+                    self.redis.hdel('lookup_dirs', *archived_uuids.keys())
                 else:
                     index.unlink()
+        self.logger.info('Archived indexes loaded')


 def main():
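Beyond the logging, the one behavioural change in this hunk is the added hdel: once a capture is registered in 'lookup_dirs_archived', its entry is dropped from 'lookup_dirs' so a UUID never resolves to two places. A sketch under the same redis-py assumptions as above (the sample mapping is invented):

from redis import Redis

redis = Redis(host='localhost', port=6379, decode_responses=True)
archived_uuids = {'uuid-a': '/archive/2021/04/capture_a'}  # invented sample

redis.hset('lookup_dirs_archived', mapping=archived_uuids)
redis.hdel('lookup_dirs', *archived_uuids.keys())  # the line this commit adds
print(redis.hget('lookup_dirs', 'uuid-a'))  # None: no stale recent entry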