From 1c2cdad38b3d4ccb54294439579a65fb5cade121 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Rapha=C3=ABl=20Vinot?=
Date: Wed, 1 Apr 2020 17:44:06 +0200
Subject: [PATCH] new: Allow admin to rebuild cache

---
 config/generic.json.sample |  4 +++-
 lookyloo/lookyloo.py       | 13 +++++++++++++
 poetry.lock                | 20 +++++++++++++++++++-
 pyproject.toml             |  1 +
 website/web/__init__.py    | 35 +++++++++++++++++++++++++++++++++++
 5 files changed, 71 insertions(+), 2 deletions(-)

diff --git a/config/generic.json.sample b/config/generic.json.sample
index a2c055b..b2ddb47 100644
--- a/config/generic.json.sample
+++ b/config/generic.json.sample
@@ -2,8 +2,10 @@
     "loglevel": "INFO",
     "only_global_lookups": true,
     "splash_url": "http://127.0.0.1:8050",
+    "cache_clean_user": {},
     "_notes": {
         "only_global_lookups": "Set it to True if your instance is publicly available so users aren't able to scan your internal network",
-        "loglevel": "Can be one of the value listed here: https://docs.python.org/3/library/logging.html#levels"
+        "loglevel": "Can be one of the value listed here: https://docs.python.org/3/library/logging.html#levels",
+        "cache_clean_user": "Format: {username: password}"
     }
 }
diff --git a/lookyloo/lookyloo.py b/lookyloo/lookyloo.py
index 59bae64..4736c95 100644
--- a/lookyloo/lookyloo.py
+++ b/lookyloo/lookyloo.py
@@ -70,6 +70,19 @@ class Lookyloo():
         else:
             self.use_sane_js = True
 
+    def rebuild_cache(self):
+        self.redis.flushdb()
+        self._init_existing_dumps()
+
+    def remove_pickle(self, capture_dir: Path):
+        if (capture_dir / 'tree.pickle').exists():
+            (capture_dir / 'tree.pickle').unlink()
+
+    def rebuild_all(self):
+        for capture_dir in self.capture_dirs:
+            self.remove_pickle(capture_dir)
+        self.rebuild_cache()
+
     def get_config(self, entry: str) -> Any:
         """Get an entry from the generic config file. Automatic fallback to the sample file"""
         if 'generic' in self.configs:
diff --git a/poetry.lock b/poetry.lock
index 95983f3..b660e31 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -269,6 +269,17 @@ dev = ["pytest", "coverage", "tox", "sphinx", "pallets-sphinx-themes", "sphinxco
 docs = ["sphinx", "pallets-sphinx-themes", "sphinxcontrib-log-cabinet", "sphinx-issues"]
 dotenv = ["python-dotenv"]
 
+[[package]]
+category = "main"
+description = "Basic and Digest HTTP authentication for Flask routes"
+name = "flask-httpauth"
+optional = false
+python-versions = "*"
+version = "3.3.0"
+
+[package.dependencies]
+Flask = "*"
+
 [[package]]
 category = "main"
 description = "WSGI HTTP Server for UNIX"
@@ -305,6 +316,7 @@ six = "^1.14.0"
 reference = "9ec997b0f1c09d3034fd861fd17d972da479f505"
 type = "git"
 url = "https://github.com/viper-framework/har2tree.git"
+
 [[package]]
 category = "main"
 description = "A featureful, immutable, and correct URL for Python."
@@ -744,6 +756,7 @@ requests = "^2.22.0"
 reference = "3ea143f44d37ab701c70ffb38408528ddb4e2b6e"
 type = "git"
 url = "https://github.com/CIRCL/PySaneJS.git"
+
 [[package]]
 category = "main"
 description = "pytest: simple powerful testing with Python"
@@ -884,6 +897,7 @@ scrapy-splash = "^0.7.2"
 reference = "300ee49cb21784514dd2a35b374ce06c4b7f04a6"
 type = "git"
 url = "https://github.com/viper-framework/ScrapySplashWrapper.git"
+
 [[package]]
 category = "main"
 description = "Service identity verification for pyOpenSSL & cryptography."
@@ -1082,7 +1096,7 @@ test = ["coverage (>=5.0.3)", "zope.event", "zope.testing"]
 testing = ["coverage (>=5.0.3)", "zope.event", "zope.testing"]
 
 [metadata]
-content-hash = "4294a7aa675b6d425096a74ce78486ed9f3c4a50e2549837dfc86c389290fa68"
+content-hash = "23715c6f42d23f07b8be767480974259f8ac2e99a209d04cb076b5d2b6d0e6b4"
 python-versions = "^3.6"
 
 [metadata.files]
@@ -1265,6 +1279,10 @@ flask = [
     {file = "Flask-1.1.1-py2.py3-none-any.whl", hash = "sha256:45eb5a6fd193d6cf7e0cf5d8a5b31f83d5faae0293695626f539a823e93b13f6"},
     {file = "Flask-1.1.1.tar.gz", hash = "sha256:13f9f196f330c7c2c5d7a5cf91af894110ca0215ac051b5844701f2bfd934d52"},
 ]
+flask-httpauth = [
+    {file = "Flask-HTTPAuth-3.3.0.tar.gz", hash = "sha256:6ef8b761332e780f9ff74d5f9056c2616f52babc1998b01d9f361a1e439e61b9"},
+    {file = "Flask_HTTPAuth-3.3.0-py2.py3-none-any.whl", hash = "sha256:0149953720489407e51ec24bc2f86273597b7973d71cd51f9443bd0e2a89bd72"},
+]
 gunicorn = [
     {file = "gunicorn-20.0.4-py2.py3-none-any.whl", hash = "sha256:cd4a810dd51bf497552cf3f863b575dabd73d6ad6a91075b65936b151cbf4f9c"},
     {file = "gunicorn-20.0.4.tar.gz", hash = "sha256:1904bb2b8a43658807108d59c3f3d56c2b6121a701161de0ddf9ad140073c626"},
diff --git a/pyproject.toml b/pyproject.toml
index 644f8d8..cd2f0d9 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -44,6 +44,7 @@ bootstrap-flask = "^1.2.0"
 cloudscraper = "^1.2.20"
 defang = "^0.5.3"
 vt-py = "^0.5.2"
+Flask-HTTPAuth = "^3.3.0"
 
 [tool.poetry.dev-dependencies]
 mypy = "^0.761"
diff --git a/website/web/__init__.py b/website/web/__init__.py
index 6516e44..9f91019 100644
--- a/website/web/__init__.py
+++ b/website/web/__init__.py
@@ -10,6 +10,7 @@ from pathlib import Path
 
 from flask import Flask, render_template, request, session, send_file, redirect, url_for, Response, flash
 from flask_bootstrap import Bootstrap  # type: ignore
+from flask_httpauth import HTTPDigestAuth  # type: ignore
 
 from lookyloo.helpers import get_homedir, update_user_agents, get_user_agents
 from lookyloo.lookyloo import Lookyloo
@@ -32,9 +33,43 @@ Bootstrap(app)
 app.config['BOOTSTRAP_SERVE_LOCAL'] = True
 app.config['SESSION_COOKIE_NAME'] = 'lookyloo'
 app.debug = False
+auth = HTTPDigestAuth()
 
 lookyloo: Lookyloo = Lookyloo()
 
+user = lookyloo.get_config('cache_clean_user')
+
+
+@auth.get_password
+def get_pw(username):
+    if username in user:
+        return user.get(username)
+    return None
+
+
+@app.route('/rebuild_all')
+@auth.login_required
+def rebuild_all():
+    lookyloo.rebuild_all()
+    return redirect(url_for('index'))
+
+
+@app.route('/rebuild_cache')
+@auth.login_required
+def rebuild_cache():
+    lookyloo.rebuild_cache()
+    return redirect(url_for('index'))
+
+
+@app.route('/tree/<tree_uuid>/rebuild')
+@auth.login_required
+def rebuild_tree(tree_uuid):
+    capture_dir = lookyloo.lookup_capture_dir(tree_uuid)
+    if capture_dir:
+        lookyloo.remove_pickle(capture_dir)
+        return redirect(url_for('tree', tree_uuid=tree_uuid))
+    return redirect(url_for('index'))
+
 
 # keep
 def load_tree(capture_dir: Path) -> Tuple[dict, str, str, str, dict]:
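Note on usage: the new routes are protected by HTTP digest authentication and only accept credentials listed in the new "cache_clean_user" entry of config/generic.json (format: {username: password}, empty by default). The following is a minimal client-side sketch of how an admin could trigger a rebuild, assuming a hypothetical user "admin" with password "changeme" has been added to that mapping and the instance listens on http://127.0.0.1:5100 (both placeholders); it relies on the third-party requests library, which is not part of this change.

import requests
from requests.auth import HTTPDigestAuth

# Placeholder instance URL and credentials; the credentials must match the
# "cache_clean_user" mapping in config/generic.json, e.g. {"admin": "changeme"}.
BASE_URL = 'http://127.0.0.1:5100'
AUTH = HTTPDigestAuth('admin', 'changeme')

# Flush the redis cache and re-index every capture directory.
r = requests.get(f'{BASE_URL}/rebuild_cache', auth=AUTH, allow_redirects=False)
print(r.status_code)  # 302 redirect to the index on success, 401 without valid credentials

# Heavier variant: also delete every tree.pickle before rebuilding the cache.
r = requests.get(f'{BASE_URL}/rebuild_all', auth=AUTH, allow_redirects=False)
print(r.status_code)

The per-capture endpoint works the same way: a GET on /tree/<tree_uuid>/rebuild with the same digest credentials discards only that capture's pickled tree and redirects back to its tree view.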