2019-01-30 14:30:01 +01:00
|
|
|
#!/usr/bin/env python3
|
|
|
|
# -*- coding: utf-8 -*-
|
|
|
|
|
|
|
|
from zipfile import ZipFile, ZIP_DEFLATED
|
|
|
|
from io import BytesIO
|
|
|
|
import os
|
2020-01-06 15:32:38 +01:00
|
|
|
from pathlib import Path
|
2020-04-22 12:03:10 +02:00
|
|
|
from datetime import datetime, timedelta
|
2020-05-23 03:37:24 +02:00
|
|
|
import json
|
2019-01-30 14:30:01 +01:00
|
|
|
|
2020-06-26 12:07:25 +02:00
|
|
|
from flask import Flask, render_template, request, send_file, redirect, url_for, Response, flash, jsonify
|
2020-01-06 15:32:38 +01:00
|
|
|
from flask_bootstrap import Bootstrap # type: ignore
|
2020-04-01 17:44:06 +02:00
|
|
|
from flask_httpauth import HTTPDigestAuth # type: ignore
|
2019-01-30 14:30:01 +01:00
|
|
|
|
2019-03-29 20:11:44 +01:00
|
|
|
from lookyloo.helpers import get_homedir, update_user_agents, get_user_agents
|
2019-01-30 14:30:01 +01:00
|
|
|
from lookyloo.lookyloo import Lookyloo
|
2020-06-29 11:59:01 +02:00
|
|
|
from lookyloo.exceptions import NoValidHarFile, MissingUUID
|
2020-04-22 14:58:01 +02:00
|
|
|
from .proxied import ReverseProxied
|
2019-01-30 14:30:01 +01:00
|
|
|
|
2020-06-04 18:23:36 +02:00
|
|
|
from typing import Optional, Dict, Any
|
2019-01-30 14:30:01 +01:00
|
|
|
|
2020-04-03 17:51:58 +02:00
|
|
|
import logging
|
|
|
|
|
2020-01-06 15:32:38 +01:00
|
|
|
# Flask application bootstrap: WSGI app, session secret, Bootstrap assets,
# digest authentication and the Lookyloo backend used by every route below.
app: Flask = Flask(__name__)
app.wsgi_app = ReverseProxied(app.wsgi_app)  # type: ignore

# The session secret key is persisted on disk so sessions survive restarts.
# (Re)generate it when the file is missing or too short to be safe.
secret_file_path: Path = get_homedir() / 'secret_key'

if not secret_file_path.exists() or secret_file_path.stat().st_size < 64:
    with secret_file_path.open('wb') as f:
        f.write(os.urandom(64))

with secret_file_path.open('rb') as f:
    app.config['SECRET_KEY'] = f.read()

Bootstrap(app)
app.config['BOOTSTRAP_SERVE_LOCAL'] = True
app.config['SESSION_COOKIE_NAME'] = 'lookyloo'
app.debug = False
auth = HTTPDigestAuth()

lookyloo: Lookyloo = Lookyloo()

# Credentials for the routes protected by HTTP digest auth.
user = lookyloo.get_config('cache_clean_user')
# Optional timedelta kwargs restricting how far back the index page looks.
time_delta_on_index = lookyloo.get_config('time_delta_on_index')

logging.basicConfig(level=lookyloo.get_config('loglevel'))
2020-05-23 03:37:24 +02:00
|
|
|
# Human-readable rendering of a size given in bytes.
# Source: https://stackoverflow.com/questions/1094841/reusable-library-to-get-human-readable-version-of-file-size
def sizeof_fmt(num, suffix='B'):
    """Format *num* with binary (1024-based) unit prefixes, e.g. '1.5KiB'."""
    prefixes = ('', 'Ki', 'Mi', 'Gi', 'Ti', 'Pi', 'Ei', 'Zi')
    for prefix in prefixes:
        if abs(num) < 1024.0:
            return "%3.1f%s%s" % (num, prefix, suffix)
        num /= 1024.0
    # Anything past zebibytes falls through to the yobibyte prefix.
    return "%.1f%s%s" % (num, 'Yi', suffix)
|
|
|
|
|
|
|
|
|
|
|
|
# Expose the size formatter to all Jinja templates.
app.jinja_env.globals['sizeof_fmt'] = sizeof_fmt
|
|
|
2020-06-25 16:43:36 +02:00
|
|
|
@app.after_request
def after_request(response):
    """Record the client's user agent for the user-agent statistics.

    When running behind a reverse proxy the client address is taken from
    the X-Real-IP header instead of the socket peer.
    """
    ua = request.headers.get('User-Agent')
    real_ip = request.headers.get('X-Real-IP')
    if ua:
        source_ip = real_ip if real_ip else request.remote_addr
        lookyloo.cache_user_agents(ua, source_ip)
    return response
|
|
|
|
|
|
|
|
|
2020-04-01 17:44:06 +02:00
|
|
|
@auth.get_password
def get_pw(username: str) -> Optional[str]:
    """Return the digest-auth password for *username*, or None if unknown.

    dict.get already returns None for missing keys; the previous
    membership test performed a redundant second lookup.
    """
    return user.get(username)
|
|
|
|
|
|
|
|
|
|
|
|
@app.route('/rebuild_all')
@auth.login_required
def rebuild_all():
    """Drop and rebuild every capture tree (admin only), then go home."""
    lookyloo.rebuild_all()
    return redirect(url_for('index'))
|
|
|
|
|
|
|
|
|
|
|
|
@app.route('/rebuild_cache')
@auth.login_required
def rebuild_cache():
    """Rebuild the capture cache (admin only), then go home."""
    lookyloo.rebuild_cache()
    return redirect(url_for('index'))
|
|
|
|
|
|
|
|
|
2020-05-18 18:32:59 +02:00
|
|
|
@app.route('/tree/<string:tree_uuid>/rebuild')
@auth.login_required
def rebuild_tree(tree_uuid: str):
    """Drop the cached pickle of one tree so it is rebuilt on next load."""
    try:
        lookyloo.remove_pickle(tree_uuid)
        return redirect(url_for('tree', tree_uuid=tree_uuid))
    except Exception:
        # Typically an unknown UUID: fall back to the index page.
        return redirect(url_for('index'))
|
2020-04-01 17:44:06 +02:00
|
|
|
|
2019-01-30 14:30:01 +01:00
|
|
|
|
|
|
|
@app.route('/submit', methods=['POST', 'GET'])
def submit():
    """API endpoint: enqueue a scrape described by a JSON body, return its UUID."""
    query = request.get_json(force=True)
    perma_uuid = lookyloo.enqueue_scrape(query)
    return Response(perma_uuid, mimetype='text/text')
|
|
|
|
|
|
|
|
|
|
|
|
@app.route('/scrape', methods=['GET', 'POST'])
def scrape_web():
    """Render the scrape form (GET) or trigger a capture (POST).

    On a valid POST, redirects to the tree view of the new capture.
    """
    if request.form.get('url'):
        # check if the post request has the file part
        if 'cookies' in request.files and request.files['cookies'].filename:
            cookie_file = request.files['cookies'].stream
        else:
            cookie_file = None
        url = request.form.get('url')
        if url:
            # Form values arrive as strings: coerce them to the types
            # lookyloo.scrape declares (the old code passed them raw,
            # hidden behind "# type: ignore").
            depth: int = int(request.form.get('depth')) if request.form.get('depth') else 1
            listing: bool = bool(request.form.get('listing'))
            perma_uuid = lookyloo.scrape(url=url, cookies_pseudofile=cookie_file,
                                         depth=depth, listing=listing,
                                         user_agent=request.form.get('user_agent'),
                                         os=request.form.get('os'), browser=request.form.get('browser'))
            return redirect(url_for('tree', tree_uuid=perma_uuid))
    user_agents: Dict[str, Any] = {}
    if lookyloo.get_config('use_user_agents_users'):
        lookyloo.build_ua_file()
        # NOTE: For now, just generate the file, so we have an idea of the size
        # user_agents = get_user_agents('own_user_agents')
    if not user_agents:
        user_agents = get_user_agents()
    user_agents.pop('by_frequency')
    return render_template('scrape.html', user_agents=user_agents)
|
2019-01-30 14:30:01 +01:00
|
|
|
|
|
|
|
|
2020-05-20 19:11:15 +02:00
|
|
|
@app.route('/tree/<string:tree_uuid>/hostname/<string:node_uuid>/text', methods=['GET'])
def hostnode_details_text(tree_uuid: str, node_uuid: str):
    """Download the URLs of one hostname node as a markdown file."""
    hostnode = lookyloo.get_hostnode_from_tree(tree_uuid, node_uuid)
    urls = [url.name for url in hostnode.urls]
    content = '''# URLs

{}
'''.format('\n'.join(urls))
    to_return = BytesIO(content.encode())
    to_return.seek(0)
    return send_file(to_return, mimetype='text/markdown',
                     as_attachment=True, attachment_filename='file.md')
|
|
|
|
|
|
|
|
|
2020-05-20 19:11:15 +02:00
|
|
|
@app.route('/tree/<string:tree_uuid>/hostname_popup/<string:node_uuid>', methods=['GET'])
def hostnode_popup(tree_uuid: str, node_uuid: str):
    """Render the details popup of a hostname node.

    The two icon maps associate response/request features (as computed by
    the investigator) with the static image shown next to each URL in the
    popup template.
    """
    # Icons keyed by response feature. NOTE(review): key order may matter if
    # the template iterates this dict — keep the order as-is.
    keys_response = {
        'js': "/static/javascript.png",
        'exe': "/static/exe.png",
        'css': "/static/css.png",
        'font': "/static/font.png",
        'html': "/static/html.png",
        'json': "/static/json.png",
        'iframe': "/static/ifr.png",
        'image': "/static/img.png",
        'unknown_mimetype': "/static/wtf.png",
        'video': "/static/video.png",
        'response_cookie': "/static/cookie_received.png",
        'redirect': "/static/redirect.png",
        'redirect_to_nothing': "/static/cookie_in_url.png"
    }
    # Icons keyed by request feature.
    keys_request = {
        'request_cookie': "/static/cookie_read.png",
    }

    hostnode, urls = lookyloo.get_hostnode_investigator(tree_uuid, node_uuid)

    return render_template('hostname_popup.html',
                           tree_uuid=tree_uuid,
                           hostname_uuid=node_uuid,
                           hostname=hostnode.name,
                           urls=urls,
                           keys_response=keys_response,
                           keys_request=keys_request)
|
|
|
|
|
|
|
|
|
2020-05-26 17:45:04 +02:00
|
|
|
@app.route('/tree/<string:tree_uuid>/url/<string:node_uuid>/request_cookies', methods=['GET'])
def urlnode_request_cookies(tree_uuid: str, node_uuid: str):
    """Download the cookies sent with this URL node's request as a text file."""
    urlnode = lookyloo.get_urlnode_from_tree(tree_uuid, node_uuid)
    if not urlnode.request_cookie:
        return

    serialized = json.dumps(urlnode.request_cookie, indent=2)
    return send_file(BytesIO(serialized.encode()), mimetype='text/plain',
                     as_attachment=True, attachment_filename='request_cookies.txt')
|
|
|
|
|
|
|
|
|
|
|
|
@app.route('/tree/<string:tree_uuid>/url/<string:node_uuid>/response_cookies', methods=['GET'])
def urlnode_response_cookies(tree_uuid: str, node_uuid: str):
    """Download the cookies set by this URL node's response as a text file."""
    urlnode = lookyloo.get_urlnode_from_tree(tree_uuid, node_uuid)
    if not urlnode.response_cookie:
        return

    serialized = json.dumps(urlnode.response_cookie, indent=2)
    return send_file(BytesIO(serialized.encode()), mimetype='text/plain',
                     as_attachment=True, attachment_filename='response_cookies.txt')
|
|
|
|
|
|
|
|
|
2020-05-23 03:37:24 +02:00
|
|
|
@app.route('/tree/<string:tree_uuid>/url/<string:node_uuid>/posted_data', methods=['GET'])
def urlnode_post_request(tree_uuid: str, node_uuid: str):
    """Download the data POSTed by this URL node, pretty-printed when JSON."""
    urlnode = lookyloo.get_urlnode_from_tree(tree_uuid, node_uuid)
    if not urlnode.posted_data:
        return
    if isinstance(urlnode.posted_data, (dict, list)):
        # JSON blob, pretty print.
        posted = json.dumps(urlnode.posted_data, indent=2)
    else:
        posted = urlnode.posted_data

    # The body may already be bytes; only encode textual payloads.
    to_return = BytesIO(posted if isinstance(posted, bytes) else posted.encode())
    to_return.seek(0)
    return send_file(to_return, mimetype='text/plain',
                     as_attachment=True, attachment_filename='posted_data.txt')
|
2020-05-18 18:35:20 +02:00
|
|
|
|
|
|
|
|
2020-05-20 19:11:15 +02:00
|
|
|
@app.route('/tree/<string:tree_uuid>/url/<string:node_uuid>', methods=['GET'])
def urlnode_details(tree_uuid: str, node_uuid: str):
    """Download the response body of a URL node as a zip file.

    Ships a placeholder entry when the node has no body content, so the
    caller always receives a valid archive.
    """
    urlnode = lookyloo.get_urlnode_from_tree(tree_uuid, node_uuid)
    to_return = BytesIO()
    # Read the body once (the old code called getvalue() twice) and merge
    # the two duplicated ZipFile blocks into a single writer.
    body_content = urlnode.body.getvalue() if hasattr(urlnode, 'body') else b''
    with ZipFile(to_return, 'w', ZIP_DEFLATED) as zfile:
        if body_content:
            zfile.writestr(urlnode.filename, body_content)
        else:
            zfile.writestr('file.txt', b'Response body empty')
    to_return.seek(0)
    return send_file(to_return, mimetype='application/zip',
                     as_attachment=True, attachment_filename='file.zip')
|
|
|
|
|
|
|
|
|
2020-04-20 16:41:42 +02:00
|
|
|
@app.route('/tree/<string:tree_uuid>/trigger_modules/', defaults={'force': False})
@app.route('/tree/<string:tree_uuid>/trigger_modules/<int:force>', methods=['GET'])
def trigger_modules(tree_uuid: str, force: int):
    """Run the third-party modules on a capture; a truthy *force* re-queries them."""
    lookyloo.trigger_modules(tree_uuid, bool(force))
    return redirect(url_for('modules', tree_uuid=tree_uuid))
|
|
|
|
|
|
|
|
|
2020-05-13 17:31:27 +02:00
|
|
|
@app.route('/tree/<string:tree_uuid>/stats', methods=['GET'])
def stats(tree_uuid: str):
    """Render the statistics page of a capture."""
    # Local renamed so it does not shadow this view function.
    capture_stats = lookyloo.get_statistics(tree_uuid)
    return render_template('statistics.html', uuid=tree_uuid, stats=capture_stats)
|
|
|
|
|
|
|
|
|
2020-04-20 16:41:42 +02:00
|
|
|
@app.route('/tree/<string:tree_uuid>/modules', methods=['GET'])
def modules(tree_uuid: str):
    """Render the third-party module reports of a capture.

    Condenses the raw VirusTotal and Phishing Initiative responses into
    the small structures the template expects.
    """
    modules_responses = lookyloo.get_modules_responses(tree_uuid)
    if not modules_responses:
        return redirect(url_for('tree', tree_uuid=tree_uuid))

    vt_short_result: Dict[str, Dict[str, Any]] = {}
    if 'vt' in modules_responses:
        # VirusTotal cleanup
        vt = modules_responses.pop('vt')
        # Get malicious entries
        for url, full_report in vt.items():
            analysis = full_report['attributes']['last_analysis_results']
            vt_short_result[url] = {
                'permaurl': f'https://www.virustotal.com/gui/url/{full_report["id"]}/detection',
                'malicious': [(vendor, result['result'])
                              for vendor, result in analysis.items()
                              if result['category'] == 'malicious'],
            }

    pi_short_result: Dict[str, str] = {}
    if 'pi' in modules_responses:
        pi = modules_responses.pop('pi')
        for url, full_report in pi.items():
            if full_report:
                pi_short_result[url] = full_report['results'][0]['tag_label']

    return render_template('modules.html', uuid=tree_uuid, vt=vt_short_result, pi=pi_short_result)
|
2020-04-20 16:41:42 +02:00
|
|
|
|
|
|
|
|
2019-01-30 14:30:01 +01:00
|
|
|
@app.route('/tree/<string:tree_uuid>/image', methods=['GET'])
def image(tree_uuid: str):
    """Download the screenshot of a capture."""
    screenshot = lookyloo.get_screenshot(tree_uuid)
    return send_file(screenshot, mimetype='image/png',
                     as_attachment=True, attachment_filename='image.png')
|
|
|
|
|
|
|
|
|
2020-05-12 16:53:10 +02:00
|
|
|
@app.route('/tree/<string:tree_uuid>/html', methods=['GET'])
def html(tree_uuid: str):
    """Download the rendered HTML page of a capture."""
    page = lookyloo.get_html(tree_uuid)
    return send_file(page, mimetype='text/html',
                     as_attachment=True, attachment_filename='page.html')
|
|
|
|
|
|
|
|
|
2020-05-26 17:45:04 +02:00
|
|
|
@app.route('/tree/<string:tree_uuid>/cookies', methods=['GET'])
def cookies(tree_uuid: str):
    """Download all the cookies of a capture as JSON."""
    cookie_jar = lookyloo.get_cookies(tree_uuid)
    return send_file(cookie_jar, mimetype='application/json',
                     as_attachment=True, attachment_filename='cookies.json')
|
|
|
|
|
|
|
|
|
2020-05-12 16:53:10 +02:00
|
|
|
@app.route('/tree/<string:tree_uuid>/export', methods=['GET'])
def export(tree_uuid: str):
    """Download the full capture as a zip archive."""
    archive = lookyloo.get_capture(tree_uuid)
    return send_file(archive, mimetype='application/zip',
                     as_attachment=True, attachment_filename='capture.zip')
|
|
|
|
|
|
|
|
|
2020-03-23 12:45:57 +01:00
|
|
|
@app.route('/redirects/<string:tree_uuid>', methods=['GET'])
def redirects(tree_uuid: str):
    """Download the redirect chain of a capture as a plain-text file."""
    cache = lookyloo.capture_cache(tree_uuid)
    if not cache:
        return Response('Not available.', mimetype='text/text')
    if not cache['redirects']:
        return Response('No redirects.', mimetype='text/text')
    payload = '\n'.join(cache['redirects']).encode()
    return send_file(BytesIO(payload), mimetype='text/text',
                     as_attachment=True, attachment_filename='redirects.txt')
|
|
|
|
|
|
|
|
|
2020-03-26 01:56:24 +01:00
|
|
|
@app.route('/cache_tree/<string:tree_uuid>', methods=['GET'])
def cache_tree(tree_uuid: str):
    """(Re)cache one capture tree, then return to the index."""
    lookyloo.cache_tree(tree_uuid)
    return redirect(url_for('index'))
|
|
|
|
|
|
|
|
|
2020-05-11 19:58:46 +02:00
|
|
|
@app.route('/tree/<string:tree_uuid>/send_mail', methods=['POST', 'GET'])
def send_mail(tree_uuid: str):
    """Email the capture (with an optional comment) to the configured recipient.

    Each form field was previously fetched twice per request; a single
    lookup with a falsy fallback is equivalent and drops the
    "# type: ignore" workarounds.
    """
    email: str = request.form.get('email') or ''
    comment: str = request.form.get('comment') or ''
    lookyloo.send_mail(tree_uuid, email, comment)
    return redirect(url_for('tree', tree_uuid=tree_uuid))
|
|
|
|
|
|
|
|
|
2019-01-30 14:30:01 +01:00
|
|
|
@app.route('/tree/<string:tree_uuid>', methods=['GET'])
@app.route('/tree/<string:tree_uuid>/<string:urlnode_uuid>', methods=['GET'])
def tree(tree_uuid: str, urlnode_uuid: Optional[str]=None):
    """Render the capture tree, optionally focused on one URL node.

    Redirects to the index with a flashed message on any cache problem.
    """
    if tree_uuid == 'False':
        flash("Unable to process your request. The domain may not exist, or splash isn't started", 'error')
        return redirect(url_for('index'))
    try:
        cache = lookyloo.capture_cache(tree_uuid)
    except MissingUUID:
        flash(f'Unable to find this UUID ({tree_uuid}). The capture may still be ongoing, try again later.', 'error')
        return redirect(url_for('index'))

    if not cache:
        flash('Invalid cache.', 'error')
        return redirect(url_for('index'))

    if 'error' in cache:
        flash(cache['error'], 'error')
        return redirect(url_for('index'))

    try:
        # Simplified from a hand-written "True if ... else False" branch.
        enable_mail_notification = bool(lookyloo.get_config('enable_mail_notification'))
        tree_json, start_time, user_agent, root_url, meta = lookyloo.load_tree(tree_uuid)
        return render_template('tree.html', tree_json=tree_json, start_time=start_time,
                               user_agent=user_agent, root_url=root_url, tree_uuid=tree_uuid,
                               meta=meta, enable_mail_notification=enable_mail_notification,
                               urlnode_uuid=urlnode_uuid)
    except NoValidHarFile as e:
        return render_template('error.html', error_message=e)
|
2019-01-30 14:30:01 +01:00
|
|
|
|
|
|
|
|
2020-05-18 18:32:59 +02:00
|
|
|
def index_generic(show_hidden: bool=False):
    """Build the capture index page.

    When *show_hidden* is True, only the captures flagged 'no_index' are
    listed (admin view); otherwise those are skipped. Captures older than
    the configured time delta are filtered out.
    """
    titles = []
    if time_delta_on_index:
        # We want to filter the captures on the index
        cut_time = datetime.now() - timedelta(**time_delta_on_index)
    else:
        cut_time = None  # type: ignore
    for capture_uuid in lookyloo.capture_uuids:
        cached = lookyloo.capture_cache(capture_uuid)
        if not cached or 'error' in cached:
            continue
        if show_hidden:
            if 'no_index' not in cached:
                # Only display the hidden ones
                continue
        elif 'no_index' in cached:
            continue
        if cut_time and datetime.fromisoformat(cached['timestamp'][:-1]) < cut_time:  # type: ignore
            continue
        # The comparison is already a bool; the previous
        # "True if ... == '1' else False" was redundant.
        titles.append((cached['uuid'], cached['title'], cached['timestamp'], cached['url'],
                       cached['redirects'], cached['incomplete_redirects'] == '1'))
    # Most recent first; the URL breaks timestamp ties.
    titles = sorted(titles, key=lambda x: (x[2], x[3]), reverse=True)
    return render_template('index.html', titles=titles)
|
2020-04-23 00:12:10 +02:00
|
|
|
|
|
|
|
|
|
|
|
@app.route('/', methods=['GET'])
def index():
    """Landing page; also answers HEAD probes used as a health check."""
    if request.method == 'HEAD':
        # Just returns ack if the webserver is running
        return 'Ack'
    update_user_agents()
    return index_generic()
|
|
|
|
|
|
|
|
|
|
|
|
@app.route('/hidden', methods=['GET'])
@auth.login_required
def index_hidden():
    """Admin-only index restricted to the hidden (non-indexed) captures."""
    return index_generic(show_hidden=True)
|
2020-06-26 12:07:25 +02:00
|
|
|
|
|
|
|
# Query API
|
|
|
|
|
2020-06-29 11:59:01 +02:00
|
|
|
|
2020-06-26 12:07:25 +02:00
|
|
|
@app.route('/json/<string:tree_uuid>/redirects', methods=['GET'])
def json_redirects(tree_uuid: str):
    """JSON API: return the redirect chain of a capture.

    When the cached chain is incomplete, the tree is (re)built first to
    collect all redirects.
    """
    cache = lookyloo.capture_cache(tree_uuid)
    if not cache:
        return {'error': 'UUID missing in cache, try again later.'}

    to_return: Dict[str, Any] = {'response': {'url': cache['url'], 'redirects': []}}
    if not cache['redirects']:
        to_return['response']['info'] = 'No redirects'
        return to_return
    if cache['incomplete_redirects']:
        # Trigger tree build, get all redirects
        lookyloo.load_tree(tree_uuid)
        cache = lookyloo.capture_cache(tree_uuid)
        if cache:
            to_return['response']['redirects'] = cache['redirects']
    else:
        # Cache is complete: answer straight from it. (The previous
        # rendering attached this else to "if cache:", which would have
        # dereferenced a falsy cache.)
        to_return['response']['redirects'] = cache['redirects']

    return jsonify(to_return)
|