chg: Fix typing

pull/67/head
Raphaël Vinot 2020-01-24 11:25:53 +01:00
parent f1309ce5e3
commit c9cbf27acc
4 changed files with 8 additions and 8 deletions

View File

@@ -2,7 +2,7 @@
 # -*- coding: utf-8 -*-
 import os
 from typing import List, Optional
-from io import BytesIO
+from io import BufferedIOBase
 from pathlib import Path
 from .exceptions import MissingEnv, CreateDirectoryException
 from redis import Redis
@@ -144,7 +144,7 @@ def get_user_agents() -> dict:
     with open(paths[0]) as f:
         return json.load(f)

-def load_cookies(cookie_pseudofile: Optional[BytesIO]=None) -> List[dict]:
+def load_cookies(cookie_pseudofile: Optional[BufferedIOBase]=None) -> List[dict]:
     if cookie_pseudofile:
         cookies = json.load(cookie_pseudofile)
     else:
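
The change itself is purely a typing fix: BytesIO is one concrete class, while BufferedIOBase is the abstract base class for binary streams that BytesIO and files opened in binary mode both inherit from, so the relaxed hint accepts any binary file-like source that json.load can read. A minimal sketch of the idea (the simplified load_cookies below is illustrative, not the full helper):

# Illustrative sketch only, not the real helper: the BufferedIOBase hint
# covers in-memory buffers and real binary files alike.
import json
from io import BufferedIOBase, BytesIO
from typing import List, Optional

def load_cookies(cookie_pseudofile: Optional[BufferedIOBase]=None) -> List[dict]:
    if cookie_pseudofile:
        return json.load(cookie_pseudofile)
    return []

# An in-memory buffer (BytesIO is a BufferedIOBase subclass)...
print(load_cookies(BytesIO(b'[{"name": "session", "value": "abc"}]')))
# ...or a file opened in binary mode (a BufferedReader) satisfies the same hint:
# with open('cookies.json', 'rb') as f:
#     cookies = load_cookies(f)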

View File

@@ -15,7 +15,7 @@ import ipaddress
 import socket
 from urllib.parse import urlsplit
-from io import BytesIO
+from io import BufferedIOBase, BytesIO
 import base64
 from uuid import uuid4
@@ -162,7 +162,7 @@ class Lookyloo():
             return self.sanejs.sha512(sha512)
         return {'response': []}

-    def scrape(self, url: str, cookies_pseudofile: Optional[BytesIO]=None, depth: int=1, listing: bool=True, user_agent: Optional[str]=None, perma_uuid: str=None,
+    def scrape(self, url: str, cookies_pseudofile: Optional[BufferedIOBase]=None, depth: int=1, listing: bool=True, user_agent: Optional[str]=None, perma_uuid: str=None,
                os: str=None, browser: str=None) -> Union[bool, str]:
         if not url.startswith('http'):
             url = f'http://{url}'

View File

@@ -65,8 +65,8 @@ def submit():
 def scrape_web():
     if request.form.get('url'):
         # check if the post request has the file part
-        if 'file' in request.files and request.files['file'].filename:
-            cookie_file = BytesIO(request.files['file'].stream.read())
+        if 'cookies' in request.files and request.files['cookies'].filename:
+            cookie_file = request.files['cookies'].stream
         else:
             cookie_file = None
         perma_uuid = lookyloo.scrape(url=request.form.get('url'),
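
On the web side, the route now looks the upload up under the field name 'cookies' and forwards its stream untouched instead of copying it into a BytesIO; anything with a read() method is enough for json.load downstream. A minimal Flask sketch of that pattern (the app, route and return value are placeholders, not the real Lookyloo blueprint):

# Minimal sketch of the upload-handling pattern, not the real Lookyloo route.
from flask import Flask, request

app = Flask(__name__)

@app.route('/scrape', methods=['POST'])
def scrape_web():
    # The upload only appears in request.files if the form field is named
    # 'cookies' and the form was posted as multipart/form-data.
    if 'cookies' in request.files and request.files['cookies'].filename:
        cookie_file = request.files['cookies'].stream  # file-like, passed straight through
    else:
        cookie_file = None
    # ... hand cookie_file to lookyloo.scrape(..., cookies_pseudofile=cookie_file)
    return 'ok'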

View File

@@ -8,7 +8,7 @@
        alt="Lookyloo" width="500">
 </center>
 </br>
-<form role="form" action="scrape" method=post>
+<form role="form" action="scrape" method=post enctype=multipart/form-data>
   <div class="form-group row">
     <label for="url" class="col-sm-2 col-form-label">URL:</label>
     <div class="col-sm-10">
@@ -44,7 +44,7 @@
   <div class="form-group row">
     <label for="cookies" class="col-sm-6">Cookies (JSON export from the Firefox plugin Cookie Quick Manager)</label>
     <div class="col-sm-4">
-      <input type="file" class="form-control-file" id="cookies">
+      <input type="file" class="form-control-file" id="cookies" name="cookies">
     </div>
   </div>
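
The two template attributes are what make the route change work: browsers only submit a file input that carries a name attribute, and the body only reaches request.files when the form is encoded as multipart/form-data. A hedged sketch of exercising this with Flask's test client, reusing the placeholder app from the previous sketch (URL and cookie payload are made up):

# Sketch only: drives the placeholder /scrape route above with Flask's test
# client; the file lands in request.files['cookies'] because the request is
# multipart/form-data and the field is named 'cookies'.
from io import BytesIO

client = app.test_client()
response = client.post('/scrape',
                       data={'url': 'https://example.com',
                             'cookies': (BytesIO(b'[]'), 'cookies.json')},
                       content_type='multipart/form-data')
print(response.status_code)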