chg: Make mypy happy

pull/79/head
Raphaël Vinot 2020-06-25 17:08:11 +02:00
parent b818055f32
commit 98495c40a9
4 changed files with 25 additions and 25 deletions

View File

@@ -155,7 +155,7 @@ def ua_parser(html_content: str) -> Dict[str, Any]:
uas = soup.find_all('textarea')[1].text
except Exception:
traceback.print_exc()
return
return {}
to_store: Dict[str, Any] = {'by_frequency': []}
for ua in json.loads(uas):

View File

@@ -84,16 +84,16 @@ class Lookyloo():
uas = Counter([entry.split('|', 1)[1] for entry in entries])
for ua, count in uas.most_common():
parsed_ua = UserAgent(ua)
if not parsed_ua.platform or not parsed_ua.browser:
if not parsed_ua.platform or not parsed_ua.browser: # type: ignore
continue
if parsed_ua.platform not in to_store:
to_store[parsed_ua.platform] = {}
if f'{parsed_ua.browser} {parsed_ua.version}' not in to_store[parsed_ua.platform]:
to_store[parsed_ua.platform][f'{parsed_ua.browser} {parsed_ua.version}'] = []
to_store[parsed_ua.platform][f'{parsed_ua.browser} {parsed_ua.version}'].append(parsed_ua.string)
to_store['by_frequency'].append({'os': parsed_ua.platform,
'browser': f'{parsed_ua.browser} {parsed_ua.version}',
'useragent': parsed_ua.string})
if parsed_ua.platform not in to_store: # type: ignore
to_store[parsed_ua.platform] = {} # type: ignore
if f'{parsed_ua.browser} {parsed_ua.version}' not in to_store[parsed_ua.platform]: # type: ignore
to_store[parsed_ua.platform][f'{parsed_ua.browser} {parsed_ua.version}'] = [] # type: ignore
to_store[parsed_ua.platform][f'{parsed_ua.browser} {parsed_ua.version}'].append(parsed_ua.string) # type: ignore
to_store['by_frequency'].append({'os': parsed_ua.platform, # type: ignore
'browser': f'{parsed_ua.browser} {parsed_ua.version}', # type: ignore
'useragent': parsed_ua.string}) # type: ignore
with self_generated_ua_file.open('w') as f:
json.dump(to_store, f, indent=2)
@@ -354,20 +354,20 @@ class Lookyloo():
return
ua = UserAgent(tree.root_hartree.user_agent)
to_dump = {}
if ua.platform:
to_dump['os'] = ua.platform
if ua.browser:
if ua.version:
to_dump['browser'] = f'{ua.browser} {ua.version}'
if ua.platform: # type: ignore
to_dump['os'] = ua.platform # type: ignore
if ua.browser: # type: ignore
if ua.version: # type: ignore
to_dump['browser'] = f'{ua.browser} {ua.version}' # type: ignore
else:
to_dump['browser'] = ua.browser
if ua.language:
to_dump['language'] = ua.language
to_dump['browser'] = ua.browser # type: ignore
if ua.language: # type: ignore
to_dump['language'] = ua.language # type: ignore
if not to_dump:
# UA not recognized
self.logger.info(f'Unable to recognize the User agent: {ua}')
to_dump['user_agent'] = ua.string
to_dump['user_agent'] = ua.string # type: ignore
with metafile.open('w') as f:
json.dump(to_dump, f)

View File

@@ -86,7 +86,7 @@ class SaneJavaScript():
with sanejs_unknowns.open() as f:
unknown_hashes = [line.strip() for line in f.readlines()]
to_return = {h: details for h, details in self.skip_lookup.items() if h in sha512}
to_return: Dict[str, Union[str, List[str]]] = {h: details for h, details in self.skip_lookup.items() if h in sha512}
to_lookup = [h for h in hashes if h not in self.skip_lookup]
if not force:

View File

@@ -62,7 +62,9 @@ app.jinja_env.globals.update(sizeof_fmt=sizeof_fmt)
@app.after_request
def after_request(response):
lookyloo.cache_user_agents(request.headers.get('User-Agent'), request.remote_addr)
ua = request.headers.get('User-Agent')
if ua:
lookyloo.cache_user_agents(ua, request.remote_addr)
return response
@@ -121,14 +123,12 @@ def scrape_web():
user_agent=request.form.get('user_agent'),
os=request.form.get('os'), browser=request.form.get('browser'))
return redirect(url_for('tree', tree_uuid=perma_uuid))
user_agents: Dict[str, Any] = {}
if lookyloo.get_config('use_user_agents_users'):
lookyloo.build_ua_file()
# NOTE: For now, just generate the file, so we have an idea of the size
# user_agents = get_user_agents('own_user_agents')
user_agents = {}
if not user_agents:
user_agents = get_user_agents()
else:
if not user_agents:
user_agents = get_user_agents()
user_agents.pop('by_frequency')
return render_template('scrape.html', user_agents=user_agents)