# Copyright 2014-2016 OpenMarket Ltd
# Copyright 2020-2021 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
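
# This resource serves Matrix media thumbnail requests (e.g.
# GET /_matrix/media/r0/thumbnail/{serverName}/{mediaId}?width=..&height=..)
# for both local and remote media, either returning a pre-generated
# thumbnail or, when dynamic thumbnails are enabled, generating one of the
# exact requested size on demand.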

import logging

from typing import TYPE_CHECKING, Any, Dict, List, Optional

from twisted.web.server import Request

from synapse.api.errors import SynapseError
from synapse.http.server import DirectServeJsonResource, set_cors_headers
from synapse.http.servlet import parse_integer, parse_string
from synapse.rest.media.v1.media_storage import MediaStorage

from ._base import (
    FileInfo,
    parse_media_id,
    respond_404,
    respond_with_file,
    respond_with_responder,
)

if TYPE_CHECKING:
    from synapse.rest.media.v1.media_repository import MediaRepository
    from synapse.server import HomeServer

logger = logging.getLogger(__name__)


class ThumbnailResource(DirectServeJsonResource):
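    """Serves thumbnails of local and remote media.

    When dynamic thumbnails are enabled, a thumbnail of the exact requested
    size is generated on demand; otherwise the closest pre-generated
    thumbnail is selected and returned.
    """
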
    isLeaf = True

    def __init__(
        self,
        hs: "HomeServer",
        media_repo: "MediaRepository",
        media_storage: MediaStorage,
    ):
        super().__init__()

        self.store = hs.get_datastore()
        self.media_repo = media_repo
        self.media_storage = media_storage
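        # When dynamic thumbnails are enabled, thumbnails of the exact
        # requested size are generated on demand rather than picking the
        # closest pre-generated one.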
        self.dynamic_thumbnails = hs.config.dynamic_thumbnails
        self.server_name = hs.hostname

    async def _async_render_GET(self, request: Request) -> None:
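        """Handle a thumbnail request for local or remote media, serving an
        existing thumbnail or generating one on demand when dynamic
        thumbnails are enabled.
        """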
        set_cors_headers(request)
        server_name, media_id, _ = parse_media_id(request)
        width = parse_integer(request, "width", required=True)
        height = parse_integer(request, "height", required=True)
        method = parse_string(request, "method", "scale")
        m_type = parse_string(request, "type", "image/png")

        if server_name == self.server_name:
            if self.dynamic_thumbnails:
                await self._select_or_generate_local_thumbnail(
                    request, media_id, width, height, method, m_type
                )
            else:
                await self._respond_local_thumbnail(
                    request, media_id, width, height, method, m_type
                )
            self.media_repo.mark_recently_accessed(None, media_id)
        else:
            if self.dynamic_thumbnails:
                await self._select_or_generate_remote_thumbnail(
                    request, server_name, media_id, width, height, method, m_type
                )
            else:
                await self._respond_remote_thumbnail(
                    request, server_name, media_id, width, height, method, m_type
                )
            self.media_repo.mark_recently_accessed(server_name, media_id)

    async def _respond_local_thumbnail(
        self,
        request: Request,
        media_id: str,
        width: int,
        height: int,
        method: str,
        m_type: str,
    ) -> None:
        media_info = await self.store.get_local_media(media_id)

        if not media_info:
            respond_404(request)
            return
        if media_info["quarantined_by"]:
            logger.info("Media is quarantined")
            respond_404(request)
            return

        thumbnail_infos = await self.store.get_local_media_thumbnails(media_id)
        await self._select_and_respond_with_thumbnail(
            request,
            width,
            height,
            method,
            m_type,
            thumbnail_infos,
            media_id,
            media_id,
            url_cache=media_info["url_cache"],
            server_name=None,
        )

    async def _select_or_generate_local_thumbnail(
        self,
        request: Request,
        media_id: str,
        desired_width: int,
        desired_height: int,
        desired_method: str,
        desired_type: str,
    ) -> None:
        media_info = await self.store.get_local_media(media_id)

        if not media_info:
            respond_404(request)
            return
        if media_info["quarantined_by"]:
            logger.info("Media is quarantined")
            respond_404(request)
            return

        thumbnail_infos = await self.store.get_local_media_thumbnails(media_id)
        for info in thumbnail_infos:
            t_w = info["thumbnail_width"] == desired_width
            t_h = info["thumbnail_height"] == desired_height
            t_method = info["thumbnail_method"] == desired_method
            t_type = info["thumbnail_type"] == desired_type

            if t_w and t_h and t_method and t_type:
                file_info = FileInfo(
                    server_name=None,
                    file_id=media_id,
                    url_cache=media_info["url_cache"],
                    thumbnail=True,
                    thumbnail_width=info["thumbnail_width"],
                    thumbnail_height=info["thumbnail_height"],
                    thumbnail_type=info["thumbnail_type"],
                    thumbnail_method=info["thumbnail_method"],
                )

                t_type = file_info.thumbnail_type
                t_length = info["thumbnail_length"]

                responder = await self.media_storage.fetch_media(file_info)
                if responder:
                    await respond_with_responder(request, responder, t_type, t_length)
                    return

        logger.debug("We don't have a thumbnail of that size. Generating")

        # Okay, so we generate one.
        file_path = await self.media_repo.generate_local_exact_thumbnail(
            media_id,
            desired_width,
            desired_height,
            desired_method,
            desired_type,
            url_cache=media_info["url_cache"],
        )

        if file_path:
            await respond_with_file(request, desired_type, file_path)
        else:
            logger.warning("Failed to generate thumbnail")
            raise SynapseError(400, "Failed to generate thumbnail.")

    async def _select_or_generate_remote_thumbnail(
        self,
        request: Request,
        server_name: str,
        media_id: str,
        desired_width: int,
        desired_height: int,
        desired_method: str,
        desired_type: str,
    ) -> None:
        media_info = await self.media_repo.get_remote_media_info(server_name, media_id)

        thumbnail_infos = await self.store.get_remote_media_thumbnails(
            server_name, media_id
        )

        file_id = media_info["filesystem_id"]

        for info in thumbnail_infos:
            t_w = info["thumbnail_width"] == desired_width
            t_h = info["thumbnail_height"] == desired_height
            t_method = info["thumbnail_method"] == desired_method
            t_type = info["thumbnail_type"] == desired_type

            if t_w and t_h and t_method and t_type:
                file_info = FileInfo(
                    server_name=server_name,
                    file_id=media_info["filesystem_id"],
                    thumbnail=True,
                    thumbnail_width=info["thumbnail_width"],
                    thumbnail_height=info["thumbnail_height"],
                    thumbnail_type=info["thumbnail_type"],
                    thumbnail_method=info["thumbnail_method"],
                )

                t_type = file_info.thumbnail_type
                t_length = info["thumbnail_length"]

                responder = await self.media_storage.fetch_media(file_info)
                if responder:
                    await respond_with_responder(request, responder, t_type, t_length)
                    return

        logger.debug("We don't have a thumbnail of that size. Generating")

        # Okay, so we generate one.
        file_path = await self.media_repo.generate_remote_exact_thumbnail(
            server_name,
            file_id,
            media_id,
            desired_width,
            desired_height,
            desired_method,
            desired_type,
        )

        if file_path:
            await respond_with_file(request, desired_type, file_path)
        else:
            logger.warning("Failed to generate thumbnail")
            raise SynapseError(400, "Failed to generate thumbnail.")

    async def _respond_remote_thumbnail(
        self,
        request: Request,
        server_name: str,
        media_id: str,
        width: int,
        height: int,
        method: str,
        m_type: str,
    ) -> None:
        # TODO: Don't download the whole remote file
        # We should proxy the thumbnail from the remote server instead of
        # downloading the remote file and generating our own thumbnails.
        media_info = await self.media_repo.get_remote_media_info(server_name, media_id)

        thumbnail_infos = await self.store.get_remote_media_thumbnails(
            server_name, media_id
        )
        await self._select_and_respond_with_thumbnail(
            request,
            width,
            height,
            method,
            m_type,
            thumbnail_infos,
            media_id,
            media_info["filesystem_id"],
            url_cache=None,
            server_name=server_name,
        )

    async def _select_and_respond_with_thumbnail(
        self,
        request: Request,
        desired_width: int,
        desired_height: int,
        desired_method: str,
        desired_type: str,
        thumbnail_infos: List[Dict[str, Any]],
        media_id: str,
        file_id: str,
        url_cache: Optional[str] = None,
        server_name: Optional[str] = None,
    ) -> None:
        """
        Respond to a request with an appropriate thumbnail from the previously generated thumbnails.

        Args:
            request: The incoming request.
            desired_width: The desired width; the returned thumbnail may be larger than this.
            desired_height: The desired height; the returned thumbnail may be larger than this.
            desired_method: The desired method used to generate the thumbnail.
            desired_type: The desired content-type of the thumbnail.
            thumbnail_infos: A list of dictionaries of candidate thumbnails.
            media_id: The ID of the media that a thumbnail is being requested for.
            file_id: The ID of the file backing the media (the media ID for local
                media, or the filesystem ID for remote media).
            url_cache: The URL cache value.
            server_name: The server name, if this is a remote thumbnail.
        """
        if thumbnail_infos:
            file_info = self._select_thumbnail(
                desired_width,
                desired_height,
                desired_method,
                desired_type,
                thumbnail_infos,
                file_id,
                url_cache,
                server_name,
            )
            if not file_info:
                logger.info("Couldn't find a thumbnail matching the desired inputs")
                respond_404(request)
                return

            responder = await self.media_storage.fetch_media(file_info)
            if responder:
                await respond_with_responder(
                    request,
                    responder,
                    file_info.thumbnail_type,
                    file_info.thumbnail_length,
                )
                return

            # If we can't find the thumbnail we regenerate it. This can happen
            # if e.g. we've deleted the thumbnails but still have the original
            # image somewhere.
            #
            # Since we have an entry for the thumbnail in the DB we a) know we
            # have successfully generated the thumbnail in the past (so we
            # don't need to worry about repeatedly failing to generate
            # thumbnails), and b) have already calculated the appropriate
            # width/height/method, so we can just call the "generate exact"
            # methods.

            # First, let's check that we do actually still have the original
            # image. This will raise a 404 if we don't.
            # TODO: We should refetch the thumbnails for remote media.
            await self.media_storage.ensure_media_is_in_local_cache(
                FileInfo(server_name, file_id, url_cache=url_cache)
            )

            if server_name:
                await self.media_repo.generate_remote_exact_thumbnail(
                    server_name,
                    file_id=file_id,
                    media_id=media_id,
                    t_width=file_info.thumbnail_width,
                    t_height=file_info.thumbnail_height,
                    t_method=file_info.thumbnail_method,
                    t_type=file_info.thumbnail_type,
                )
            else:
                await self.media_repo.generate_local_exact_thumbnail(
                    media_id=media_id,
                    t_width=file_info.thumbnail_width,
                    t_height=file_info.thumbnail_height,
                    t_method=file_info.thumbnail_method,
                    t_type=file_info.thumbnail_type,
                    url_cache=url_cache,
                )

            responder = await self.media_storage.fetch_media(file_info)
            await respond_with_responder(
                request,
                responder,
                file_info.thumbnail_type,
                file_info.thumbnail_length,
            )
        else:
            logger.info("Failed to find any generated thumbnails")
            respond_404(request)

    def _select_thumbnail(
        self,
        desired_width: int,
        desired_height: int,
        desired_method: str,
        desired_type: str,
        thumbnail_infos: List[Dict[str, Any]],
        file_id: str,
        url_cache: Optional[str],
        server_name: Optional[str],
    ) -> Optional[FileInfo]:
        """
        Choose an appropriate thumbnail from the previously generated thumbnails.

        Args:
            desired_width: The desired width; the returned thumbnail may be larger than this.
            desired_height: The desired height; the returned thumbnail may be larger than this.
            desired_method: The desired method used to generate the thumbnail.
            desired_type: The desired content-type of the thumbnail.
            thumbnail_infos: A list of dictionaries of candidate thumbnails.
            file_id: The ID of the media that a thumbnail is being requested for.
            url_cache: The URL cache value.
            server_name: The server name, if this is a remote thumbnail.

        Returns:
            The thumbnail which best matches the desired parameters, or None if
            no suitable thumbnail was found.
        """
        desired_method = desired_method.lower()

        # The chosen thumbnail.
        thumbnail_info = None

        d_w = desired_width
        d_h = desired_height
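
        # Candidates of the requested method are scored with tuples of
        # "quality" values (lower is better) and min() picks the
        # lexicographically smallest tuple. For "crop" the tuple is
        # (aspect-ratio mismatch, smaller-than-requested penalty, size
        # difference, content-type mismatch, file size); for "scale" it is
        # (size difference, content-type mismatch, file size). Candidates at
        # least as large as the request in width or height go into the
        # primary list; smaller ones are only used as a fallback.
        # Hypothetical example: for a 64x64 "crop" request, a 96x96 candidate
        # scores aspect_quality == 0 and size_quality == 1024, beating a
        # 128x96 candidate of the same type (aspect_quality == 2048), while a
        # 32x32 candidate can only land in the fallback list.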
        if desired_method == "crop":
            # Thumbnails that match equal or larger sizes of desired width/height.
            crop_info_list = []
            # Other thumbnails.
            crop_info_list2 = []
            for info in thumbnail_infos:
                # Skip thumbnails generated with different methods.
                if info["thumbnail_method"] != "crop":
                    continue

                t_w = info["thumbnail_width"]
                t_h = info["thumbnail_height"]
                aspect_quality = abs(d_w * t_h - d_h * t_w)
                min_quality = 0 if d_w <= t_w and d_h <= t_h else 1
                size_quality = abs((d_w - t_w) * (d_h - t_h))
                type_quality = desired_type != info["thumbnail_type"]
                length_quality = info["thumbnail_length"]
                if t_w >= d_w or t_h >= d_h:
                    crop_info_list.append(
                        (
                            aspect_quality,
                            min_quality,
                            size_quality,
                            type_quality,
                            length_quality,
                            info,
                        )
                    )
                else:
                    crop_info_list2.append(
                        (
                            aspect_quality,
                            min_quality,
                            size_quality,
                            type_quality,
                            length_quality,
                            info,
                        )
                    )
            if crop_info_list:
                thumbnail_info = min(crop_info_list)[-1]
            elif crop_info_list2:
                thumbnail_info = min(crop_info_list2)[-1]
        elif desired_method == "scale":
            # Thumbnails that match equal or larger sizes of desired width/height.
            info_list = []
            # Other thumbnails.
            info_list2 = []

            for info in thumbnail_infos:
                # Skip thumbnails generated with different methods.
                if info["thumbnail_method"] != "scale":
                    continue

                t_w = info["thumbnail_width"]
                t_h = info["thumbnail_height"]
                size_quality = abs((d_w - t_w) * (d_h - t_h))
                type_quality = desired_type != info["thumbnail_type"]
                length_quality = info["thumbnail_length"]
                if t_w >= d_w or t_h >= d_h:
                    info_list.append((size_quality, type_quality, length_quality, info))
                else:
                    info_list2.append(
                        (size_quality, type_quality, length_quality, info)
                    )
            if info_list:
                thumbnail_info = min(info_list)[-1]
            elif info_list2:
                thumbnail_info = min(info_list2)[-1]

        if thumbnail_info:
            return FileInfo(
                file_id=file_id,
                url_cache=url_cache,
                server_name=server_name,
                thumbnail=True,
                thumbnail_width=thumbnail_info["thumbnail_width"],
                thumbnail_height=thumbnail_info["thumbnail_height"],
                thumbnail_type=thumbnail_info["thumbnail_type"],
                thumbnail_method=thumbnail_info["thumbnail_method"],
                thumbnail_length=thumbnail_info["thumbnail_length"],
            )

        # No matching thumbnail was found.
        return None