2014-09-03 18:04:00 +02:00
|
|
|
# -*- coding: utf-8 -*-
|
2019-09-23 13:28:01 +02:00
|
|
|
# Copyright 2014, 2015 OpenMarket Ltd
|
2014-09-03 18:04:00 +02:00
|
|
|
#
|
|
|
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
|
|
# you may not use this file except in compliance with the License.
|
|
|
|
# You may obtain a copy of the License at
|
|
|
|
#
|
|
|
|
# http://www.apache.org/licenses/LICENSE-2.0
|
|
|
|
#
|
|
|
|
# Unless required by applicable law or agreed to in writing, software
|
|
|
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
|
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
|
|
# See the License for the specific language governing permissions and
|
|
|
|
# limitations under the License.
|
2019-08-13 13:49:28 +02:00
|
|
|
|
2018-12-21 16:04:57 +01:00
|
|
|
import os
|
2015-08-12 11:55:27 +02:00
|
|
|
from collections import namedtuple
|
2019-10-02 14:29:01 +02:00
|
|
|
from typing import Dict, List
|
2015-08-12 11:55:27 +02:00
|
|
|
|
2019-09-11 15:00:37 +02:00
|
|
|
from synapse.python_dependencies import DependencyException, check_requirements
|
2018-01-16 16:44:08 +01:00
|
|
|
from synapse.util.module_loader import load_module
|
|
|
|
|
2018-07-09 08:09:20 +02:00
|
|
|
from ._base import Config, ConfigError
|
2016-04-13 12:57:46 +02:00
|
|
|
|
2019-03-19 11:06:40 +01:00
|
|
|
# Thumbnail dimensions precalculated for every uploaded image when the
# homeserver config does not supply an explicit "thumbnail_sizes" list.
# "crop" cuts to the exact aspect ratio; "scale" preserves the original one.
DEFAULT_THUMBNAIL_SIZES = [
    dict(width=32, height=32, method="crop"),
    dict(width=96, height=96, method="crop"),
    dict(width=320, height=240, method="scale"),
    dict(width=640, height=480, method="scale"),
    dict(width=800, height=600, method="scale"),
]
|
|
|
|
|
|
|
|
THUMBNAIL_SIZE_YAML = """\
|
|
|
|
# - width: %(width)i
|
|
|
|
# height: %(height)i
|
|
|
|
# method: %(method)s
|
|
|
|
"""
|
|
|
|
|
2015-08-12 11:55:27 +02:00
|
|
|
# A single precalculated thumbnail: target dimensions, the scaling method
# ("crop" or "scale") and the media type the generated thumbnail will have.
ThumbnailRequirement = namedtuple(
    "ThumbnailRequirement", "width height method media_type"
)


# Options consumed by StorageProviderWrapper for one storage provider:
# whether newly uploaded local files are stored, whether newly downloaded
# remote files are stored, and whether local uploads wait for the store.
MediaStorageProviderConfig = namedtuple(
    "MediaStorageProviderConfig", "store_local store_remote store_synchronous"
)


def parse_thumbnail_requirements(thumbnail_sizes):
    """ Takes a list of dictionaries with "width", "height", and "method" keys
    and creates a map from image media types to the thumbnail size, thumbnailing
    method, and thumbnail media type to precalculate

    Args:
        thumbnail_sizes(list): List of dicts with "width", "height", and
            "method" keys

    Returns:
        Dictionary mapping from media type string to list of
        ThumbnailRequirement tuples.
    """
    requirements = {}  # type: Dict[str, List]
    for size in thumbnail_sizes:
        width = size["width"]
        height = size["height"]
        method = size["method"]
        as_jpeg = ThumbnailRequirement(width, height, method, "image/jpeg")
        as_png = ThumbnailRequirement(width, height, method, "image/png")
        # webp uploads get jpeg thumbnails; gif uploads get png thumbnails.
        for media_type, requirement in (
            ("image/jpeg", as_jpeg),
            ("image/webp", as_jpeg),
            ("image/gif", as_png),
            ("image/png", as_png),
        ):
            requirements.setdefault(media_type, []).append(requirement)
    return {media_type: tuple(reqs) for media_type, reqs in requirements.items()}
|
2014-09-03 18:04:00 +02:00
|
|
|
|
2014-10-30 12:10:17 +01:00
|
|
|
|
2014-09-03 18:04:00 +02:00
|
|
|
class ContentRepositoryConfig(Config):
    """Config for the media repository: upload limits, storage providers,
    thumbnailing and URL previews."""

    section = "media"

    def read_config(self, config, **kwargs):
        """Parse the media section of the homeserver config into attributes.

        Raises:
            ConfigError: if both 'backup_media_store_path' and
                'media_storage_providers' are set, if url_preview
                dependencies are missing, or if url previewing is enabled
                without an IP range blacklist.
        """

        # Only enable the media repo if either the media repo is enabled or the
        # current worker app is the media repo.
        # NOTE(review): self.enable_media_repo is set by a sibling config
        # class sharing this Config instance — not visible in this file.
        if (
            self.enable_media_repo is False
            and config.get("worker_app") != "synapse.app.media_repository"
        ):
            self.can_load_media_repo = False
            return
        else:
            self.can_load_media_repo = True

        # Whether this instance should be the one to run the background jobs to
        # e.g clean up old URL previews. Defaults to None (no instance named).
        self.media_instance_running_background_jobs = config.get(
            "media_instance_running_background_jobs",
        )

        self.max_upload_size = self.parse_size(config.get("max_upload_size", "10M"))
        self.max_image_pixels = self.parse_size(config.get("max_image_pixels", "32M"))
        self.max_spider_size = self.parse_size(config.get("max_spider_size", "10M"))

        self.media_store_path = self.ensure_directory(
            config.get("media_store_path", "media_store")
        )

        # Legacy alternative to 'media_storage_providers': a single extra
        # file-system location mirroring the media store.
        backup_media_store_path = config.get("backup_media_store_path")

        synchronous_backup_media_store = config.get(
            "synchronous_backup_media_store", False
        )

        storage_providers = config.get("media_storage_providers", [])

        if backup_media_store_path:
            if storage_providers:
                raise ConfigError(
                    "Cannot use both 'backup_media_store_path' and 'storage_providers'"
                )

            # Translate the legacy option into an equivalent provider entry.
            storage_providers = [
                {
                    "module": "file_system",
                    "store_local": True,
                    "store_synchronous": synchronous_backup_media_store,
                    "store_remote": True,
                    "config": {"directory": backup_media_store_path},
                }
            ]

        # This is a list of config that can be used to create the storage
        # providers. The entries are tuples of (Class, class_config,
        # MediaStorageProviderConfig), where Class is the class of the provider,
        # the class_config the config to pass to it, and
        # MediaStorageProviderConfig are options for StorageProviderWrapper.
        #
        # We don't create the storage providers here as not all workers need
        # them to be started.
        self.media_storage_providers = []  # type: List[tuple]

        for provider_config in storage_providers:
            # We special case the module "file_system" so as not to need to
            # expose FileStorageProviderBackend
            if provider_config["module"] == "file_system":
                provider_config["module"] = (
                    "synapse.rest.media.v1.storage_provider"
                    ".FileStorageProviderBackend"
                )

            provider_class, parsed_config = load_module(provider_config)

            wrapper_config = MediaStorageProviderConfig(
                provider_config.get("store_local", False),
                provider_config.get("store_remote", False),
                provider_config.get("store_synchronous", False),
            )

            self.media_storage_providers.append(
                (provider_class, parsed_config, wrapper_config)
            )

        self.dynamic_thumbnails = config.get("dynamic_thumbnails", False)
        self.thumbnail_requirements = parse_thumbnail_requirements(
            config.get("thumbnail_sizes", DEFAULT_THUMBNAIL_SIZES)
        )
        self.url_preview_enabled = config.get("url_preview_enabled", False)
        if self.url_preview_enabled:
            try:
                check_requirements("url_preview")

            except DependencyException as e:
                raise ConfigError(e.message)

            if "url_preview_ip_range_blacklist" not in config:
                raise ConfigError(
                    "For security, you must specify an explicit target IP address "
                    "blacklist in url_preview_ip_range_blacklist for url previewing "
                    "to work"
                )

            # netaddr is a dependency for url_preview
            from netaddr import IPSet

            self.url_preview_ip_range_blacklist = IPSet(
                config["url_preview_ip_range_blacklist"]
            )

            # we always blacklist '0.0.0.0' and '::', which are supposed to be
            # unroutable addresses.
            self.url_preview_ip_range_blacklist.update(["0.0.0.0", "::"])

            self.url_preview_ip_range_whitelist = IPSet(
                config.get("url_preview_ip_range_whitelist", ())
            )

            self.url_preview_url_blacklist = config.get("url_preview_url_blacklist", ())

            self.url_preview_accept_language = config.get(
                "url_preview_accept_language"
            ) or ["en"]

    def generate_config_section(self, data_dir_path, **kwargs):
        """Return the commented sample YAML for the '## Media Store ##'
        section, with paths rooted at *data_dir_path*."""
        media_store = os.path.join(data_dir_path, "media_store")
        # NOTE(review): uploads_path is not referenced by the template below;
        # presumably retained for older templates via locals() — confirm
        # before removing.
        uploads_path = os.path.join(data_dir_path, "uploads")

        formatted_thumbnail_sizes = "".join(
            THUMBNAIL_SIZE_YAML % s for s in DEFAULT_THUMBNAIL_SIZES
        )
        # strip final NL
        formatted_thumbnail_sizes = formatted_thumbnail_sizes[:-1]

        # Raw string: the IPv4 regex example below contains backslashes.
        return (
            r"""
        ## Media Store ##

        # Enable the media store service in the Synapse master. Uncomment the
        # following if you are using a separate media store worker.
        #
        #enable_media_repo: false

        # Directory where uploaded images and attachments are stored.
        #
        media_store_path: "%(media_store)s"

        # Media storage providers allow media to be stored in different
        # locations.
        #
        #media_storage_providers:
        #  - module: file_system
        #    # Whether to store newly uploaded local files
        #    store_local: false
        #    # Whether to store newly downloaded remote files
        #    store_remote: false
        #    # Whether to wait for successful storage for local uploads
        #    store_synchronous: false
        #    config:
        #       directory: /mnt/some/other/directory

        # The largest allowed upload size in bytes
        #
        #max_upload_size: 10M

        # Maximum number of pixels that will be thumbnailed
        #
        #max_image_pixels: 32M

        # Whether to generate new thumbnails on the fly to precisely match
        # the resolution requested by the client. If true then whenever
        # a new resolution is requested by the client the server will
        # generate a new thumbnail. If false the server will pick a thumbnail
        # from a precalculated list.
        #
        #dynamic_thumbnails: false

        # List of thumbnails to precalculate when an image is uploaded.
        #
        #thumbnail_sizes:
%(formatted_thumbnail_sizes)s

        # Is the preview URL API enabled?
        #
        # 'false' by default: uncomment the following to enable it (and specify a
        # url_preview_ip_range_blacklist blacklist).
        #
        #url_preview_enabled: true

        # List of IP address CIDR ranges that the URL preview spider is denied
        # from accessing. There are no defaults: you must explicitly
        # specify a list for URL previewing to work. You should specify any
        # internal services in your network that you do not want synapse to try
        # to connect to, otherwise anyone in any Matrix room could cause your
        # synapse to issue arbitrary GET requests to your internal services,
        # causing serious security issues.
        #
        # (0.0.0.0 and :: are always blacklisted, whether or not they are explicitly
        # listed here, since they correspond to unroutable addresses.)
        #
        # This must be specified if url_preview_enabled is set. It is recommended that
        # you uncomment the following list as a starting point.
        #
        #url_preview_ip_range_blacklist:
        #  - '127.0.0.0/8'
        #  - '10.0.0.0/8'
        #  - '172.16.0.0/12'
        #  - '192.168.0.0/16'
        #  - '100.64.0.0/10'
        #  - '169.254.0.0/16'
        #  - '::1/128'
        #  - 'fe80::/64'
        #  - 'fc00::/7'

        # List of IP address CIDR ranges that the URL preview spider is allowed
        # to access even if they are specified in url_preview_ip_range_blacklist.
        # This is useful for specifying exceptions to wide-ranging blacklisted
        # target IP ranges - e.g. for enabling URL previews for a specific private
        # website only visible in your network.
        #
        #url_preview_ip_range_whitelist:
        #   - '192.168.1.1'

        # Optional list of URL matches that the URL preview spider is
        # denied from accessing. You should use url_preview_ip_range_blacklist
        # in preference to this, otherwise someone could define a public DNS
        # entry that points to a private IP address and circumvent the blacklist.
        # This is more useful if you know there is an entire shape of URL that
        # you know that will never want synapse to try to spider.
        #
        # Each list entry is a dictionary of url component attributes as returned
        # by urlparse.urlsplit as applied to the absolute form of the URL. See
        # https://docs.python.org/2/library/urlparse.html#urlparse.urlsplit
        # The values of the dictionary are treated as an filename match pattern
        # applied to that component of URLs, unless they start with a ^ in which
        # case they are treated as a regular expression match. If all the
        # specified component matches for a given list item succeed, the URL is
        # blacklisted.
        #
        #url_preview_url_blacklist:
        #  # blacklist any URL with a username in its URI
        #  - username: '*'
        #
        #  # blacklist all *.google.com URLs
        #  - netloc: 'google.com'
        #  - netloc: '*.google.com'
        #
        #  # blacklist all plain HTTP URLs
        #  - scheme: 'http'
        #
        #  # blacklist http(s)://www.acme.com/foo
        #  - netloc: 'www.acme.com'
        #    path: '/foo'
        #
        #  # blacklist any URL with a literal IPv4 address
        #  - netloc: '^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$'

        # The largest allowed URL preview spidering size in bytes
        #
        #max_spider_size: 10M

        # A list of values for the Accept-Language HTTP header used when
        # downloading webpages during URL preview generation. This allows
        # Synapse to specify the preferred languages that URL previews should
        # be in when communicating with remote servers.
        #
        # Each value is a IETF language tag; a 2-3 letter identifier for a
        # language, optionally followed by subtags separated by '-', specifying
        # a country or region variant.
        #
        # Multiple values can be provided, and a weight can be added to each by
        # using quality value syntax (;q=). '*' translates to any language.
        #
        # Defaults to "en".
        #
        # Example:
        #
        # url_preview_accept_language:
        #   - en-UK
        #   - en-US;q=0.9
        #   - fr;q=0.8
        #   - *;q=0.7
        #
        #url_preview_accept_language:
        #   - en
        """
            % locals()
        )
|