Merge remote-tracking branch 'origin/develop' into matrix-org-hotfixes
commit
ab0a5f1972
|
@ -67,7 +67,7 @@ jobs:
|
||||||
|
|
||||||
# Deploy to the target directory.
|
# Deploy to the target directory.
|
||||||
- name: Deploy to gh pages
|
- name: Deploy to gh pages
|
||||||
uses: peaceiris/actions-gh-pages@bd8c6b06eba6b3d25d72b7a1767993c0aeee42e7 # v3.9.2
|
uses: peaceiris/actions-gh-pages@373f7f263a76c20808c831209c920827a82a2847 # v3.9.3
|
||||||
with:
|
with:
|
||||||
github_token: ${{ secrets.GITHUB_TOKEN }}
|
github_token: ${{ secrets.GITHUB_TOKEN }}
|
||||||
publish_dir: ./book
|
publish_dir: ./book
|
||||||
|
@ -97,7 +97,7 @@ jobs:
|
||||||
|
|
||||||
# Deploy to the target directory.
|
# Deploy to the target directory.
|
||||||
- name: Deploy to gh pages
|
- name: Deploy to gh pages
|
||||||
uses: peaceiris/actions-gh-pages@bd8c6b06eba6b3d25d72b7a1767993c0aeee42e7 # v3.9.2
|
uses: peaceiris/actions-gh-pages@373f7f263a76c20808c831209c920827a82a2847 # v3.9.3
|
||||||
with:
|
with:
|
||||||
github_token: ${{ secrets.GITHUB_TOKEN }}
|
github_token: ${{ secrets.GITHUB_TOKEN }}
|
||||||
publish_dir: ./dev-docs/_build/html
|
publish_dir: ./dev-docs/_build/html
|
||||||
|
|
|
@ -323,18 +323,18 @@ checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "serde"
|
name = "serde"
|
||||||
version = "1.0.158"
|
version = "1.0.159"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "771d4d9c4163ee138805e12c710dd365e4f44be8be0503cb1bb9eb989425d9c9"
|
checksum = "3c04e8343c3daeec41f58990b9d77068df31209f2af111e059e9fe9646693065"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"serde_derive",
|
"serde_derive",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "serde_derive"
|
name = "serde_derive"
|
||||||
version = "1.0.158"
|
version = "1.0.159"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "e801c1712f48475582b7696ac71e0ca34ebb30e09338425384269d9717c62cad"
|
checksum = "4c614d17805b093df4b147b51339e7e44bf05ef59fba1e45d83500bcfb4d8585"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"proc-macro2",
|
"proc-macro2",
|
||||||
"quote",
|
"quote",
|
||||||
|
@ -343,9 +343,9 @@ dependencies = [
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "serde_json"
|
name = "serde_json"
|
||||||
version = "1.0.94"
|
version = "1.0.95"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "1c533a59c9d8a93a09c6ab31f0fd5e5f4dd1b8fc9434804029839884765d04ea"
|
checksum = "d721eca97ac802aa7777b701877c8004d950fc142651367300d21c1cc0194744"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"itoa",
|
"itoa",
|
||||||
"ryu",
|
"ryu",
|
||||||
|
|
|
@ -0,0 +1 @@
|
||||||
|
Fix a long-standing bug that Synapse only used the [legacy appservice routes](https://spec.matrix.org/v1.6/application-service-api/#legacy-routes).
|
|
@ -0,0 +1 @@
|
||||||
|
Add experimental support for Unix sockets. Contributed by Jason Little.
|
|
@ -0,0 +1 @@
|
||||||
|
Fix copyright year in SSO footer template.
|
|
@ -0,0 +1 @@
|
||||||
|
Bump peaceiris/actions-gh-pages from 3.9.2 to 3.9.3.
|
|
@ -0,0 +1 @@
|
||||||
|
Bump serde from 1.0.158 to 1.0.159.
|
|
@ -0,0 +1 @@
|
||||||
|
Bump serde_json from 1.0.94 to 1.0.95.
|
|
@ -0,0 +1 @@
|
||||||
|
Build Debian packages for Ubuntu 23.04 (Lunar Lobster).
|
|
@ -0,0 +1 @@
|
||||||
|
Fix a rare bug introduced in Synapse 1.66.0 where initial syncs would fail when the user had been kicked from a faster joined room that had not finished syncing.
|
|
@ -0,0 +1 @@
|
||||||
|
Speed up membership queries for users with forgotten rooms.
|
|
@ -0,0 +1 @@
|
||||||
|
Note that Synapse 1.74 queued a rebuild of the user directory tables.
|
|
@ -88,6 +88,22 @@ process, for example:
|
||||||
dpkg -i matrix-synapse-py3_1.3.0+stretch1_amd64.deb
|
dpkg -i matrix-synapse-py3_1.3.0+stretch1_amd64.deb
|
||||||
```
|
```
|
||||||
|
|
||||||
|
# Upgrading to v1.81.0
|
||||||
|
|
||||||
|
## Application service path & authentication deprecations
|
||||||
|
|
||||||
|
Synapse now attempts the versioned appservice paths before falling back to the
|
||||||
|
[legacy paths](https://spec.matrix.org/v1.6/application-service-api/#legacy-routes).
|
||||||
|
Usage of the legacy routes should be considered deprecated.
|
||||||
|
|
||||||
|
Additionally, Synapse has supported sending the application service access token
|
||||||
|
via [the `Authorization` header](https://spec.matrix.org/v1.6/application-service-api/#authorization)
|
||||||
|
since v1.70.0. For backwards compatibility it is *also* sent as the `access_token`
|
||||||
|
query parameter. This is insecure and should be considered deprecated.
|
||||||
|
|
||||||
|
A future version of Synapse (v1.88.0 or later) will remove support for legacy
|
||||||
|
application service routes and query parameter authorization.
|
||||||
|
|
||||||
# Upgrading to v1.80.0
|
# Upgrading to v1.80.0
|
||||||
|
|
||||||
## Reporting events error code change
|
## Reporting events error code change
|
||||||
|
@ -183,6 +199,17 @@ Docker images and Debian packages need nothing specific as they already
|
||||||
include or specify ICU as an explicit dependency.
|
include or specify ICU as an explicit dependency.
|
||||||
|
|
||||||
|
|
||||||
|
## User directory rebuild
|
||||||
|
|
||||||
|
Synapse 1.74 queues a background update
|
||||||
|
[to rebuild the user directory](https://github.com/matrix-org/synapse/pull/14643),
|
||||||
|
in order to fix missing or erroneous entries.
|
||||||
|
|
||||||
|
When this update begins, the user directory will be cleared out and rebuilt from
|
||||||
|
scratch. User directory lookups will be incomplete until the rebuild completes.
|
||||||
|
Admins can monitor the rebuild's progress by using the
|
||||||
|
[Background update Admin API](usage/administration/admin_api/background_updates.md#status).
|
||||||
|
|
||||||
# Upgrading to v1.73.0
|
# Upgrading to v1.73.0
|
||||||
|
|
||||||
## Legacy Prometheus metric names have now been removed
|
## Legacy Prometheus metric names have now been removed
|
||||||
|
|
|
@ -28,6 +28,7 @@ DISTS = (
|
||||||
"ubuntu:focal", # 20.04 LTS (our EOL forced by Py38 on 2024-10-14)
|
"ubuntu:focal", # 20.04 LTS (our EOL forced by Py38 on 2024-10-14)
|
||||||
"ubuntu:jammy", # 22.04 LTS (EOL 2027-04)
|
"ubuntu:jammy", # 22.04 LTS (EOL 2027-04)
|
||||||
"ubuntu:kinetic", # 22.10 (EOL 2023-07-20)
|
"ubuntu:kinetic", # 22.10 (EOL 2023-07-20)
|
||||||
|
"ubuntu:lunar", # 23.04 (EOL 2024-01)
|
||||||
)
|
)
|
||||||
|
|
||||||
DESC = """\
|
DESC = """\
|
||||||
|
|
|
@ -41,7 +41,12 @@ from typing_extensions import ParamSpec
|
||||||
|
|
||||||
import twisted
|
import twisted
|
||||||
from twisted.internet import defer, error, reactor as _reactor
|
from twisted.internet import defer, error, reactor as _reactor
|
||||||
from twisted.internet.interfaces import IOpenSSLContextFactory, IReactorSSL, IReactorTCP
|
from twisted.internet.interfaces import (
|
||||||
|
IOpenSSLContextFactory,
|
||||||
|
IReactorSSL,
|
||||||
|
IReactorTCP,
|
||||||
|
IReactorUNIX,
|
||||||
|
)
|
||||||
from twisted.internet.protocol import ServerFactory
|
from twisted.internet.protocol import ServerFactory
|
||||||
from twisted.internet.tcp import Port
|
from twisted.internet.tcp import Port
|
||||||
from twisted.logger import LoggingFile, LogLevel
|
from twisted.logger import LoggingFile, LogLevel
|
||||||
|
@ -56,7 +61,7 @@ from synapse.app.phone_stats_home import start_phone_stats_home
|
||||||
from synapse.config import ConfigError
|
from synapse.config import ConfigError
|
||||||
from synapse.config._base import format_config_error
|
from synapse.config._base import format_config_error
|
||||||
from synapse.config.homeserver import HomeServerConfig
|
from synapse.config.homeserver import HomeServerConfig
|
||||||
from synapse.config.server import ListenerConfig, ManholeConfig
|
from synapse.config.server import ListenerConfig, ManholeConfig, TCPListenerConfig
|
||||||
from synapse.crypto import context_factory
|
from synapse.crypto import context_factory
|
||||||
from synapse.events.presence_router import load_legacy_presence_router
|
from synapse.events.presence_router import load_legacy_presence_router
|
||||||
from synapse.events.spamcheck import load_legacy_spam_checkers
|
from synapse.events.spamcheck import load_legacy_spam_checkers
|
||||||
|
@ -351,6 +356,28 @@ def listen_tcp(
|
||||||
return r # type: ignore[return-value]
|
return r # type: ignore[return-value]
|
||||||
|
|
||||||
|
|
||||||
|
def listen_unix(
|
||||||
|
path: str,
|
||||||
|
mode: int,
|
||||||
|
factory: ServerFactory,
|
||||||
|
reactor: IReactorUNIX = reactor,
|
||||||
|
backlog: int = 50,
|
||||||
|
) -> List[Port]:
|
||||||
|
"""
|
||||||
|
Create a UNIX socket for a given path and 'mode' permission
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
list of twisted.internet.tcp.Port listening for TCP connections
|
||||||
|
"""
|
||||||
|
wantPID = True
|
||||||
|
|
||||||
|
return [
|
||||||
|
# IReactorUNIX returns an object implementing IListeningPort from listenUNIX,
|
||||||
|
# but we know it will be a Port instance.
|
||||||
|
cast(Port, reactor.listenUNIX(path, factory, backlog, mode, wantPID))
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
def listen_http(
|
def listen_http(
|
||||||
listener_config: ListenerConfig,
|
listener_config: ListenerConfig,
|
||||||
root_resource: Resource,
|
root_resource: Resource,
|
||||||
|
@ -359,18 +386,13 @@ def listen_http(
|
||||||
context_factory: Optional[IOpenSSLContextFactory],
|
context_factory: Optional[IOpenSSLContextFactory],
|
||||||
reactor: ISynapseReactor = reactor,
|
reactor: ISynapseReactor = reactor,
|
||||||
) -> List[Port]:
|
) -> List[Port]:
|
||||||
port = listener_config.port
|
|
||||||
bind_addresses = listener_config.bind_addresses
|
|
||||||
tls = listener_config.tls
|
|
||||||
|
|
||||||
assert listener_config.http_options is not None
|
assert listener_config.http_options is not None
|
||||||
|
|
||||||
site_tag = listener_config.http_options.tag
|
site_tag = listener_config.get_site_tag()
|
||||||
if site_tag is None:
|
|
||||||
site_tag = str(port)
|
|
||||||
|
|
||||||
site = SynapseSite(
|
site = SynapseSite(
|
||||||
"synapse.access.%s.%s" % ("https" if tls else "http", site_tag),
|
"synapse.access.%s.%s"
|
||||||
|
% ("https" if listener_config.is_tls() else "http", site_tag),
|
||||||
site_tag,
|
site_tag,
|
||||||
listener_config,
|
listener_config,
|
||||||
root_resource,
|
root_resource,
|
||||||
|
@ -378,25 +400,41 @@ def listen_http(
|
||||||
max_request_body_size=max_request_body_size,
|
max_request_body_size=max_request_body_size,
|
||||||
reactor=reactor,
|
reactor=reactor,
|
||||||
)
|
)
|
||||||
if tls:
|
|
||||||
# refresh_certificate should have been called before this.
|
if isinstance(listener_config, TCPListenerConfig):
|
||||||
assert context_factory is not None
|
if listener_config.is_tls():
|
||||||
ports = listen_ssl(
|
# refresh_certificate should have been called before this.
|
||||||
bind_addresses,
|
assert context_factory is not None
|
||||||
port,
|
ports = listen_ssl(
|
||||||
site,
|
listener_config.bind_addresses,
|
||||||
context_factory,
|
listener_config.port,
|
||||||
reactor=reactor,
|
site,
|
||||||
)
|
context_factory,
|
||||||
logger.info("Synapse now listening on TCP port %d (TLS)", port)
|
reactor=reactor,
|
||||||
|
)
|
||||||
|
logger.info(
|
||||||
|
"Synapse now listening on TCP port %d (TLS)", listener_config.port
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
ports = listen_tcp(
|
||||||
|
listener_config.bind_addresses,
|
||||||
|
listener_config.port,
|
||||||
|
site,
|
||||||
|
reactor=reactor,
|
||||||
|
)
|
||||||
|
logger.info("Synapse now listening on TCP port %d", listener_config.port)
|
||||||
|
|
||||||
else:
|
else:
|
||||||
ports = listen_tcp(
|
ports = listen_unix(
|
||||||
bind_addresses,
|
listener_config.path, listener_config.mode, site, reactor=reactor
|
||||||
port,
|
|
||||||
site,
|
|
||||||
reactor=reactor,
|
|
||||||
)
|
)
|
||||||
logger.info("Synapse now listening on TCP port %d", port)
|
# getHost() returns a UNIXAddress which contains an instance variable of 'name'
|
||||||
|
# encoded as a byte string. Decode as utf-8 so pretty.
|
||||||
|
logger.info(
|
||||||
|
"Synapse now listening on Unix Socket at: "
|
||||||
|
f"{ports[0].getHost().name.decode('utf-8')}"
|
||||||
|
)
|
||||||
|
|
||||||
return ports
|
return ports
|
||||||
|
|
||||||
|
|
||||||
|
|
|
@ -38,7 +38,7 @@ from synapse.app._base import (
|
||||||
from synapse.config._base import ConfigError
|
from synapse.config._base import ConfigError
|
||||||
from synapse.config.homeserver import HomeServerConfig
|
from synapse.config.homeserver import HomeServerConfig
|
||||||
from synapse.config.logger import setup_logging
|
from synapse.config.logger import setup_logging
|
||||||
from synapse.config.server import ListenerConfig
|
from synapse.config.server import ListenerConfig, TCPListenerConfig
|
||||||
from synapse.federation.transport.server import TransportLayerServer
|
from synapse.federation.transport.server import TransportLayerServer
|
||||||
from synapse.http.server import JsonResource, OptionsResource
|
from synapse.http.server import JsonResource, OptionsResource
|
||||||
from synapse.logging.context import LoggingContext
|
from synapse.logging.context import LoggingContext
|
||||||
|
@ -236,12 +236,18 @@ class GenericWorkerServer(HomeServer):
|
||||||
if listener.type == "http":
|
if listener.type == "http":
|
||||||
self._listen_http(listener)
|
self._listen_http(listener)
|
||||||
elif listener.type == "manhole":
|
elif listener.type == "manhole":
|
||||||
_base.listen_manhole(
|
if isinstance(listener, TCPListenerConfig):
|
||||||
listener.bind_addresses,
|
_base.listen_manhole(
|
||||||
listener.port,
|
listener.bind_addresses,
|
||||||
manhole_settings=self.config.server.manhole_settings,
|
listener.port,
|
||||||
manhole_globals={"hs": self},
|
manhole_settings=self.config.server.manhole_settings,
|
||||||
)
|
manhole_globals={"hs": self},
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
raise ConfigError(
|
||||||
|
"Can not using a unix socket for manhole at this time."
|
||||||
|
)
|
||||||
|
|
||||||
elif listener.type == "metrics":
|
elif listener.type == "metrics":
|
||||||
if not self.config.metrics.enable_metrics:
|
if not self.config.metrics.enable_metrics:
|
||||||
logger.warning(
|
logger.warning(
|
||||||
|
@ -249,10 +255,16 @@ class GenericWorkerServer(HomeServer):
|
||||||
"enable_metrics is not True!"
|
"enable_metrics is not True!"
|
||||||
)
|
)
|
||||||
else:
|
else:
|
||||||
_base.listen_metrics(
|
if isinstance(listener, TCPListenerConfig):
|
||||||
listener.bind_addresses,
|
_base.listen_metrics(
|
||||||
listener.port,
|
listener.bind_addresses,
|
||||||
)
|
listener.port,
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
raise ConfigError(
|
||||||
|
"Can not use a unix socket for metrics at this time."
|
||||||
|
)
|
||||||
|
|
||||||
else:
|
else:
|
||||||
logger.warning("Unsupported listener type: %s", listener.type)
|
logger.warning("Unsupported listener type: %s", listener.type)
|
||||||
|
|
||||||
|
|
|
@ -44,7 +44,7 @@ from synapse.app._base import (
|
||||||
)
|
)
|
||||||
from synapse.config._base import ConfigError, format_config_error
|
from synapse.config._base import ConfigError, format_config_error
|
||||||
from synapse.config.homeserver import HomeServerConfig
|
from synapse.config.homeserver import HomeServerConfig
|
||||||
from synapse.config.server import ListenerConfig
|
from synapse.config.server import ListenerConfig, TCPListenerConfig
|
||||||
from synapse.federation.transport.server import TransportLayerServer
|
from synapse.federation.transport.server import TransportLayerServer
|
||||||
from synapse.http.additional_resource import AdditionalResource
|
from synapse.http.additional_resource import AdditionalResource
|
||||||
from synapse.http.server import (
|
from synapse.http.server import (
|
||||||
|
@ -78,14 +78,13 @@ class SynapseHomeServer(HomeServer):
|
||||||
DATASTORE_CLASS = DataStore # type: ignore
|
DATASTORE_CLASS = DataStore # type: ignore
|
||||||
|
|
||||||
def _listener_http(
|
def _listener_http(
|
||||||
self, config: HomeServerConfig, listener_config: ListenerConfig
|
self,
|
||||||
|
config: HomeServerConfig,
|
||||||
|
listener_config: ListenerConfig,
|
||||||
) -> Iterable[Port]:
|
) -> Iterable[Port]:
|
||||||
port = listener_config.port
|
|
||||||
# Must exist since this is an HTTP listener.
|
# Must exist since this is an HTTP listener.
|
||||||
assert listener_config.http_options is not None
|
assert listener_config.http_options is not None
|
||||||
site_tag = listener_config.http_options.tag
|
site_tag = listener_config.get_site_tag()
|
||||||
if site_tag is None:
|
|
||||||
site_tag = str(port)
|
|
||||||
|
|
||||||
# We always include a health resource.
|
# We always include a health resource.
|
||||||
resources: Dict[str, Resource] = {"/health": HealthResource()}
|
resources: Dict[str, Resource] = {"/health": HealthResource()}
|
||||||
|
@ -252,12 +251,17 @@ class SynapseHomeServer(HomeServer):
|
||||||
self._listener_http(self.config, listener)
|
self._listener_http(self.config, listener)
|
||||||
)
|
)
|
||||||
elif listener.type == "manhole":
|
elif listener.type == "manhole":
|
||||||
_base.listen_manhole(
|
if isinstance(listener, TCPListenerConfig):
|
||||||
listener.bind_addresses,
|
_base.listen_manhole(
|
||||||
listener.port,
|
listener.bind_addresses,
|
||||||
manhole_settings=self.config.server.manhole_settings,
|
listener.port,
|
||||||
manhole_globals={"hs": self},
|
manhole_settings=self.config.server.manhole_settings,
|
||||||
)
|
manhole_globals={"hs": self},
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
raise ConfigError(
|
||||||
|
"Can not use a unix socket for manhole at this time."
|
||||||
|
)
|
||||||
elif listener.type == "metrics":
|
elif listener.type == "metrics":
|
||||||
if not self.config.metrics.enable_metrics:
|
if not self.config.metrics.enable_metrics:
|
||||||
logger.warning(
|
logger.warning(
|
||||||
|
@ -265,10 +269,16 @@ class SynapseHomeServer(HomeServer):
|
||||||
"enable_metrics is not True!"
|
"enable_metrics is not True!"
|
||||||
)
|
)
|
||||||
else:
|
else:
|
||||||
_base.listen_metrics(
|
if isinstance(listener, TCPListenerConfig):
|
||||||
listener.bind_addresses,
|
_base.listen_metrics(
|
||||||
listener.port,
|
listener.bind_addresses,
|
||||||
)
|
listener.port,
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
raise ConfigError(
|
||||||
|
"Can not use a unix socket for metrics at this time."
|
||||||
|
)
|
||||||
|
|
||||||
else:
|
else:
|
||||||
# this shouldn't happen, as the listener type should have been checked
|
# this shouldn't happen, as the listener type should have been checked
|
||||||
# during parsing
|
# during parsing
|
||||||
|
|
|
@ -17,6 +17,8 @@ import urllib.parse
|
||||||
from typing import (
|
from typing import (
|
||||||
TYPE_CHECKING,
|
TYPE_CHECKING,
|
||||||
Any,
|
Any,
|
||||||
|
Awaitable,
|
||||||
|
Callable,
|
||||||
Dict,
|
Dict,
|
||||||
Iterable,
|
Iterable,
|
||||||
List,
|
List,
|
||||||
|
@ -24,10 +26,11 @@ from typing import (
|
||||||
Optional,
|
Optional,
|
||||||
Sequence,
|
Sequence,
|
||||||
Tuple,
|
Tuple,
|
||||||
|
TypeVar,
|
||||||
)
|
)
|
||||||
|
|
||||||
from prometheus_client import Counter
|
from prometheus_client import Counter
|
||||||
from typing_extensions import TypeGuard
|
from typing_extensions import Concatenate, ParamSpec, TypeGuard
|
||||||
|
|
||||||
from synapse.api.constants import EventTypes, Membership, ThirdPartyEntityKind
|
from synapse.api.constants import EventTypes, Membership, ThirdPartyEntityKind
|
||||||
from synapse.api.errors import CodeMessageException, HttpResponseException
|
from synapse.api.errors import CodeMessageException, HttpResponseException
|
||||||
|
@ -78,7 +81,11 @@ sent_todevice_counter = Counter(
|
||||||
HOUR_IN_MS = 60 * 60 * 1000
|
HOUR_IN_MS = 60 * 60 * 1000
|
||||||
|
|
||||||
|
|
||||||
APP_SERVICE_PREFIX = "/_matrix/app/unstable"
|
APP_SERVICE_PREFIX = "/_matrix/app/v1"
|
||||||
|
APP_SERVICE_UNSTABLE_PREFIX = "/_matrix/app/unstable"
|
||||||
|
|
||||||
|
P = ParamSpec("P")
|
||||||
|
R = TypeVar("R")
|
||||||
|
|
||||||
|
|
||||||
def _is_valid_3pe_metadata(info: JsonDict) -> bool:
|
def _is_valid_3pe_metadata(info: JsonDict) -> bool:
|
||||||
|
@ -121,6 +128,47 @@ class ApplicationServiceApi(SimpleHttpClient):
|
||||||
hs.get_clock(), "as_protocol_meta", timeout_ms=HOUR_IN_MS
|
hs.get_clock(), "as_protocol_meta", timeout_ms=HOUR_IN_MS
|
||||||
)
|
)
|
||||||
|
|
||||||
|
async def _send_with_fallbacks(
|
||||||
|
self,
|
||||||
|
service: "ApplicationService",
|
||||||
|
prefixes: List[str],
|
||||||
|
path: str,
|
||||||
|
func: Callable[Concatenate[str, P], Awaitable[R]],
|
||||||
|
*args: P.args,
|
||||||
|
**kwargs: P.kwargs,
|
||||||
|
) -> R:
|
||||||
|
"""
|
||||||
|
Attempt to call an application service with multiple paths, falling back
|
||||||
|
until one succeeds.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
service: The application service; this provides the base URL.
|
||||||
|
prefixes: A list of paths to try in order for the requests.
|
||||||
|
path: A suffix to append to each prefix.
|
||||||
|
func: The function to call, the first argument will be the full
|
||||||
|
endpoint to fetch. Other arguments are provided by args/kwargs.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
The return value of func.
|
||||||
|
"""
|
||||||
|
for i, prefix in enumerate(prefixes, start=1):
|
||||||
|
uri = f"{service.url}{prefix}{path}"
|
||||||
|
try:
|
||||||
|
return await func(uri, *args, **kwargs)
|
||||||
|
except HttpResponseException as e:
|
||||||
|
# If an error is received that is due to an unrecognised path,
|
||||||
|
# fallback to next path (if one exists). Otherwise, consider it
|
||||||
|
# a legitimate error and raise.
|
||||||
|
if i < len(prefixes) and is_unknown_endpoint(e):
|
||||||
|
continue
|
||||||
|
raise
|
||||||
|
except Exception:
|
||||||
|
# Unexpected exceptions get sent to the caller.
|
||||||
|
raise
|
||||||
|
|
||||||
|
# The function should always exit via the return or raise above this.
|
||||||
|
raise RuntimeError("Unexpected fallback behaviour. This should never be seen.")
|
||||||
|
|
||||||
async def query_user(self, service: "ApplicationService", user_id: str) -> bool:
|
async def query_user(self, service: "ApplicationService", user_id: str) -> bool:
|
||||||
if service.url is None:
|
if service.url is None:
|
||||||
return False
|
return False
|
||||||
|
@ -128,10 +176,12 @@ class ApplicationServiceApi(SimpleHttpClient):
|
||||||
# This is required by the configuration.
|
# This is required by the configuration.
|
||||||
assert service.hs_token is not None
|
assert service.hs_token is not None
|
||||||
|
|
||||||
uri = service.url + ("/users/%s" % urllib.parse.quote(user_id))
|
|
||||||
try:
|
try:
|
||||||
response = await self.get_json(
|
response = await self._send_with_fallbacks(
|
||||||
uri,
|
service,
|
||||||
|
[APP_SERVICE_PREFIX, ""],
|
||||||
|
f"/users/{urllib.parse.quote(user_id)}",
|
||||||
|
self.get_json,
|
||||||
{"access_token": service.hs_token},
|
{"access_token": service.hs_token},
|
||||||
headers={"Authorization": [f"Bearer {service.hs_token}"]},
|
headers={"Authorization": [f"Bearer {service.hs_token}"]},
|
||||||
)
|
)
|
||||||
|
@ -140,9 +190,9 @@ class ApplicationServiceApi(SimpleHttpClient):
|
||||||
except CodeMessageException as e:
|
except CodeMessageException as e:
|
||||||
if e.code == 404:
|
if e.code == 404:
|
||||||
return False
|
return False
|
||||||
logger.warning("query_user to %s received %s", uri, e.code)
|
logger.warning("query_user to %s received %s", service.url, e.code)
|
||||||
except Exception as ex:
|
except Exception as ex:
|
||||||
logger.warning("query_user to %s threw exception %s", uri, ex)
|
logger.warning("query_user to %s threw exception %s", service.url, ex)
|
||||||
return False
|
return False
|
||||||
|
|
||||||
async def query_alias(self, service: "ApplicationService", alias: str) -> bool:
|
async def query_alias(self, service: "ApplicationService", alias: str) -> bool:
|
||||||
|
@ -152,21 +202,23 @@ class ApplicationServiceApi(SimpleHttpClient):
|
||||||
# This is required by the configuration.
|
# This is required by the configuration.
|
||||||
assert service.hs_token is not None
|
assert service.hs_token is not None
|
||||||
|
|
||||||
uri = service.url + ("/rooms/%s" % urllib.parse.quote(alias))
|
|
||||||
try:
|
try:
|
||||||
response = await self.get_json(
|
response = await self._send_with_fallbacks(
|
||||||
uri,
|
service,
|
||||||
|
[APP_SERVICE_PREFIX, ""],
|
||||||
|
f"/rooms/{urllib.parse.quote(alias)}",
|
||||||
|
self.get_json,
|
||||||
{"access_token": service.hs_token},
|
{"access_token": service.hs_token},
|
||||||
headers={"Authorization": [f"Bearer {service.hs_token}"]},
|
headers={"Authorization": [f"Bearer {service.hs_token}"]},
|
||||||
)
|
)
|
||||||
if response is not None: # just an empty json object
|
if response is not None: # just an empty json object
|
||||||
return True
|
return True
|
||||||
except CodeMessageException as e:
|
except CodeMessageException as e:
|
||||||
logger.warning("query_alias to %s received %s", uri, e.code)
|
logger.warning("query_alias to %s received %s", service.url, e.code)
|
||||||
if e.code == 404:
|
if e.code == 404:
|
||||||
return False
|
return False
|
||||||
except Exception as ex:
|
except Exception as ex:
|
||||||
logger.warning("query_alias to %s threw exception %s", uri, ex)
|
logger.warning("query_alias to %s threw exception %s", service.url, ex)
|
||||||
return False
|
return False
|
||||||
|
|
||||||
async def query_3pe(
|
async def query_3pe(
|
||||||
|
@ -188,25 +240,24 @@ class ApplicationServiceApi(SimpleHttpClient):
|
||||||
# This is required by the configuration.
|
# This is required by the configuration.
|
||||||
assert service.hs_token is not None
|
assert service.hs_token is not None
|
||||||
|
|
||||||
uri = "%s%s/thirdparty/%s/%s" % (
|
|
||||||
service.url,
|
|
||||||
APP_SERVICE_PREFIX,
|
|
||||||
kind,
|
|
||||||
urllib.parse.quote(protocol),
|
|
||||||
)
|
|
||||||
try:
|
try:
|
||||||
args: Mapping[Any, Any] = {
|
args: Mapping[Any, Any] = {
|
||||||
**fields,
|
**fields,
|
||||||
b"access_token": service.hs_token,
|
b"access_token": service.hs_token,
|
||||||
}
|
}
|
||||||
response = await self.get_json(
|
response = await self._send_with_fallbacks(
|
||||||
uri,
|
service,
|
||||||
|
[APP_SERVICE_PREFIX, APP_SERVICE_UNSTABLE_PREFIX],
|
||||||
|
f"/thirdparty/{kind}/{urllib.parse.quote(protocol)}",
|
||||||
|
self.get_json,
|
||||||
args=args,
|
args=args,
|
||||||
headers={"Authorization": [f"Bearer {service.hs_token}"]},
|
headers={"Authorization": [f"Bearer {service.hs_token}"]},
|
||||||
)
|
)
|
||||||
if not isinstance(response, list):
|
if not isinstance(response, list):
|
||||||
logger.warning(
|
logger.warning(
|
||||||
"query_3pe to %s returned an invalid response %r", uri, response
|
"query_3pe to %s returned an invalid response %r",
|
||||||
|
service.url,
|
||||||
|
response,
|
||||||
)
|
)
|
||||||
return []
|
return []
|
||||||
|
|
||||||
|
@ -216,12 +267,12 @@ class ApplicationServiceApi(SimpleHttpClient):
|
||||||
ret.append(r)
|
ret.append(r)
|
||||||
else:
|
else:
|
||||||
logger.warning(
|
logger.warning(
|
||||||
"query_3pe to %s returned an invalid result %r", uri, r
|
"query_3pe to %s returned an invalid result %r", service.url, r
|
||||||
)
|
)
|
||||||
|
|
||||||
return ret
|
return ret
|
||||||
except Exception as ex:
|
except Exception as ex:
|
||||||
logger.warning("query_3pe to %s threw exception %s", uri, ex)
|
logger.warning("query_3pe to %s threw exception %s", service.url, ex)
|
||||||
return []
|
return []
|
||||||
|
|
||||||
async def get_3pe_protocol(
|
async def get_3pe_protocol(
|
||||||
|
@ -233,21 +284,20 @@ class ApplicationServiceApi(SimpleHttpClient):
|
||||||
async def _get() -> Optional[JsonDict]:
|
async def _get() -> Optional[JsonDict]:
|
||||||
# This is required by the configuration.
|
# This is required by the configuration.
|
||||||
assert service.hs_token is not None
|
assert service.hs_token is not None
|
||||||
uri = "%s%s/thirdparty/protocol/%s" % (
|
|
||||||
service.url,
|
|
||||||
APP_SERVICE_PREFIX,
|
|
||||||
urllib.parse.quote(protocol),
|
|
||||||
)
|
|
||||||
try:
|
try:
|
||||||
info = await self.get_json(
|
info = await self._send_with_fallbacks(
|
||||||
uri,
|
service,
|
||||||
|
[APP_SERVICE_PREFIX, APP_SERVICE_UNSTABLE_PREFIX],
|
||||||
|
f"/thirdparty/protocol/{urllib.parse.quote(protocol)}",
|
||||||
|
self.get_json,
|
||||||
{"access_token": service.hs_token},
|
{"access_token": service.hs_token},
|
||||||
headers={"Authorization": [f"Bearer {service.hs_token}"]},
|
headers={"Authorization": [f"Bearer {service.hs_token}"]},
|
||||||
)
|
)
|
||||||
|
|
||||||
if not _is_valid_3pe_metadata(info):
|
if not _is_valid_3pe_metadata(info):
|
||||||
logger.warning(
|
logger.warning(
|
||||||
"query_3pe_protocol to %s did not return a valid result", uri
|
"query_3pe_protocol to %s did not return a valid result",
|
||||||
|
service.url,
|
||||||
)
|
)
|
||||||
return None
|
return None
|
||||||
|
|
||||||
|
@ -260,7 +310,9 @@ class ApplicationServiceApi(SimpleHttpClient):
|
||||||
|
|
||||||
return info
|
return info
|
||||||
except Exception as ex:
|
except Exception as ex:
|
||||||
logger.warning("query_3pe_protocol to %s threw exception %s", uri, ex)
|
logger.warning(
|
||||||
|
"query_3pe_protocol to %s threw exception %s", service.url, ex
|
||||||
|
)
|
||||||
return None
|
return None
|
||||||
|
|
||||||
key = (service.id, protocol)
|
key = (service.id, protocol)
|
||||||
|
@ -274,7 +326,7 @@ class ApplicationServiceApi(SimpleHttpClient):
|
||||||
assert service.hs_token is not None
|
assert service.hs_token is not None
|
||||||
|
|
||||||
await self.post_json_get_json(
|
await self.post_json_get_json(
|
||||||
uri=service.url + "/_matrix/app/unstable/fi.mau.msc2659/ping",
|
uri=f"{service.url}{APP_SERVICE_UNSTABLE_PREFIX}/fi.mau.msc2659/ping",
|
||||||
post_json={"transaction_id": txn_id},
|
post_json={"transaction_id": txn_id},
|
||||||
headers={"Authorization": [f"Bearer {service.hs_token}"]},
|
headers={"Authorization": [f"Bearer {service.hs_token}"]},
|
||||||
)
|
)
|
||||||
|
@ -318,8 +370,6 @@ class ApplicationServiceApi(SimpleHttpClient):
|
||||||
)
|
)
|
||||||
txn_id = 0
|
txn_id = 0
|
||||||
|
|
||||||
uri = service.url + ("/transactions/%s" % urllib.parse.quote(str(txn_id)))
|
|
||||||
|
|
||||||
# Never send ephemeral events to appservices that do not support it
|
# Never send ephemeral events to appservices that do not support it
|
||||||
body: JsonDict = {"events": serialized_events}
|
body: JsonDict = {"events": serialized_events}
|
||||||
if service.supports_ephemeral:
|
if service.supports_ephemeral:
|
||||||
|
@ -351,8 +401,11 @@ class ApplicationServiceApi(SimpleHttpClient):
|
||||||
}
|
}
|
||||||
|
|
||||||
try:
|
try:
|
||||||
await self.put_json(
|
await self._send_with_fallbacks(
|
||||||
uri=uri,
|
service,
|
||||||
|
[APP_SERVICE_PREFIX, ""],
|
||||||
|
f"/transactions/{urllib.parse.quote(str(txn_id))}",
|
||||||
|
self.put_json,
|
||||||
json_body=body,
|
json_body=body,
|
||||||
args={"access_token": service.hs_token},
|
args={"access_token": service.hs_token},
|
||||||
headers={"Authorization": [f"Bearer {service.hs_token}"]},
|
headers={"Authorization": [f"Bearer {service.hs_token}"]},
|
||||||
|
@ -360,7 +413,7 @@ class ApplicationServiceApi(SimpleHttpClient):
|
||||||
if logger.isEnabledFor(logging.DEBUG):
|
if logger.isEnabledFor(logging.DEBUG):
|
||||||
logger.debug(
|
logger.debug(
|
||||||
"push_bulk to %s succeeded! events=%s",
|
"push_bulk to %s succeeded! events=%s",
|
||||||
uri,
|
service.url,
|
||||||
[event.get("event_id") for event in events],
|
[event.get("event_id") for event in events],
|
||||||
)
|
)
|
||||||
sent_transactions_counter.labels(service.id).inc()
|
sent_transactions_counter.labels(service.id).inc()
|
||||||
|
@ -371,7 +424,7 @@ class ApplicationServiceApi(SimpleHttpClient):
|
||||||
except CodeMessageException as e:
|
except CodeMessageException as e:
|
||||||
logger.warning(
|
logger.warning(
|
||||||
"push_bulk to %s received code=%s msg=%s",
|
"push_bulk to %s received code=%s msg=%s",
|
||||||
uri,
|
service.url,
|
||||||
e.code,
|
e.code,
|
||||||
e.msg,
|
e.msg,
|
||||||
exc_info=logger.isEnabledFor(logging.DEBUG),
|
exc_info=logger.isEnabledFor(logging.DEBUG),
|
||||||
|
@ -379,7 +432,7 @@ class ApplicationServiceApi(SimpleHttpClient):
|
||||||
except Exception as ex:
|
except Exception as ex:
|
||||||
logger.warning(
|
logger.warning(
|
||||||
"push_bulk to %s threw exception(%s) %s args=%s",
|
"push_bulk to %s threw exception(%s) %s args=%s",
|
||||||
uri,
|
service.url,
|
||||||
type(ex).__name__,
|
type(ex).__name__,
|
||||||
ex,
|
ex,
|
||||||
ex.args,
|
ex.args,
|
||||||
|
|
|
@ -214,17 +214,52 @@ class HttpListenerConfig:
|
||||||
|
|
||||||
|
|
||||||
@attr.s(slots=True, frozen=True, auto_attribs=True)
|
@attr.s(slots=True, frozen=True, auto_attribs=True)
|
||||||
class ListenerConfig:
|
class TCPListenerConfig:
|
||||||
"""Object describing the configuration of a single listener."""
|
"""Object describing the configuration of a single TCP listener."""
|
||||||
|
|
||||||
port: int = attr.ib(validator=attr.validators.instance_of(int))
|
port: int = attr.ib(validator=attr.validators.instance_of(int))
|
||||||
bind_addresses: List[str]
|
bind_addresses: List[str] = attr.ib(validator=attr.validators.instance_of(List))
|
||||||
type: str = attr.ib(validator=attr.validators.in_(KNOWN_LISTENER_TYPES))
|
type: str = attr.ib(validator=attr.validators.in_(KNOWN_LISTENER_TYPES))
|
||||||
tls: bool = False
|
tls: bool = False
|
||||||
|
|
||||||
# http_options is only populated if type=http
|
# http_options is only populated if type=http
|
||||||
http_options: Optional[HttpListenerConfig] = None
|
http_options: Optional[HttpListenerConfig] = None
|
||||||
|
|
||||||
|
def get_site_tag(self) -> str:
|
||||||
|
"""Retrieves http_options.tag if it exists, otherwise the port number."""
|
||||||
|
if self.http_options and self.http_options.tag is not None:
|
||||||
|
return self.http_options.tag
|
||||||
|
else:
|
||||||
|
return str(self.port)
|
||||||
|
|
||||||
|
def is_tls(self) -> bool:
|
||||||
|
return self.tls
|
||||||
|
|
||||||
|
|
||||||
|
@attr.s(slots=True, frozen=True, auto_attribs=True)
|
||||||
|
class UnixListenerConfig:
|
||||||
|
"""Object describing the configuration of a single Unix socket listener."""
|
||||||
|
|
||||||
|
# Note: unix sockets can not be tls encrypted, so HAVE to be behind a tls-handling
|
||||||
|
# reverse proxy
|
||||||
|
path: str = attr.ib()
|
||||||
|
# A default(0o666) for this is set in parse_listener_def() below
|
||||||
|
mode: int
|
||||||
|
type: str = attr.ib(validator=attr.validators.in_(KNOWN_LISTENER_TYPES))
|
||||||
|
|
||||||
|
# http_options is only populated if type=http
|
||||||
|
http_options: Optional[HttpListenerConfig] = None
|
||||||
|
|
||||||
|
def get_site_tag(self) -> str:
|
||||||
|
return "unix"
|
||||||
|
|
||||||
|
def is_tls(self) -> bool:
|
||||||
|
"""Unix sockets can't have TLS"""
|
||||||
|
return False
|
||||||
|
|
||||||
|
|
||||||
|
ListenerConfig = Union[TCPListenerConfig, UnixListenerConfig]
|
||||||
|
|
||||||
|
|
||||||
@attr.s(slots=True, frozen=True, auto_attribs=True)
|
@attr.s(slots=True, frozen=True, auto_attribs=True)
|
||||||
class ManholeConfig:
|
class ManholeConfig:
|
||||||
|
@ -531,12 +566,12 @@ class ServerConfig(Config):
|
||||||
|
|
||||||
self.listeners = [parse_listener_def(i, x) for i, x in enumerate(listeners)]
|
self.listeners = [parse_listener_def(i, x) for i, x in enumerate(listeners)]
|
||||||
|
|
||||||
# no_tls is not really supported any more, but let's grandfather it in
|
# no_tls is not really supported anymore, but let's grandfather it in here.
|
||||||
# here.
|
|
||||||
if config.get("no_tls", False):
|
if config.get("no_tls", False):
|
||||||
l2 = []
|
l2 = []
|
||||||
for listener in self.listeners:
|
for listener in self.listeners:
|
||||||
if listener.tls:
|
if isinstance(listener, TCPListenerConfig) and listener.tls:
|
||||||
|
# Use isinstance() as the assertion this *has* a listener.port
|
||||||
logger.info(
|
logger.info(
|
||||||
"Ignoring TLS-enabled listener on port %i due to no_tls",
|
"Ignoring TLS-enabled listener on port %i due to no_tls",
|
||||||
listener.port,
|
listener.port,
|
||||||
|
@ -577,7 +612,7 @@ class ServerConfig(Config):
|
||||||
)
|
)
|
||||||
|
|
||||||
self.listeners.append(
|
self.listeners.append(
|
||||||
ListenerConfig(
|
TCPListenerConfig(
|
||||||
port=bind_port,
|
port=bind_port,
|
||||||
bind_addresses=[bind_host],
|
bind_addresses=[bind_host],
|
||||||
tls=True,
|
tls=True,
|
||||||
|
@ -589,7 +624,7 @@ class ServerConfig(Config):
|
||||||
unsecure_port = config.get("unsecure_port", bind_port - 400)
|
unsecure_port = config.get("unsecure_port", bind_port - 400)
|
||||||
if unsecure_port:
|
if unsecure_port:
|
||||||
self.listeners.append(
|
self.listeners.append(
|
||||||
ListenerConfig(
|
TCPListenerConfig(
|
||||||
port=unsecure_port,
|
port=unsecure_port,
|
||||||
bind_addresses=[bind_host],
|
bind_addresses=[bind_host],
|
||||||
tls=False,
|
tls=False,
|
||||||
|
@ -601,7 +636,7 @@ class ServerConfig(Config):
|
||||||
manhole = config.get("manhole")
|
manhole = config.get("manhole")
|
||||||
if manhole:
|
if manhole:
|
||||||
self.listeners.append(
|
self.listeners.append(
|
||||||
ListenerConfig(
|
TCPListenerConfig(
|
||||||
port=manhole,
|
port=manhole,
|
||||||
bind_addresses=["127.0.0.1"],
|
bind_addresses=["127.0.0.1"],
|
||||||
type="manhole",
|
type="manhole",
|
||||||
|
@ -648,7 +683,7 @@ class ServerConfig(Config):
|
||||||
logger.warning(METRICS_PORT_WARNING)
|
logger.warning(METRICS_PORT_WARNING)
|
||||||
|
|
||||||
self.listeners.append(
|
self.listeners.append(
|
||||||
ListenerConfig(
|
TCPListenerConfig(
|
||||||
port=metrics_port,
|
port=metrics_port,
|
||||||
bind_addresses=[config.get("metrics_bind_host", "127.0.0.1")],
|
bind_addresses=[config.get("metrics_bind_host", "127.0.0.1")],
|
||||||
type="http",
|
type="http",
|
||||||
|
@ -724,7 +759,7 @@ class ServerConfig(Config):
|
||||||
self.delete_stale_devices_after = None
|
self.delete_stale_devices_after = None
|
||||||
|
|
||||||
def has_tls_listener(self) -> bool:
|
def has_tls_listener(self) -> bool:
|
||||||
return any(listener.tls for listener in self.listeners)
|
return any(listener.is_tls() for listener in self.listeners)
|
||||||
|
|
||||||
def generate_config_section(
|
def generate_config_section(
|
||||||
self,
|
self,
|
||||||
|
@ -904,25 +939,25 @@ def parse_listener_def(num: int, listener: Any) -> ListenerConfig:
|
||||||
raise ConfigError(DIRECT_TCP_ERROR, ("listeners", str(num), "type"))
|
raise ConfigError(DIRECT_TCP_ERROR, ("listeners", str(num), "type"))
|
||||||
|
|
||||||
port = listener.get("port")
|
port = listener.get("port")
|
||||||
if type(port) is not int:
|
socket_path = listener.get("path")
|
||||||
|
# Either a port or a path should be declared at a minimum. Using both would be bad.
|
||||||
|
if port is not None and not isinstance(port, int):
|
||||||
raise ConfigError("Listener configuration is lacking a valid 'port' option")
|
raise ConfigError("Listener configuration is lacking a valid 'port' option")
|
||||||
|
if socket_path is not None and not isinstance(socket_path, str):
|
||||||
|
raise ConfigError("Listener configuration is lacking a valid 'path' option")
|
||||||
|
if port and socket_path:
|
||||||
|
raise ConfigError(
|
||||||
|
"Can not have both a UNIX socket and an IP/port declared for the same "
|
||||||
|
"resource!"
|
||||||
|
)
|
||||||
|
if port is None and socket_path is None:
|
||||||
|
raise ConfigError(
|
||||||
|
"Must have either a UNIX socket or an IP/port declared for a given "
|
||||||
|
"resource!"
|
||||||
|
)
|
||||||
|
|
||||||
tls = listener.get("tls", False)
|
tls = listener.get("tls", False)
|
||||||
|
|
||||||
bind_addresses = listener.get("bind_addresses", [])
|
|
||||||
bind_address = listener.get("bind_address")
|
|
||||||
# if bind_address was specified, add it to the list of addresses
|
|
||||||
if bind_address:
|
|
||||||
bind_addresses.append(bind_address)
|
|
||||||
|
|
||||||
# if we still have an empty list of addresses, use the default list
|
|
||||||
if not bind_addresses:
|
|
||||||
if listener_type == "metrics":
|
|
||||||
# the metrics listener doesn't support IPv6
|
|
||||||
bind_addresses.append("0.0.0.0")
|
|
||||||
else:
|
|
||||||
bind_addresses.extend(DEFAULT_BIND_ADDRESSES)
|
|
||||||
|
|
||||||
http_config = None
|
http_config = None
|
||||||
if listener_type == "http":
|
if listener_type == "http":
|
||||||
try:
|
try:
|
||||||
|
@ -932,8 +967,12 @@ def parse_listener_def(num: int, listener: Any) -> ListenerConfig:
|
||||||
except ValueError as e:
|
except ValueError as e:
|
||||||
raise ConfigError("Unknown listener resource") from e
|
raise ConfigError("Unknown listener resource") from e
|
||||||
|
|
||||||
|
# For a unix socket, default x_forwarded to True, as this is the only way of
|
||||||
|
# getting a client IP.
|
||||||
|
# Note: a reverse proxy is required anyway, as there is no way of exposing a
|
||||||
|
# unix socket to the internet.
|
||||||
http_config = HttpListenerConfig(
|
http_config = HttpListenerConfig(
|
||||||
x_forwarded=listener.get("x_forwarded", False),
|
x_forwarded=listener.get("x_forwarded", (True if socket_path else False)),
|
||||||
resources=resources,
|
resources=resources,
|
||||||
additional_resources=listener.get("additional_resources", {}),
|
additional_resources=listener.get("additional_resources", {}),
|
||||||
tag=listener.get("tag"),
|
tag=listener.get("tag"),
|
||||||
|
@ -941,7 +980,30 @@ def parse_listener_def(num: int, listener: Any) -> ListenerConfig:
|
||||||
experimental_cors_msc3886=listener.get("experimental_cors_msc3886", False),
|
experimental_cors_msc3886=listener.get("experimental_cors_msc3886", False),
|
||||||
)
|
)
|
||||||
|
|
||||||
return ListenerConfig(port, bind_addresses, listener_type, tls, http_config)
|
if socket_path:
|
||||||
|
# TODO: Add in path validation, like if the directory exists and is writable?
|
||||||
|
# Set a default for the permission, in case it's left out
|
||||||
|
socket_mode = listener.get("mode", 0o666)
|
||||||
|
|
||||||
|
return UnixListenerConfig(socket_path, socket_mode, listener_type, http_config)
|
||||||
|
|
||||||
|
else:
|
||||||
|
assert port is not None
|
||||||
|
bind_addresses = listener.get("bind_addresses", [])
|
||||||
|
bind_address = listener.get("bind_address")
|
||||||
|
# if bind_address was specified, add it to the list of addresses
|
||||||
|
if bind_address:
|
||||||
|
bind_addresses.append(bind_address)
|
||||||
|
|
||||||
|
# if we still have an empty list of addresses, use the default list
|
||||||
|
if not bind_addresses:
|
||||||
|
if listener_type == "metrics":
|
||||||
|
# the metrics listener doesn't support IPv6
|
||||||
|
bind_addresses.append("0.0.0.0")
|
||||||
|
else:
|
||||||
|
bind_addresses.extend(DEFAULT_BIND_ADDRESSES)
|
||||||
|
|
||||||
|
return TCPListenerConfig(port, bind_addresses, listener_type, tls, http_config)
|
||||||
|
|
||||||
|
|
||||||
_MANHOLE_SETTINGS_SCHEMA = {
|
_MANHOLE_SETTINGS_SCHEMA = {
|
||||||
|
|
|
@ -19,15 +19,18 @@ from typing import Any, Dict, List, Union
|
||||||
|
|
||||||
import attr
|
import attr
|
||||||
|
|
||||||
from synapse.types import JsonDict
|
from synapse.config._base import (
|
||||||
|
|
||||||
from ._base import (
|
|
||||||
Config,
|
Config,
|
||||||
ConfigError,
|
ConfigError,
|
||||||
RoutableShardedWorkerHandlingConfig,
|
RoutableShardedWorkerHandlingConfig,
|
||||||
ShardedWorkerHandlingConfig,
|
ShardedWorkerHandlingConfig,
|
||||||
)
|
)
|
||||||
from .server import DIRECT_TCP_ERROR, ListenerConfig, parse_listener_def
|
from synapse.config.server import (
|
||||||
|
DIRECT_TCP_ERROR,
|
||||||
|
TCPListenerConfig,
|
||||||
|
parse_listener_def,
|
||||||
|
)
|
||||||
|
from synapse.types import JsonDict
|
||||||
|
|
||||||
_DEPRECATED_WORKER_DUTY_OPTION_USED = """
|
_DEPRECATED_WORKER_DUTY_OPTION_USED = """
|
||||||
The '%s' configuration option is deprecated and will be removed in a future
|
The '%s' configuration option is deprecated and will be removed in a future
|
||||||
|
@ -161,7 +164,7 @@ class WorkerConfig(Config):
|
||||||
manhole = config.get("worker_manhole")
|
manhole = config.get("worker_manhole")
|
||||||
if manhole:
|
if manhole:
|
||||||
self.worker_listeners.append(
|
self.worker_listeners.append(
|
||||||
ListenerConfig(
|
TCPListenerConfig(
|
||||||
port=manhole,
|
port=manhole,
|
||||||
bind_addresses=["127.0.0.1"],
|
bind_addresses=["127.0.0.1"],
|
||||||
type="manhole",
|
type="manhole",
|
||||||
|
|
|
@ -943,6 +943,8 @@ class SyncHandler:
|
||||||
|
|
||||||
timeline_state = {}
|
timeline_state = {}
|
||||||
|
|
||||||
|
# Membership events to fetch that can be found in the room state, or in
|
||||||
|
# the case of partial state rooms, the auth events of timeline events.
|
||||||
members_to_fetch = set()
|
members_to_fetch = set()
|
||||||
first_event_by_sender_map = {}
|
first_event_by_sender_map = {}
|
||||||
for event in batch.events:
|
for event in batch.events:
|
||||||
|
@ -964,9 +966,19 @@ class SyncHandler:
|
||||||
# (if we are) to fix https://github.com/vector-im/riot-web/issues/7209
|
# (if we are) to fix https://github.com/vector-im/riot-web/issues/7209
|
||||||
# We only need apply this on full state syncs given we disabled
|
# We only need apply this on full state syncs given we disabled
|
||||||
# LL for incr syncs in #3840.
|
# LL for incr syncs in #3840.
|
||||||
members_to_fetch.add(sync_config.user.to_string())
|
# We don't insert ourselves into `members_to_fetch`, because in some
|
||||||
|
# rare cases (an empty event batch with a now_token after the user's
|
||||||
state_filter = StateFilter.from_lazy_load_member_list(members_to_fetch)
|
# leave in a partial state room which another local user has
|
||||||
|
# joined), the room state will be missing our membership and there
|
||||||
|
# is no guarantee that our membership will be in the auth events of
|
||||||
|
# timeline events when the room is partial stated.
|
||||||
|
state_filter = StateFilter.from_lazy_load_member_list(
|
||||||
|
members_to_fetch.union((sync_config.user.to_string(),))
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
state_filter = StateFilter.from_lazy_load_member_list(
|
||||||
|
members_to_fetch
|
||||||
|
)
|
||||||
|
|
||||||
# We are happy to use partial state to compute the `/sync` response.
|
# We are happy to use partial state to compute the `/sync` response.
|
||||||
# Since partial state may not include the lazy-loaded memberships we
|
# Since partial state may not include the lazy-loaded memberships we
|
||||||
|
@ -988,7 +1000,9 @@ class SyncHandler:
|
||||||
# sync's timeline and the start of the current sync's timeline.
|
# sync's timeline and the start of the current sync's timeline.
|
||||||
# See the docstring above for details.
|
# See the docstring above for details.
|
||||||
state_ids: StateMap[str]
|
state_ids: StateMap[str]
|
||||||
|
# We need to know whether the state we fetch may be partial, so check
|
||||||
|
# whether the room is partial stated *before* fetching it.
|
||||||
|
is_partial_state_room = await self.store.is_partial_state_room(room_id)
|
||||||
if full_state:
|
if full_state:
|
||||||
if batch:
|
if batch:
|
||||||
state_at_timeline_end = (
|
state_at_timeline_end = (
|
||||||
|
@ -1119,7 +1133,7 @@ class SyncHandler:
|
||||||
# If we only have partial state for the room, `state_ids` may be missing the
|
# If we only have partial state for the room, `state_ids` may be missing the
|
||||||
# memberships we wanted. We attempt to find some by digging through the auth
|
# memberships we wanted. We attempt to find some by digging through the auth
|
||||||
# events of timeline events.
|
# events of timeline events.
|
||||||
if lazy_load_members and await self.store.is_partial_state_room(room_id):
|
if lazy_load_members and is_partial_state_room:
|
||||||
assert members_to_fetch is not None
|
assert members_to_fetch is not None
|
||||||
assert first_event_by_sender_map is not None
|
assert first_event_by_sender_map is not None
|
||||||
|
|
||||||
|
|
|
@ -982,20 +982,21 @@ def is_unknown_endpoint(
|
||||||
"""
|
"""
|
||||||
if synapse_error is None:
|
if synapse_error is None:
|
||||||
synapse_error = e.to_synapse_error()
|
synapse_error = e.to_synapse_error()
|
||||||
# MSC3743 specifies that servers should return a 404 or 405 with an errcode
|
|
||||||
|
# Matrix v1.6 specifies that servers should return a 404 or 405 with an errcode
|
||||||
# of M_UNRECOGNIZED when they receive a request to an unknown endpoint or
|
# of M_UNRECOGNIZED when they receive a request to an unknown endpoint or
|
||||||
# to an unknown method, respectively.
|
# to an unknown method, respectively.
|
||||||
#
|
#
|
||||||
# Older versions of servers don't properly handle this. This needs to be
|
# Older versions of servers don't return proper errors, so be graceful. But,
|
||||||
# rather specific as some endpoints truly do return 404 errors.
|
# also handle that some endpoints truly do return 404 errors.
|
||||||
return (
|
return (
|
||||||
# 404 is an unknown endpoint, 405 is a known endpoint, but unknown method.
|
# 404 is an unknown endpoint, 405 is a known endpoint, but unknown method.
|
||||||
(e.code == 404 or e.code == 405)
|
(e.code == 404 or e.code == 405)
|
||||||
and (
|
and (
|
||||||
# Older Dendrites returned a text body or empty body.
|
# Consider empty body or non-JSON bodies to be unrecognised (matches
|
||||||
# Older Conduit returned an empty body.
|
# older Dendrites & Conduits).
|
||||||
not e.response
|
not e.response
|
||||||
or e.response == b"404 page not found"
|
or not e.response.startswith(b"{")
|
||||||
# The proper response JSON with M_UNRECOGNIZED errcode.
|
# The proper response JSON with M_UNRECOGNIZED errcode.
|
||||||
or synapse_error.errcode == Codes.UNRECOGNIZED
|
or synapse_error.errcode == Codes.UNRECOGNIZED
|
||||||
)
|
)
|
||||||
|
|
|
@ -19,6 +19,7 @@ from typing import TYPE_CHECKING, Any, Generator, Optional, Tuple, Union
|
||||||
import attr
|
import attr
|
||||||
from zope.interface import implementer
|
from zope.interface import implementer
|
||||||
|
|
||||||
|
from twisted.internet.address import UNIXAddress
|
||||||
from twisted.internet.defer import Deferred
|
from twisted.internet.defer import Deferred
|
||||||
from twisted.internet.interfaces import IAddress, IReactorTime
|
from twisted.internet.interfaces import IAddress, IReactorTime
|
||||||
from twisted.python.failure import Failure
|
from twisted.python.failure import Failure
|
||||||
|
@ -257,7 +258,7 @@ class SynapseRequest(Request):
|
||||||
request_id,
|
request_id,
|
||||||
request=ContextRequest(
|
request=ContextRequest(
|
||||||
request_id=request_id,
|
request_id=request_id,
|
||||||
ip_address=self.getClientAddress().host,
|
ip_address=self.get_client_ip_if_available(),
|
||||||
site_tag=self.synapse_site.site_tag,
|
site_tag=self.synapse_site.site_tag,
|
||||||
# The requester is going to be unknown at this point.
|
# The requester is going to be unknown at this point.
|
||||||
requester=None,
|
requester=None,
|
||||||
|
@ -414,7 +415,7 @@ class SynapseRequest(Request):
|
||||||
|
|
||||||
self.synapse_site.access_logger.debug(
|
self.synapse_site.access_logger.debug(
|
||||||
"%s - %s - Received request: %s %s",
|
"%s - %s - Received request: %s %s",
|
||||||
self.getClientAddress().host,
|
self.get_client_ip_if_available(),
|
||||||
self.synapse_site.site_tag,
|
self.synapse_site.site_tag,
|
||||||
self.get_method(),
|
self.get_method(),
|
||||||
self.get_redacted_uri(),
|
self.get_redacted_uri(),
|
||||||
|
@ -462,7 +463,7 @@ class SynapseRequest(Request):
|
||||||
"%s - %s - {%s}"
|
"%s - %s - {%s}"
|
||||||
" Processed request: %.3fsec/%.3fsec (%.3fsec, %.3fsec) (%.3fsec/%.3fsec/%d)"
|
" Processed request: %.3fsec/%.3fsec (%.3fsec, %.3fsec) (%.3fsec/%.3fsec/%d)"
|
||||||
' %sB %s "%s %s %s" "%s" [%d dbevts]',
|
' %sB %s "%s %s %s" "%s" [%d dbevts]',
|
||||||
self.getClientAddress().host,
|
self.get_client_ip_if_available(),
|
||||||
self.synapse_site.site_tag,
|
self.synapse_site.site_tag,
|
||||||
requester,
|
requester,
|
||||||
processing_time,
|
processing_time,
|
||||||
|
@ -500,6 +501,26 @@ class SynapseRequest(Request):
|
||||||
|
|
||||||
return True
|
return True
|
||||||
|
|
||||||
|
def get_client_ip_if_available(self) -> str:
|
||||||
|
"""Logging helper. Return something useful when a client IP is not retrievable
|
||||||
|
from a unix socket.
|
||||||
|
|
||||||
|
In practice, this returns the socket file path on a SynapseRequest if using a
|
||||||
|
unix socket and the normal IP address for TCP sockets.
|
||||||
|
|
||||||
|
"""
|
||||||
|
# getClientAddress().host returns a proper IP address for a TCP socket. But
|
||||||
|
# unix sockets have no concept of IP addresses or ports and return a
|
||||||
|
# UNIXAddress containing a 'None' value. In order to get something usable for
|
||||||
|
# logs(where this is used) get the unix socket file. getHost() returns a
|
||||||
|
# UNIXAddress containing a value of the socket file and has an instance
|
||||||
|
# variable of 'name' encoded as a byte string containing the path we want.
|
||||||
|
# Decode to utf-8 so it looks nice.
|
||||||
|
if isinstance(self.getClientAddress(), UNIXAddress):
|
||||||
|
return self.getHost().name.decode("utf-8")
|
||||||
|
else:
|
||||||
|
return self.getClientAddress().host
|
||||||
|
|
||||||
|
|
||||||
class XForwardedForRequest(SynapseRequest):
|
class XForwardedForRequest(SynapseRequest):
|
||||||
"""Request object which honours proxy headers
|
"""Request object which honours proxy headers
|
||||||
|
|
|
@ -15,5 +15,5 @@
|
||||||
</g>
|
</g>
|
||||||
</g>
|
</g>
|
||||||
</svg>
|
</svg>
|
||||||
<p>An open network for secure, decentralized communication.<br>© 2022 The Matrix.org Foundation C.I.C.</p>
|
<p>An open network for secure, decentralized communication.<br>© 2023 The Matrix.org Foundation C.I.C.</p>
|
||||||
</footer>
|
</footer>
|
||||||
|
|
|
@ -419,7 +419,11 @@ class RoomMemberWorkerStore(EventsWorkerStore):
|
||||||
)
|
)
|
||||||
|
|
||||||
# Now we filter out forgotten and excluded rooms
|
# Now we filter out forgotten and excluded rooms
|
||||||
rooms_to_exclude = await self.get_forgotten_rooms_for_user(user_id)
|
rooms_to_exclude: AbstractSet[str] = set()
|
||||||
|
|
||||||
|
# Users can't forget joined/invited rooms, so we skip the check for such look ups.
|
||||||
|
if any(m not in (Membership.JOIN, Membership.INVITE) for m in membership_list):
|
||||||
|
rooms_to_exclude = await self.get_forgotten_rooms_for_user(user_id)
|
||||||
|
|
||||||
if excluded_rooms is not None:
|
if excluded_rooms is not None:
|
||||||
# Take a copy to avoid mutating the in-cache set
|
# Take a copy to avoid mutating the in-cache set
|
||||||
|
@ -1391,6 +1395,12 @@ class RoomMemberBackgroundUpdateStore(SQLBaseStore):
|
||||||
columns=["user_id", "room_id"],
|
columns=["user_id", "room_id"],
|
||||||
where_clause="forgotten = 1",
|
where_clause="forgotten = 1",
|
||||||
)
|
)
|
||||||
|
self.db_pool.updates.register_background_index_update(
|
||||||
|
"room_membership_user_room_index",
|
||||||
|
index_name="room_membership_user_room_idx",
|
||||||
|
table="room_memberships",
|
||||||
|
columns=["user_id", "room_id"],
|
||||||
|
)
|
||||||
|
|
||||||
async def _background_add_membership_profile(
|
async def _background_add_membership_profile(
|
||||||
self, progress: JsonDict, batch_size: int
|
self, progress: JsonDict, batch_size: int
|
||||||
|
|
|
@ -0,0 +1,19 @@
|
||||||
|
/* Copyright 2023 The Matrix.org Foundation C.I.C
|
||||||
|
*
|
||||||
|
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
* you may not use this file except in compliance with the License.
|
||||||
|
* You may obtain a copy of the License at
|
||||||
|
*
|
||||||
|
* http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
*
|
||||||
|
* Unless required by applicable law or agreed to in writing, software
|
||||||
|
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
* See the License for the specific language governing permissions and
|
||||||
|
* limitations under the License.
|
||||||
|
*/
|
||||||
|
|
||||||
|
-- Add an index to `room_membership(user_id, room_id)` to make querying for
|
||||||
|
-- forgotten rooms faster.
|
||||||
|
INSERT INTO background_updates (ordering, update_name, progress_json) VALUES
|
||||||
|
(7403, 'room_membership_user_room_index', '{}');
|
|
@ -50,6 +50,7 @@ from twisted.internet.interfaces import (
|
||||||
IReactorTCP,
|
IReactorTCP,
|
||||||
IReactorThreads,
|
IReactorThreads,
|
||||||
IReactorTime,
|
IReactorTime,
|
||||||
|
IReactorUNIX,
|
||||||
)
|
)
|
||||||
|
|
||||||
from synapse.api.errors import Codes, SynapseError
|
from synapse.api.errors import Codes, SynapseError
|
||||||
|
@ -91,6 +92,7 @@ StrCollection = Union[Tuple[str, ...], List[str], AbstractSet[str]]
|
||||||
class ISynapseReactor(
|
class ISynapseReactor(
|
||||||
IReactorTCP,
|
IReactorTCP,
|
||||||
IReactorSSL,
|
IReactorSSL,
|
||||||
|
IReactorUNIX,
|
||||||
IReactorPluggableNameResolver,
|
IReactorPluggableNameResolver,
|
||||||
IReactorTime,
|
IReactorTime,
|
||||||
IReactorCore,
|
IReactorCore,
|
||||||
|
|
|
@ -16,6 +16,7 @@ from unittest.mock import Mock
|
||||||
|
|
||||||
from twisted.test.proto_helpers import MemoryReactor
|
from twisted.test.proto_helpers import MemoryReactor
|
||||||
|
|
||||||
|
from synapse.api.errors import HttpResponseException
|
||||||
from synapse.appservice import ApplicationService
|
from synapse.appservice import ApplicationService
|
||||||
from synapse.server import HomeServer
|
from synapse.server import HomeServer
|
||||||
from synapse.types import JsonDict
|
from synapse.types import JsonDict
|
||||||
|
@ -64,8 +65,8 @@ class ApplicationServiceApiTestCase(unittest.HomeserverTestCase):
|
||||||
}
|
}
|
||||||
]
|
]
|
||||||
|
|
||||||
URL_USER = f"{URL}/_matrix/app/unstable/thirdparty/user/{PROTOCOL}"
|
URL_USER = f"{URL}/_matrix/app/v1/thirdparty/user/{PROTOCOL}"
|
||||||
URL_LOCATION = f"{URL}/_matrix/app/unstable/thirdparty/location/{PROTOCOL}"
|
URL_LOCATION = f"{URL}/_matrix/app/v1/thirdparty/location/{PROTOCOL}"
|
||||||
|
|
||||||
self.request_url = None
|
self.request_url = None
|
||||||
|
|
||||||
|
@ -106,6 +107,58 @@ class ApplicationServiceApiTestCase(unittest.HomeserverTestCase):
|
||||||
self.assertEqual(self.request_url, URL_LOCATION)
|
self.assertEqual(self.request_url, URL_LOCATION)
|
||||||
self.assertEqual(result, SUCCESS_RESULT_LOCATION)
|
self.assertEqual(result, SUCCESS_RESULT_LOCATION)
|
||||||
|
|
||||||
|
def test_fallback(self) -> None:
|
||||||
|
"""
|
||||||
|
Tests that the fallback to legacy URLs works.
|
||||||
|
"""
|
||||||
|
SUCCESS_RESULT_USER = [
|
||||||
|
{
|
||||||
|
"protocol": PROTOCOL,
|
||||||
|
"userid": "@a:user",
|
||||||
|
"fields": {
|
||||||
|
"more": "fields",
|
||||||
|
},
|
||||||
|
}
|
||||||
|
]
|
||||||
|
|
||||||
|
URL_USER = f"{URL}/_matrix/app/v1/thirdparty/user/{PROTOCOL}"
|
||||||
|
FALLBACK_URL_USER = f"{URL}/_matrix/app/unstable/thirdparty/user/{PROTOCOL}"
|
||||||
|
|
||||||
|
self.request_url = None
|
||||||
|
self.v1_seen = False
|
||||||
|
|
||||||
|
async def get_json(
|
||||||
|
url: str,
|
||||||
|
args: Mapping[Any, Any],
|
||||||
|
headers: Mapping[Union[str, bytes], Sequence[Union[str, bytes]]],
|
||||||
|
) -> List[JsonDict]:
|
||||||
|
# Ensure the access token is passed as both a header and query arg.
|
||||||
|
if not headers.get("Authorization") or not args.get(b"access_token"):
|
||||||
|
raise RuntimeError("Access token not provided")
|
||||||
|
|
||||||
|
self.assertEqual(headers.get("Authorization"), [f"Bearer {TOKEN}"])
|
||||||
|
self.assertEqual(args.get(b"access_token"), TOKEN)
|
||||||
|
self.request_url = url
|
||||||
|
if url == URL_USER:
|
||||||
|
self.v1_seen = True
|
||||||
|
raise HttpResponseException(404, "NOT_FOUND", b"NOT_FOUND")
|
||||||
|
elif url == FALLBACK_URL_USER:
|
||||||
|
return SUCCESS_RESULT_USER
|
||||||
|
else:
|
||||||
|
raise RuntimeError(
|
||||||
|
"URL provided was invalid. This should never be seen."
|
||||||
|
)
|
||||||
|
|
||||||
|
# We assign to a method, which mypy doesn't like.
|
||||||
|
self.api.get_json = Mock(side_effect=get_json) # type: ignore[assignment]
|
||||||
|
|
||||||
|
result = self.get_success(
|
||||||
|
self.api.query_3pe(self.service, "user", PROTOCOL, {b"some": [b"field"]})
|
||||||
|
)
|
||||||
|
self.assertTrue(self.v1_seen)
|
||||||
|
self.assertEqual(self.request_url, FALLBACK_URL_USER)
|
||||||
|
self.assertEqual(result, SUCCESS_RESULT_USER)
|
||||||
|
|
||||||
def test_claim_keys(self) -> None:
|
def test_claim_keys(self) -> None:
|
||||||
"""
|
"""
|
||||||
Tests that the /keys/claim response is properly parsed for missing
|
Tests that the /keys/claim response is properly parsed for missing
|
||||||
|
|
Loading…
Reference in New Issue