Add stricter mypy options (#15694)

Enable warn_unused_configs, strict_concatenate, disallow_subclassing_any,
and disallow_incomplete_defs.
pull/15697/head
Patrick Cloke 2023-05-31 07:18:29 -04:00 committed by GitHub
parent 6fc3deb029
commit c01343de43
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
11 changed files with 40 additions and 24 deletions

1
changelog.d/15694.misc Normal file
View File

@@ -0,0 +1 @@
Improve type hints.

View File

@@ -2,17 +2,29 @@
namespace_packages = True namespace_packages = True
plugins = pydantic.mypy, mypy_zope:plugin, scripts-dev/mypy_synapse_plugin.py plugins = pydantic.mypy, mypy_zope:plugin, scripts-dev/mypy_synapse_plugin.py
follow_imports = normal follow_imports = normal
check_untyped_defs = True
show_error_codes = True show_error_codes = True
show_traceback = True show_traceback = True
mypy_path = stubs mypy_path = stubs
warn_unreachable = True warn_unreachable = True
warn_unused_ignores = True
local_partial_types = True local_partial_types = True
no_implicit_optional = True no_implicit_optional = True
# Strict checks, see mypy --help
warn_unused_configs = True
# disallow_any_generics = True
disallow_subclassing_any = True
# disallow_untyped_calls = True
disallow_untyped_defs = True disallow_untyped_defs = True
strict_equality = True disallow_incomplete_defs = True
# check_untyped_defs = True
# disallow_untyped_decorators = True
warn_redundant_casts = True warn_redundant_casts = True
warn_unused_ignores = True
# warn_return_any = True
# no_implicit_reexport = True
strict_equality = True
strict_concatenate = True
# Run mypy type checking with the minimum supported Python version to catch new usage # Run mypy type checking with the minimum supported Python version to catch new usage
# that isn't backwards-compatible (types, overloads, etc). # that isn't backwards-compatible (types, overloads, etc).
python_version = 3.8 python_version = 3.8
@@ -31,6 +43,7 @@ warn_unused_ignores = False
[mypy-synapse.util.caches.treecache] [mypy-synapse.util.caches.treecache]
disallow_untyped_defs = False disallow_untyped_defs = False
disallow_incomplete_defs = False
;; Dependencies without annotations ;; Dependencies without annotations
;; Before ignoring a module, check to see if type stubs are available. ;; Before ignoring a module, check to see if type stubs are available.
@@ -40,6 +53,7 @@ disallow_untyped_defs = False
;; which we can pull in as a dev dependency by adding to `pyproject.toml`'s ;; which we can pull in as a dev dependency by adding to `pyproject.toml`'s
;; `[tool.poetry.dev-dependencies]` list. ;; `[tool.poetry.dev-dependencies]` list.
# https://github.com/lepture/authlib/issues/460
[mypy-authlib.*] [mypy-authlib.*]
ignore_missing_imports = True ignore_missing_imports = True
@@ -49,9 +63,11 @@ ignore_missing_imports = True
[mypy-lxml] [mypy-lxml]
ignore_missing_imports = True ignore_missing_imports = True
# https://github.com/msgpack/msgpack-python/issues/448
[mypy-msgpack] [mypy-msgpack]
ignore_missing_imports = True ignore_missing_imports = True
# https://github.com/wolever/parameterized/issues/143
[mypy-parameterized.*] [mypy-parameterized.*]
ignore_missing_imports = True ignore_missing_imports = True
@@ -73,6 +89,7 @@ ignore_missing_imports = True
[mypy-srvlookup.*] [mypy-srvlookup.*]
ignore_missing_imports = True ignore_missing_imports = True
# https://github.com/twisted/treq/pull/366
[mypy-treq.*] [mypy-treq.*]
ignore_missing_imports = True ignore_missing_imports = True

View File

@@ -59,7 +59,7 @@ def scope_to_list(scope: str) -> List[str]:
return scope.strip().split(" ") return scope.strip().split(" ")
class PrivateKeyJWTWithKid(PrivateKeyJWT): class PrivateKeyJWTWithKid(PrivateKeyJWT): # type: ignore[misc]
"""An implementation of the private_key_jwt client auth method that includes a kid header. """An implementation of the private_key_jwt client auth method that includes a kid header.
This is needed because some providers (Keycloak) require the kid header to figure This is needed because some providers (Keycloak) require the kid header to figure

View File

@@ -515,7 +515,7 @@ class FederationServer(FederationBase):
logger.error( logger.error(
"Failed to handle PDU %s", "Failed to handle PDU %s",
event_id, event_id,
exc_info=(f.type, f.value, f.getTracebackObject()), # type: ignore exc_info=(f.type, f.value, f.getTracebackObject()),
) )
return {"error": str(e)} return {"error": str(e)}
@@ -1247,7 +1247,7 @@ class FederationServer(FederationBase):
logger.error( logger.error(
"Failed to handle PDU %s", "Failed to handle PDU %s",
event.event_id, event.event_id,
exc_info=(f.type, f.value, f.getTracebackObject()), # type: ignore exc_info=(f.type, f.value, f.getTracebackObject()),
) )
received_ts = await self.store.remove_received_event_from_staging( received_ts = await self.store.remove_received_event_from_staging(

View File

@@ -1354,7 +1354,7 @@ class OidcProvider:
finish_request(request) finish_request(request)
class LogoutToken(JWTClaims): class LogoutToken(JWTClaims): # type: ignore[misc]
""" """
Holds and verify claims of a logout token, as per Holds and verify claims of a logout token, as per
https://openid.net/specs/openid-connect-backchannel-1_0.html#LogoutToken https://openid.net/specs/openid-connect-backchannel-1_0.html#LogoutToken

View File

@@ -360,7 +360,7 @@ class PaginationHandler:
except Exception: except Exception:
f = Failure() f = Failure()
logger.error( logger.error(
"[purge] failed", exc_info=(f.type, f.value, f.getTracebackObject()) # type: ignore "[purge] failed", exc_info=(f.type, f.value, f.getTracebackObject())
) )
self._purges_by_id[purge_id].status = PurgeStatus.STATUS_FAILED self._purges_by_id[purge_id].status = PurgeStatus.STATUS_FAILED
self._purges_by_id[purge_id].error = f.getErrorMessage() self._purges_by_id[purge_id].error = f.getErrorMessage()
@@ -689,7 +689,7 @@ class PaginationHandler:
f = Failure() f = Failure()
logger.error( logger.error(
"failed", "failed",
exc_info=(f.type, f.value, f.getTracebackObject()), # type: ignore exc_info=(f.type, f.value, f.getTracebackObject()),
) )
self._delete_by_id[delete_id].status = DeleteStatus.STATUS_FAILED self._delete_by_id[delete_id].status = DeleteStatus.STATUS_FAILED
self._delete_by_id[delete_id].error = f.getErrorMessage() self._delete_by_id[delete_id].error = f.getErrorMessage()

View File

@@ -108,7 +108,7 @@ def return_json_error(
if f.check(SynapseError): if f.check(SynapseError):
# mypy doesn't understand that f.check asserts the type. # mypy doesn't understand that f.check asserts the type.
exc: SynapseError = f.value # type: ignore exc: SynapseError = f.value
error_code = exc.code error_code = exc.code
error_dict = exc.error_dict(config) error_dict = exc.error_dict(config)
if exc.headers is not None: if exc.headers is not None:
@@ -124,7 +124,7 @@ def return_json_error(
"Got cancellation before client disconnection from %r: %r", "Got cancellation before client disconnection from %r: %r",
request.request_metrics.name, request.request_metrics.name,
request, request,
exc_info=(f.type, f.value, f.getTracebackObject()), # type: ignore[arg-type] exc_info=(f.type, f.value, f.getTracebackObject()),
) )
else: else:
error_code = 500 error_code = 500
@@ -134,7 +134,7 @@ def return_json_error(
"Failed handle request via %r: %r", "Failed handle request via %r: %r",
request.request_metrics.name, request.request_metrics.name,
request, request,
exc_info=(f.type, f.value, f.getTracebackObject()), # type: ignore[arg-type] exc_info=(f.type, f.value, f.getTracebackObject()),
) )
# Only respond with an error response if we haven't already started writing, # Only respond with an error response if we haven't already started writing,
@@ -172,7 +172,7 @@ def return_html_error(
""" """
if f.check(CodeMessageException): if f.check(CodeMessageException):
# mypy doesn't understand that f.check asserts the type. # mypy doesn't understand that f.check asserts the type.
cme: CodeMessageException = f.value # type: ignore cme: CodeMessageException = f.value
code = cme.code code = cme.code
msg = cme.msg msg = cme.msg
if cme.headers is not None: if cme.headers is not None:
@@ -189,7 +189,7 @@ def return_html_error(
logger.error( logger.error(
"Failed handle request %r", "Failed handle request %r",
request, request,
exc_info=(f.type, f.value, f.getTracebackObject()), # type: ignore[arg-type] exc_info=(f.type, f.value, f.getTracebackObject()),
) )
elif f.check(CancelledError): elif f.check(CancelledError):
code = HTTP_STATUS_REQUEST_CANCELLED code = HTTP_STATUS_REQUEST_CANCELLED
@@ -199,7 +199,7 @@ def return_html_error(
logger.error( logger.error(
"Got cancellation before client disconnection when handling request %r", "Got cancellation before client disconnection when handling request %r",
request, request,
exc_info=(f.type, f.value, f.getTracebackObject()), # type: ignore[arg-type] exc_info=(f.type, f.value, f.getTracebackObject()),
) )
else: else:
code = HTTPStatus.INTERNAL_SERVER_ERROR code = HTTPStatus.INTERNAL_SERVER_ERROR
@@ -208,7 +208,7 @@ def return_html_error(
logger.error( logger.error(
"Failed handle request %r", "Failed handle request %r",
request, request,
exc_info=(f.type, f.value, f.getTracebackObject()), # type: ignore[arg-type] exc_info=(f.type, f.value, f.getTracebackObject()),
) )
if isinstance(error_template, str): if isinstance(error_template, str):

View File

@@ -76,7 +76,7 @@ def unwrapFirstError(failure: Failure) -> Failure:
# the subFailure's value, which will do a better job of preserving stacktraces. # the subFailure's value, which will do a better job of preserving stacktraces.
# (actually, you probably want to use yieldable_gather_results anyway) # (actually, you probably want to use yieldable_gather_results anyway)
failure.trap(defer.FirstError) failure.trap(defer.FirstError)
return failure.value.subFailure # type: ignore[union-attr] # Issue in Twisted's annotations return failure.value.subFailure
P = ParamSpec("P") P = ParamSpec("P")
@@ -178,7 +178,7 @@ def log_failure(
""" """
logger.error( logger.error(
msg, exc_info=(failure.type, failure.value, failure.getTracebackObject()) # type: ignore[arg-type] msg, exc_info=(failure.type, failure.value, failure.getTracebackObject())
) )
if not consumeErrors: if not consumeErrors:

View File

@@ -138,7 +138,7 @@ class ObservableDeferred(Generic[_T], AbstractObservableDeferred[_T]):
for observer in observers: for observer in observers:
# This is a little bit of magic to correctly propagate stack # This is a little bit of magic to correctly propagate stack
# traces when we `await` on one of the observer deferreds. # traces when we `await` on one of the observer deferreds.
f.value.__failure__ = f # type: ignore[union-attr] f.value.__failure__ = f
try: try:
observer.errback(f) observer.errback(f)
except Exception as e: except Exception as e:

View File

@@ -93,10 +93,8 @@ VT = TypeVar("VT")
# a general type var, distinct from either KT or VT # a general type var, distinct from either KT or VT
T = TypeVar("T") T = TypeVar("T")
P = TypeVar("P")
class _TimedListNode(ListNode[T]):
class _TimedListNode(ListNode[P]):
"""A `ListNode` that tracks last access time.""" """A `ListNode` that tracks last access time."""
__slots__ = ["last_access_ts_secs"] __slots__ = ["last_access_ts_secs"]
@@ -821,7 +819,7 @@ class AsyncLruCache(Generic[KT, VT]):
utilize external cache systems that require await behaviour to be created. utilize external cache systems that require await behaviour to be created.
""" """
def __init__(self, *args, **kwargs): # type: ignore def __init__(self, *args: Any, **kwargs: Any):
self._lru_cache: LruCache[KT, VT] = LruCache(*args, **kwargs) self._lru_cache: LruCache[KT, VT] = LruCache(*args, **kwargs)
async def get( async def get(

View File

@@ -642,7 +642,7 @@ def _make_test_homeserver_synchronous(server: HomeServer) -> None:
pool.runWithConnection = runWithConnection # type: ignore[assignment] pool.runWithConnection = runWithConnection # type: ignore[assignment]
pool.runInteraction = runInteraction # type: ignore[assignment] pool.runInteraction = runInteraction # type: ignore[assignment]
# Replace the thread pool with a threadless 'thread' pool # Replace the thread pool with a threadless 'thread' pool
pool.threadpool = ThreadPool(clock._reactor) # type: ignore[assignment] pool.threadpool = ThreadPool(clock._reactor)
pool.running = True pool.running = True
# We've just changed the Databases to run DB transactions on the same # We've just changed the Databases to run DB transactions on the same