Make `scripts-dev` pass `mypy --disallow-untyped-defs` (#12356)
Not enforced in config yet. One day.
parent 6463244375, commit 30c8e7e408
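As a quick illustration of what the flag enforces (not part of the diff; the function names are made up to echo the scripts below): `mypy --disallow-untyped-defs` rejects any function whose parameters and return type are not fully annotated, which is why most of the changes here add annotations rather than alter behaviour.

    def run_build(dist, skip_tests=False):  # rejected: signature is untyped
        ...


    def run_build_typed(dist: str, skip_tests: bool = False) -> None:  # accepted
        ...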
New changelog entry:

@@ -0,0 +1 @@
+Fix scripts-dev to pass typechecking.
mypy.ini (10 lines changed)
@@ -24,10 +24,6 @@ files =
 # https://docs.python.org/3/library/re.html#re.X
 exclude = (?x)
   ^(
-   |scripts-dev/build_debian_packages.py
-   |scripts-dev/federation_client.py
-   |scripts-dev/release.py
-
    |synapse/storage/databases/__init__.py
    |synapse/storage/databases/main/cache.py
    |synapse/storage/databases/main/devices.py
@@ -308,6 +304,9 @@ ignore_missing_imports = True
 [mypy-pympler.*]
 ignore_missing_imports = True
 
+[mypy-redbaron.*]
+ignore_missing_imports = True
+
 [mypy-rust_python_jaeger_reporter.*]
 ignore_missing_imports = True
 
@@ -323,6 +322,9 @@ ignore_missing_imports = True
 [mypy-signedjson.*]
 ignore_missing_imports = True
 
+[mypy-srvlookup.*]
+ignore_missing_imports = True
+
 [mypy-treq.*]
 ignore_missing_imports = True
 
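For context, an illustrative sketch (not part of the diff): the new `[mypy-redbaron.*]` and `[mypy-srvlookup.*]` stanzas suppress mypy's missing-stub errors for untyped third-party packages that the newly checked scripts import.

    # Without the ignore_missing_imports stanza, mypy flags this import as missing
    # library stubs / a py.typed marker; with it, the module is treated as Any.
    import srvlookup

    records = srvlookup.lookup("matrix", "tcp", "example.com")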
poetry.lock

@@ -309,14 +309,15 @@ smmap = ">=3.0.1,<6"
 
 [[package]]
 name = "gitpython"
-version = "3.1.14"
-description = "Python Git Library"
+version = "3.1.27"
+description = "GitPython is a python library used to interact with Git repositories"
 category = "dev"
 optional = false
-python-versions = ">=3.4"
+python-versions = ">=3.7"
 
 [package.dependencies]
 gitdb = ">=4.0.1,<5"
+typing-extensions = {version = ">=3.7.4.3", markers = "python_version < \"3.8\""}
 
 [[package]]
 name = "hiredis"
@@ -1315,6 +1316,14 @@ category = "dev"
 optional = false
 python-versions = "*"
 
+[[package]]
+name = "types-commonmark"
+version = "0.9.2"
+description = "Typing stubs for commonmark"
+category = "dev"
+optional = false
+python-versions = "*"
+
 [[package]]
 name = "types-cryptography"
 version = "3.3.15"
@@ -1553,7 +1562,7 @@ url_preview = ["lxml"]
 [metadata]
 lock-version = "1.1"
 python-versions = "^3.7"
-content-hash = "f482a4f594a165dfe01ce253a22510d5faf38647ab0dcebc35789350cafd9bf0"
+content-hash = "3825cef058b8c9f520ef4b7acb92519be95db9a663a61c2e89a5fe431ed55655"
 
 [metadata.files]
 attrs = [
@@ -1766,8 +1775,8 @@ gitdb = [
     {file = "gitdb-4.0.9.tar.gz", hash = "sha256:bac2fd45c0a1c9cf619e63a90d62bdc63892ef92387424b855792a6cabe789aa"},
 ]
 gitpython = [
-    {file = "GitPython-3.1.14-py3-none-any.whl", hash = "sha256:3283ae2fba31c913d857e12e5ba5f9a7772bbc064ae2bb09efafa71b0dd4939b"},
-    {file = "GitPython-3.1.14.tar.gz", hash = "sha256:be27633e7509e58391f10207cd32b2a6cf5b908f92d9cd30da2e514e1137af61"},
+    {file = "GitPython-3.1.27-py3-none-any.whl", hash = "sha256:5b68b000463593e05ff2b261acff0ff0972df8ab1b70d3cdbd41b546c8b8fc3d"},
+    {file = "GitPython-3.1.27.tar.gz", hash = "sha256:1c885ce809e8ba2d88a29befeb385fcea06338d3640712b59ca623c220bb5704"},
 ]
 hiredis = [
     {file = "hiredis-2.0.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:b4c8b0bc5841e578d5fb32a16e0c305359b987b850a06964bd5a62739d688048"},
@@ -2588,6 +2597,10 @@ types-bleach = [
     {file = "types-bleach-4.1.4.tar.gz", hash = "sha256:2d30c2c4fb6854088ac636471352c9a51bf6c089289800d2a8060820a01cd43a"},
     {file = "types_bleach-4.1.4-py3-none-any.whl", hash = "sha256:edffe173ed6d7b6f3543036a96204a9319c3bf6c3645917b14274e43f000cc9b"},
 ]
+types-commonmark = [
+    {file = "types-commonmark-0.9.2.tar.gz", hash = "sha256:b894b67750c52fd5abc9a40a9ceb9da4652a391d75c1b480bba9cef90f19fc86"},
+    {file = "types_commonmark-0.9.2-py3-none-any.whl", hash = "sha256:56f20199a1f9a2924443211a0ef97f8b15a8a956a7f4e9186be6950bf38d6d02"},
+]
 types-cryptography = [
     {file = "types-cryptography-3.3.15.tar.gz", hash = "sha256:a7983a75a7b88a18f88832008f0ef140b8d1097888ec1a0824ec8fb7e105273b"},
     {file = "types_cryptography-3.3.15-py3-none-any.whl", hash = "sha256:d9b0dd5465d7898d400850e7f35e5518aa93a7e23d3e11757cd81b4777089046"},
pyproject.toml

@@ -251,6 +251,7 @@ flake8 = "*"
 mypy = "==0.931"
 mypy-zope = "==0.3.5"
 types-bleach = ">=4.1.0"
+types-commonmark = ">=0.9.2"
 types-jsonschema = ">=3.2.0"
 types-opentracing = ">=2.4.2"
 types-Pillow = ">=8.3.4"
@@ -270,7 +271,8 @@ idna = ">=2.5"
 
 # The following are used by the release script
 click = "==8.1.0"
-GitPython = "==3.1.14"
+# GitPython was == 3.1.14; bumped to 3.1.20, the first release with type hints.
+GitPython = ">=3.1.20"
 commonmark = "==0.9.1"
 pygithub = "==1.55"
 # The following are executed as commands by the release script.
scripts-dev/build_debian_packages.py

@@ -17,7 +17,8 @@ import subprocess
 import sys
 import threading
 from concurrent.futures import ThreadPoolExecutor
-from typing import Optional, Sequence
+from types import FrameType
+from typing import Collection, Optional, Sequence, Set
 
 DISTS = (
     "debian:buster",  # oldstable: EOL 2022-08
@@ -41,15 +42,17 @@ projdir = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
 
 class Builder(object):
     def __init__(
-        self, redirect_stdout=False, docker_build_args: Optional[Sequence[str]] = None
+        self,
+        redirect_stdout: bool = False,
+        docker_build_args: Optional[Sequence[str]] = None,
     ):
         self.redirect_stdout = redirect_stdout
         self._docker_build_args = tuple(docker_build_args or ())
-        self.active_containers = set()
+        self.active_containers: Set[str] = set()
         self._lock = threading.Lock()
         self._failed = False
 
-    def run_build(self, dist, skip_tests=False):
+    def run_build(self, dist: str, skip_tests: bool = False) -> None:
         """Build deb for a single distribution"""
 
         if self._failed:
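A note on the `Set[str]` annotation (a standalone sketch, hypothetical class name): mypy cannot infer an element type for a bare `set()`, so an attribute initialised that way needs an explicit annotation before later `add()`/`remove()` calls can be checked.

    from typing import Set


    class ContainerTracker:
        def __init__(self) -> None:
            # Without the annotation, mypy asks for one ("need type annotation").
            self.active_containers: Set[str] = set()

        def track(self, container_id: str) -> None:
            self.active_containers.add(container_id)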
@@ -63,7 +66,7 @@ class Builder(object):
             self._failed = True
             raise
 
-    def _inner_build(self, dist, skip_tests=False):
+    def _inner_build(self, dist: str, skip_tests: bool = False) -> None:
         tag = dist.split(":", 1)[1]
 
         # Make the dir where the debs will live.
@@ -138,7 +141,7 @@ class Builder(object):
             stdout.close()
         print("Completed build of %s" % (dist,))
 
-    def kill_containers(self):
+    def kill_containers(self) -> None:
         with self._lock:
             active = list(self.active_containers)
 
@@ -156,8 +159,10 @@ class Builder(object):
                 self.active_containers.remove(c)
 
 
-def run_builds(builder, dists, jobs=1, skip_tests=False):
-    def sig(signum, _frame):
+def run_builds(
+    builder: Builder, dists: Collection[str], jobs: int = 1, skip_tests: bool = False
+) -> None:
+    def sig(signum: int, _frame: Optional[FrameType]) -> None:
         print("Caught SIGINT")
         builder.kill_containers()
 
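The new signature for `sig` matches what the standard library expects from a signal handler; a minimal standalone sketch (hypothetical handler name, not from the diff):

    import signal
    from types import FrameType
    from typing import Optional


    def handle_sigint(signum: int, frame: Optional[FrameType]) -> None:
        print("Caught SIGINT", signum)


    # typeshed types the handler as Callable[[int, Optional[FrameType]], Any],
    # so the annotations above let this registration type-check.
    signal.signal(signal.SIGINT, handle_sigint)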
scripts-dev/federation_client.py

@@ -38,7 +38,7 @@ import argparse
 import base64
 import json
 import sys
-from typing import Any, Optional
+from typing import Any, Dict, Optional, Tuple
 from urllib import parse as urlparse
 
 import requests
@@ -47,13 +47,14 @@ import signedjson.types
 import srvlookup
 import yaml
 from requests.adapters import HTTPAdapter
+from urllib3 import HTTPConnectionPool
 
 # uncomment the following to enable debug logging of http requests
 # from httplib import HTTPConnection
 # HTTPConnection.debuglevel = 1
 
 
-def encode_base64(input_bytes):
+def encode_base64(input_bytes: bytes) -> str:
     """Encode bytes as a base64 string without any padding."""
 
     input_len = len(input_bytes)
@@ -63,7 +64,7 @@ def encode_base64(input_bytes):
     return output_string
 
 
-def encode_canonical_json(value):
+def encode_canonical_json(value: object) -> bytes:
     return json.dumps(
         value,
         # Encode code-points outside of ASCII as UTF-8 rather than \u escapes
@@ -130,7 +131,7 @@ def request(
             sig,
             destination,
         )
-        authorization_headers.append(header.encode("ascii"))
+        authorization_headers.append(header)
         print("Authorization: %s" % header, file=sys.stderr)
 
     dest = "matrix://%s%s" % (destination, path)
@@ -139,7 +140,10 @@ def request(
     s = requests.Session()
     s.mount("matrix://", MatrixConnectionAdapter())
 
-    headers = {"Host": destination, "Authorization": authorization_headers[0]}
+    headers: Dict[str, str] = {
+        "Host": destination,
+        "Authorization": authorization_headers[0],
+    }
 
     if method == "POST":
         headers["Content-Type"] = "application/json"
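A small sketch of the `headers` pattern with placeholder values (not the script's real ones): annotating the dict up front fixes its key and value types, so later insertions such as the `Content-Type` header are checked against `str`.

    from typing import Dict

    headers: Dict[str, str] = {
        "Host": "example.com",
        "Authorization": "X-Matrix origin=...,key=...,sig=...",
    }

    # Checked against the declared value type; assigning bytes here would be an error.
    headers["Content-Type"] = "application/json"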
@@ -154,7 +158,7 @@ def request(
     )
 
 
-def main():
+def main() -> None:
     parser = argparse.ArgumentParser(
         description="Signs and sends a federation request to a matrix homeserver"
     )
@@ -212,6 +216,7 @@ def main():
     if not args.server_name or not args.signing_key:
         read_args_from_config(args)
 
+    assert isinstance(args.signing_key, str)
     algorithm, version, key_base64 = args.signing_key.split()
     key = signedjson.key.decode_signing_key_base64(algorithm, version, key_base64)
 
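The added `assert isinstance(args.signing_key, str)` pins down a concrete type before `.split()` is called. The same narrowing pattern in isolation (hypothetical function, not from the diff):

    def split_signing_key(signing_key: object) -> None:
        assert isinstance(signing_key, str)  # narrows `object` to `str` for mypy
        algorithm, version, key_base64 = signing_key.split()
        print(algorithm, version, len(key_base64))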
@@ -233,7 +238,7 @@ def main():
     print("")
 
 
-def read_args_from_config(args):
+def read_args_from_config(args: argparse.Namespace) -> None:
     with open(args.config, "r") as fh:
         config = yaml.safe_load(fh)
 
@@ -250,7 +255,7 @@ def read_args_from_config(args):
 
 class MatrixConnectionAdapter(HTTPAdapter):
     @staticmethod
-    def lookup(s, skip_well_known=False):
+    def lookup(s: str, skip_well_known: bool = False) -> Tuple[str, int]:
         if s[-1] == "]":
             # ipv6 literal (with no port)
             return s, 8448
@@ -276,7 +281,7 @@ class MatrixConnectionAdapter(HTTPAdapter):
         return s, 8448
 
     @staticmethod
-    def get_well_known(server_name):
+    def get_well_known(server_name: str) -> Optional[str]:
         uri = "https://%s/.well-known/matrix/server" % (server_name,)
         print("fetching %s" % (uri,), file=sys.stderr)
 
@@ -299,7 +304,9 @@ class MatrixConnectionAdapter(HTTPAdapter):
             print("Invalid response from %s: %s" % (uri, e), file=sys.stderr)
             return None
 
-    def get_connection(self, url, proxies=None):
+    def get_connection(
+        self, url: str, proxies: Optional[Dict[str, str]] = None
+    ) -> HTTPConnectionPool:
         parsed = urlparse.urlparse(url)
 
         (host, port) = self.lookup(parsed.netloc)
scripts-dev/mypy_synapse_plugin.py

@@ -16,7 +16,7 @@
 can crop up, e.g the cache descriptors.
 """
 
-from typing import Callable, Optional
+from typing import Callable, Optional, Type
 
 from mypy.nodes import ARG_NAMED_OPT
 from mypy.plugin import MethodSigContext, Plugin
@@ -94,7 +94,7 @@ def cached_function_method_signature(ctx: MethodSigContext) -> CallableType:
     return signature
 
 
-def plugin(version: str):
+def plugin(version: str) -> Type[SynapsePlugin]:
     # This is the entry point of the plugin, and let's us deal with the fact
     # that the mypy plugin interface is *not* stable by looking at the version
     # string.
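`Type[SynapsePlugin]` declares that the function returns the class object itself (or a subclass of it), not an instance. A self-contained sketch of the idea (hypothetical classes, not the plugin's real ones):

    from typing import Type


    class BasePlugin:
        def __init__(self, version: str) -> None:
            self.version = version


    class SynapsePlugin(BasePlugin):
        pass


    def plugin(version: str) -> Type[SynapsePlugin]:
        return SynapsePlugin


    plugin_cls = plugin("0.931")
    instance = plugin_cls("0.931")  # instantiating the returned class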
scripts-dev/release.py

@@ -25,7 +25,7 @@ import sys
 import urllib.request
 from os import path
 from tempfile import TemporaryDirectory
-from typing import List, Optional
+from typing import Any, List, Optional, cast
 
 import attr
 import click
@@ -36,7 +36,9 @@ from github import Github
 from packaging import version
 
 
-def run_until_successful(command, *args, **kwargs):
+def run_until_successful(
+    command: str, *args: Any, **kwargs: Any
+) -> subprocess.CompletedProcess:
     while True:
         completed_process = subprocess.run(command, *args, **kwargs)
         exit_code = completed_process.returncode
@@ -50,7 +52,7 @@ def run_until_successful(command, *args, **kwargs):
 
 
 @click.group()
-def cli():
+def cli() -> None:
     """An interactive script to walk through the parts of creating a release.
 
     Requires the dev dependencies be installed, which can be done via:
@@ -81,7 +83,7 @@ def cli():
 
 
 @cli.command()
-def prepare():
+def prepare() -> None:
     """Do the initial stages of creating a release, including creating release
     branch, updating changelog and pushing to GitHub.
     """
@@ -161,7 +163,9 @@ def prepare():
         click.get_current_context().abort()
 
     # Switch to the release branch.
-    parsed_new_version: version.Version = version.parse(new_version)
+    # Cast safety: parse() won't return a version.LegacyVersion from our
+    # version string format.
+    parsed_new_version = cast(version.Version, version.parse(new_version))
 
     # We assume for debian changelogs that we only do RCs or full releases.
     assert not parsed_new_version.is_devrelease
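The `cast()` replaces the old variable annotation because `version.parse()` is declared as possibly returning a `LegacyVersion`; the cast tells mypy which case is expected and adds no runtime check. A minimal sketch, assuming the `packaging` library is installed:

    from typing import cast

    from packaging import version

    # For a normal release string we assert the Version case to mypy and carry on.
    parsed = cast(version.Version, version.parse("1.57.0rc1"))
    print(parsed.is_prerelease)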
@@ -176,7 +180,6 @@ def prepare():
         # If the release branch only exists on the remote we check it out
         # locally.
         repo.git.checkout(release_branch_name)
-        release_branch = repo.active_branch
     else:
         # If a branch doesn't exist we create one. We ask which one branch it
         # should be based off, defaulting to sensible values depending on the
@@ -198,13 +201,15 @@ def prepare():
             click.get_current_context().abort()
 
     # Check out the base branch and ensure it's up to date
-    repo.head.reference = base_branch
+    repo.head.set_reference(base_branch, "check out the base branch")
     repo.head.reset(index=True, working_tree=True)
     if not base_branch.is_remote():
         update_branch(repo)
 
     # Create the new release branch
-    release_branch = repo.create_head(release_branch_name, commit=base_branch)
+    # Type ignore will no longer be needed after GitPython 3.1.28.
+    # See https://github.com/gitpython-developers/GitPython/pull/1419
+    repo.create_head(release_branch_name, commit=base_branch)  # type: ignore[arg-type]
 
     # Switch to the release branch and ensure it's up to date.
     repo.git.checkout(release_branch_name)
@@ -265,7 +270,7 @@ def prepare():
 
 @cli.command()
 @click.option("--gh-token", envvar=["GH_TOKEN", "GITHUB_TOKEN"])
-def tag(gh_token: Optional[str]):
+def tag(gh_token: Optional[str]) -> None:
     """Tags the release and generates a draft GitHub release"""
 
     # Make sure we're in a git repo.
@@ -293,7 +298,12 @@ def tag(gh_token: Optional[str]):
 
     click.echo_via_pager(changes)
     if click.confirm("Edit text?", default=False):
-        changes = click.edit(changes, require_save=False)
+        edited_changes = click.edit(changes, require_save=False)
+        # This assert is for mypy's benefit. click's docs are a little unclear, but
+        # when `require_save=False`, not saving the temp file in the editor returns
+        # the original string.
+        assert edited_changes is not None
+        changes = edited_changes
 
     repo.create_tag(tag_name, message=changes, sign=True)
 
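The intermediate `edited_changes` variable plus the assert deal with `click.edit()` being typed as returning `Optional[str]`. The same narrowing pattern in isolation (hypothetical helper, not from the diff):

    from typing import Optional


    def apply_edit(edited: Optional[str]) -> str:
        # `edited` stands in for the Optional[str] that click.edit() returns.
        assert edited is not None  # narrows Optional[str] to str for mypy
        return edited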
@@ -347,7 +357,7 @@ def tag(gh_token: Optional[str]):
 
 @cli.command()
 @click.option("--gh-token", envvar=["GH_TOKEN", "GITHUB_TOKEN"], required=True)
-def publish(gh_token: str):
+def publish(gh_token: str) -> None:
     """Publish release."""
 
     # Make sure we're in a git repo.
@@ -390,7 +400,7 @@ def publish(gh_token: str):
 
 
 @cli.command()
-def upload():
+def upload() -> None:
     """Upload release to pypi."""
 
     current_version = get_package_version()
@@ -418,7 +428,7 @@ def upload():
 
 
 @cli.command()
-def announce():
+def announce() -> None:
     """Generate markdown to announce the release."""
 
     current_version = get_package_version()
@@ -461,18 +471,19 @@ def get_package_version() -> version.Version:
 
 def find_ref(repo: git.Repo, ref_name: str) -> Optional[git.HEAD]:
     """Find the branch/ref, looking first locally then in the remote."""
-    if ref_name in repo.refs:
-        return repo.refs[ref_name]
+    if ref_name in repo.references:
+        return repo.references[ref_name]
     elif ref_name in repo.remote().refs:
         return repo.remote().refs[ref_name]
     else:
         return None
 
 
-def update_branch(repo: git.Repo):
+def update_branch(repo: git.Repo) -> None:
     """Ensure branch is up to date if it has a remote"""
-    if repo.active_branch.tracking_branch():
-        repo.git.merge(repo.active_branch.tracking_branch().name)
+    tracking_branch = repo.active_branch.tracking_branch()
+    if tracking_branch:
+        repo.git.merge(tracking_branch.name)
 
 
 def get_changes_for_version(wanted_version: version.Version) -> str:
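`update_branch` now binds the `Optional` result of `tracking_branch()` to a local before testing it: mypy will not narrow the result of a method call across two separate invocations, and the rewrite also avoids doing the lookup twice. A small sketch of the pattern (hypothetical classes, not GitPython's real API):

    from typing import Optional


    class Branch:
        def __init__(self, name: str) -> None:
            self.name = name


    class Repo:
        def tracking_branch(self) -> Optional[Branch]:
            return Branch("origin/develop")


    repo = Repo()
    tracking = repo.tracking_branch()  # bind once so mypy can narrow it below
    if tracking:
        print("merging", tracking.name)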
@@ -536,7 +547,9 @@ get_changes_for_version(wanted_version: version.Version) -> str:
     return "\n".join(version_changelog)
 
 
-def generate_and_write_changelog(current_version: version.Version, new_version: str):
+def generate_and_write_changelog(
+    current_version: version.Version, new_version: str
+) -> None:
     # We do this by getting a draft so that we can edit it before writing to the
     # changelog.
     result = run_until_successful(
@@ -558,8 +571,8 @@ def generate_and_write_changelog(current_version: version.Version, new_version:
         f.write(existing_content)
 
     # Remove all the news fragments
-    for f in glob.iglob("changelog.d/*.*"):
-        os.remove(f)
+    for filename in glob.iglob("changelog.d/*.*"):
+        os.remove(filename)
 
 
 if __name__ == "__main__":
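Renaming the loop variable is likely also a typing fix: earlier in this function `f` is bound to a file object by a `with open(...) as f:` block, and reusing the name for the `str` paths from `glob.iglob()` would rebind it to an incompatible type, which mypy rejects. Roughly (a sketch, not the script's actual body):

    import glob
    import os


    def remove_news_fragments(new_entry: str) -> None:
        with open("CHANGES.md", "a") as f:
            f.write(new_entry)

        # A distinct name keeps the file-object type of `f` separate from the str paths.
        for filename in glob.iglob("changelog.d/*.*"):
            os.remove(filename)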
scripts-dev/sign_json.py

@@ -27,7 +27,7 @@ from synapse.crypto.event_signing import add_hashes_and_signatures
 from synapse.util import json_encoder
 
 
-def main():
+def main() -> None:
     parser = argparse.ArgumentParser(
         description="""Adds a signature to a JSON object.
 