Add missing type definitions for scripts in docker folder (#12280)

Signed-off-by: Jorge Florian <jafn28@gmail.com>
Jorge Florian 2022-04-08 12:10:58 +02:00 committed by GitHub
parent 7732c4902c
commit 78e4d96a4d
3 changed files with 7 additions and 4 deletions

changelog.d/12280.misc Normal file
View File

@@ -0,0 +1 @@
+Add missing type definitions for scripts in docker folder. Contributed by Jorge Florian.

View File

@@ -29,6 +29,7 @@
 import os
 import subprocess
 import sys
+from typing import Any, Dict, Set

 import jinja2
 import yaml
@@ -36,7 +37,7 @@ import yaml

 MAIN_PROCESS_HTTP_LISTENER_PORT = 8080

-WORKERS_CONFIG = {
+WORKERS_CONFIG: Dict[str, Dict[str, Any]] = {
     "pusher": {
         "app": "synapse.app.pusher",
         "listener_resources": [],
@@ -355,7 +356,7 @@ def generate_worker_files(environ, config_path: str, data_dir: str):
     #   worker_type: {1234, 1235, ...}}
     # }
     # and will be used to construct 'upstream' nginx directives.
-    nginx_upstreams = {}
+    nginx_upstreams: Dict[str, Set[int]] = {}

     # A map of: {"endpoint": "upstream"}, where "upstream" is a str representing what will be
     # placed after the proxy_pass directive. The main benefit to representing this data as a
@@ -384,7 +385,7 @@ def generate_worker_files(environ, config_path: str, data_dir: str):
     # A counter of worker_type -> int. Used for determining the name for a given
     # worker type when generating its config file, as each worker's name is just
     # worker_type + instance #
-    worker_type_counter = {}
+    worker_type_counter: Dict[str, int] = {}

     # For each worker type specified by the user, create config values
     for worker_type in worker_types:
@@ -404,7 +405,7 @@
         # e.g. federation_reader1
         worker_name = worker_type + str(new_worker_count)
         worker_config.update(
-            {"name": worker_name, "port": worker_port, "config_path": config_path}
+            {"name": worker_name, "port": str(worker_port), "config_path": config_path}
         )

         # Update the shared config with any worker-type specific options
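For context on what the new annotations buy, the sketch below is not part of the commit; it is a minimal, self-contained illustration of how an annotation like the Dict[str, Set[int]] on nginx_upstreams lets mypy reject a port passed as a string. The helper name, worker names, and port numbers are made up for illustration.

from typing import Dict, Set

# Hypothetical stand-in for nginx_upstreams above: worker type -> listener ports.
upstreams: Dict[str, Set[int]] = {}

def add_upstream(worker_type: str, port: int) -> None:
    # Create the port set on first use, then record the port.
    upstreams.setdefault(worker_type, set()).add(port)

add_upstream("federation_reader", 18081)   # fine
# add_upstream("pusher", "18082")          # mypy: incompatible type "str"; expected "int"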

View File

@@ -11,6 +11,7 @@ local_partial_types = True
 no_implicit_optional = True

 files =
+  docker/,
   scripts-dev/,
   setup.py,
   synapse/,
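With docker/ added to the files list, these scripts are covered by the project's normal mypy run. As a rough local check (an assumption about workflow, not something stated in the commit), mypy can also be driven from Python through its api module, run from the repository root so the updated configuration is picked up:

from mypy import api

# Check only the docker scripts; mypy.api.run returns (stdout, stderr, exit_status).
stdout, stderr, exit_status = api.run(["docker/"])
print(stdout or stderr)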