
Commit fc244bb

Use type hinting generics in standard collections (#19046)
This is PEP 585, added in Python 3.9 (https://peps.python.org/pep-0585/), enforced via ruff's non-pep585-annotation rule (https://docs.astral.sh/ruff/rules/non-pep585-annotation/).
1 parent cba3a81 commit fc244bb
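
For illustration, the pattern this commit standardises on looks like the following (a minimal sketch with a hypothetical tally function, not code taken from the diff): annotations that used the typing aliases for concrete containers are rewritten to use the built-in generics, which are subscriptable on Python 3.9+.

# Before: container aliases imported from typing (hypothetical example)
from typing import Dict, List

def tally(items: List[str]) -> Dict[str, int]:
    counts: Dict[str, int] = {}
    for item in items:
        counts[item] = counts.get(item, 0) + 1
    return counts

# After: built-in generics per PEP 585; no typing import is needed for these
def tally(items: list[str]) -> dict[str, int]:
    counts: dict[str, int] = {}
    for item in items:
        counts[item] = counts.get(item, 0) + 1
    return counts

As the diffs below show, only aliases with built-in equivalents (Dict, List, Set) are converted; names such as Any, Optional, Mapping and MutableMapping are still imported from typing.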

File tree: 539 files changed (+4601, -5068 lines)


build_rust.py

Lines changed: 2 additions & 2 deletions
@@ -2,13 +2,13 @@

 import itertools
 import os
-from typing import Any, Dict
+from typing import Any

 from packaging.specifiers import SpecifierSet
 from setuptools_rust import Binding, RustExtension


-def build(setup_kwargs: Dict[str, Any]) -> None:
+def build(setup_kwargs: dict[str, Any]) -> None:
     original_project_dir = os.path.dirname(os.path.realpath(__file__))
     cargo_toml_path = os.path.join(original_project_dir, "rust", "Cargo.toml")

changelog.d/19046.misc

Lines changed: 1 addition & 0 deletions
@@ -0,0 +1 @@
+Use type hinting generics in standard collections, as per PEP 585, added in Python 3.9.

contrib/graph/graph.py

Lines changed: 2 additions & 3 deletions
@@ -24,7 +24,6 @@
 import html
 import json
 import urllib.request
-from typing import List

 import pydot

@@ -33,7 +32,7 @@ def make_name(pdu_id: str, origin: str) -> str:
     return f"{pdu_id}@{origin}"


-def make_graph(pdus: List[dict], filename_prefix: str) -> None:
+def make_graph(pdus: list[dict], filename_prefix: str) -> None:
     """
     Generate a dot and SVG file for a graph of events in the room based on the
     topological ordering by querying a homeserver.
@@ -127,7 +126,7 @@ def make_graph(pdus: List[dict], filename_prefix: str) -> None:
     graph.write_svg("%s.svg" % filename_prefix, prog="dot")


-def get_pdus(host: str, room: str) -> List[dict]:
+def get_pdus(host: str, room: str) -> list[dict]:
     transaction = json.loads(
         urllib.request.urlopen(
             f"http://{host}/_matrix/federation/v1/context/{room}/"

docker/configure_workers_and_start.py

Lines changed: 26 additions & 29 deletions
@@ -65,13 +65,10 @@
 from pathlib import Path
 from typing import (
     Any,
-    Dict,
-    List,
     Mapping,
     MutableMapping,
     NoReturn,
     Optional,
-    Set,
     SupportsIndex,
 )

@@ -96,7 +93,7 @@
 # Watching /_matrix/media and related needs a "media" listener
 # Stream Writers require "client" and "replication" listeners because they
 # have to attach by instance_map to the master process and have client endpoints.
-WORKERS_CONFIG: Dict[str, Dict[str, Any]] = {
+WORKERS_CONFIG: dict[str, dict[str, Any]] = {
     "pusher": {
         "app": "synapse.app.generic_worker",
         "listener_resources": [],
@@ -408,7 +405,7 @@ def convert(src: str, dst: str, **template_vars: object) -> None:

 def add_worker_roles_to_shared_config(
     shared_config: dict,
-    worker_types_set: Set[str],
+    worker_types_set: set[str],
     worker_name: str,
     worker_port: int,
 ) -> None:
@@ -471,9 +468,9 @@ def add_worker_roles_to_shared_config(


 def merge_worker_template_configs(
-    existing_dict: Optional[Dict[str, Any]],
-    to_be_merged_dict: Dict[str, Any],
-) -> Dict[str, Any]:
+    existing_dict: Optional[dict[str, Any]],
+    to_be_merged_dict: dict[str, Any],
+) -> dict[str, Any]:
     """When given an existing dict of worker template configuration consisting with both
     dicts and lists, merge new template data from WORKERS_CONFIG(or create) and
     return new dict.
@@ -484,7 +481,7 @@ def merge_worker_template_configs(
             existing_dict.
     Returns: The newly merged together dict values.
     """
-    new_dict: Dict[str, Any] = {}
+    new_dict: dict[str, Any] = {}
     if not existing_dict:
         # It doesn't exist yet, just use the new dict(but take a copy not a reference)
         new_dict = to_be_merged_dict.copy()
@@ -509,8 +506,8 @@ def merge_worker_template_configs(


 def insert_worker_name_for_worker_config(
-    existing_dict: Dict[str, Any], worker_name: str
-) -> Dict[str, Any]:
+    existing_dict: dict[str, Any], worker_name: str
+) -> dict[str, Any]:
     """Insert a given worker name into the worker's configuration dict.

     Args:
@@ -526,7 +523,7 @@ def insert_worker_name_for_worker_config(
     return dict_to_edit


-def apply_requested_multiplier_for_worker(worker_types: List[str]) -> List[str]:
+def apply_requested_multiplier_for_worker(worker_types: list[str]) -> list[str]:
     """
     Apply multiplier(if found) by returning a new expanded list with some basic error
     checking.
@@ -587,7 +584,7 @@ def is_sharding_allowed_for_worker_type(worker_type: str) -> bool:

 def split_and_strip_string(
     given_string: str, split_char: str, max_split: SupportsIndex = -1
-) -> List[str]:
+) -> list[str]:
     """
     Helper to split a string on split_char and strip whitespace from each end of each
     element.
@@ -616,8 +613,8 @@ def generate_base_homeserver_config() -> None:


 def parse_worker_types(
-    requested_worker_types: List[str],
-) -> Dict[str, Set[str]]:
+    requested_worker_types: list[str],
+) -> dict[str, set[str]]:
     """Read the desired list of requested workers and prepare the data for use in
     generating worker config files while also checking for potential gotchas.

@@ -633,14 +630,14 @@ def parse_worker_types(
     # A counter of worker_base_name -> int. Used for determining the name for a given
     # worker when generating its config file, as each worker's name is just
     # worker_base_name followed by instance number
-    worker_base_name_counter: Dict[str, int] = defaultdict(int)
+    worker_base_name_counter: dict[str, int] = defaultdict(int)

     # Similar to above, but more finely grained. This is used to determine we don't have
     # more than a single worker for cases where multiples would be bad(e.g. presence).
-    worker_type_shard_counter: Dict[str, int] = defaultdict(int)
+    worker_type_shard_counter: dict[str, int] = defaultdict(int)

     # The final result of all this processing
-    dict_to_return: Dict[str, Set[str]] = {}
+    dict_to_return: dict[str, set[str]] = {}

     # Handle any multipliers requested for given workers.
     multiple_processed_worker_types = apply_requested_multiplier_for_worker(
@@ -684,7 +681,7 @@ def parse_worker_types(

         # Split the worker_type_string on "+", remove whitespace from ends then make
         # the list a set so it's deduplicated.
-        worker_types_set: Set[str] = set(
+        worker_types_set: set[str] = set(
             split_and_strip_string(worker_type_string, "+")
         )

@@ -743,7 +740,7 @@ def generate_worker_files(
     environ: Mapping[str, str],
     config_path: str,
     data_dir: str,
-    requested_worker_types: Dict[str, Set[str]],
+    requested_worker_types: dict[str, set[str]],
 ) -> None:
     """Read the desired workers(if any) that is passed in and generate shared
     homeserver, nginx and supervisord configs.
@@ -764,7 +761,7 @@
     # First read the original config file and extract the listeners block. Then we'll
     # add another listener for replication. Later we'll write out the result to the
     # shared config file.
-    listeners: List[Any]
+    listeners: list[Any]
     if using_unix_sockets:
         listeners = [
             {
@@ -792,27 +789,27 @@
     # base shared worker jinja2 template. This config file will be passed to all
     # workers, included Synapse's main process. It is intended mainly for disabling
     # functionality when certain workers are spun up, and adding a replication listener.
-    shared_config: Dict[str, Any] = {"listeners": listeners}
+    shared_config: dict[str, Any] = {"listeners": listeners}

     # List of dicts that describe workers.
     # We pass this to the Supervisor template later to generate the appropriate
     # program blocks.
-    worker_descriptors: List[Dict[str, Any]] = []
+    worker_descriptors: list[dict[str, Any]] = []

     # Upstreams for load-balancing purposes. This dict takes the form of the worker
     # type to the ports of each worker. For example:
     # {
     #   worker_type: {1234, 1235, ...}}
     # }
     # and will be used to construct 'upstream' nginx directives.
-    nginx_upstreams: Dict[str, Set[int]] = {}
+    nginx_upstreams: dict[str, set[int]] = {}

     # A map of: {"endpoint": "upstream"}, where "upstream" is a str representing what
     # will be placed after the proxy_pass directive. The main benefit to representing
     # this data as a dict over a str is that we can easily deduplicate endpoints
     # across multiple instances of the same worker. The final rendering will be combined
     # with nginx_upstreams and placed in /etc/nginx/conf.d.
-    nginx_locations: Dict[str, str] = {}
+    nginx_locations: dict[str, str] = {}

     # Create the worker configuration directory if it doesn't already exist
     os.makedirs("/conf/workers", exist_ok=True)
@@ -846,7 +843,7 @@ def generate_worker_files(
     # yaml config file
     for worker_name, worker_types_set in requested_worker_types.items():
         # The collected and processed data will live here.
-        worker_config: Dict[str, Any] = {}
+        worker_config: dict[str, Any] = {}

         # Merge all worker config templates for this worker into a single config
         for worker_type in worker_types_set:
@@ -1029,7 +1026,7 @@ def generate_worker_log_config(
     Returns: the path to the generated file
     """
     # Check whether we should write worker logs to disk, in addition to the console
-    extra_log_template_args: Dict[str, Optional[str]] = {}
+    extra_log_template_args: dict[str, Optional[str]] = {}
     if environ.get("SYNAPSE_WORKERS_WRITE_LOGS_TO_DISK"):
         extra_log_template_args["LOG_FILE_PATH"] = f"{data_dir}/logs/{worker_name}.log"

@@ -1053,7 +1050,7 @@
     return log_config_filepath


-def main(args: List[str], environ: MutableMapping[str, str]) -> None:
+def main(args: list[str], environ: MutableMapping[str, str]) -> None:
     parser = ArgumentParser()
     parser.add_argument(
         "--generate-only",
@@ -1087,7 +1084,7 @@ def main(args: List[str], environ: MutableMapping[str, str]) -> None:
     if not worker_types_env:
         # No workers, just the main process
         worker_types = []
-        requested_worker_types: Dict[str, Any] = {}
+        requested_worker_types: dict[str, Any] = {}
     else:
         # Split type names by comma, ignoring whitespace.
         worker_types = split_and_strip_string(worker_types_env, ",")

docker/start.py

Lines changed: 3 additions & 3 deletions
@@ -6,7 +6,7 @@
 import platform
 import subprocess
 import sys
-from typing import Any, Dict, List, Mapping, MutableMapping, NoReturn, Optional
+from typing import Any, Mapping, MutableMapping, NoReturn, Optional

 import jinja2

@@ -69,7 +69,7 @@ def generate_config_from_template(
     )

     # populate some params from data files (if they exist, else create new ones)
-    environ: Dict[str, Any] = dict(os_environ)
+    environ: dict[str, Any] = dict(os_environ)
     secrets = {
         "registration": "SYNAPSE_REGISTRATION_SHARED_SECRET",
         "macaroon": "SYNAPSE_MACAROON_SECRET_KEY",
@@ -200,7 +200,7 @@ def run_generate_config(environ: Mapping[str, str], ownership: Optional[str]) ->
     subprocess.run(args, check=True)


-def main(args: List[str], environ: MutableMapping[str, str]) -> None:
+def main(args: list[str], environ: MutableMapping[str, str]) -> None:
     mode = args[1] if len(args) > 1 else "run"

     # if we were given an explicit user to switch to, do so

pyproject.toml

Lines changed: 6 additions & 0 deletions
@@ -78,6 +78,12 @@ select = [
     "LOG",
     # flake8-logging-format
     "G",
+    # pyupgrade
+    "UP006",
+]
+extend-safe-fixes = [
+    # pyupgrade
+    "UP006"
 ]

 [tool.ruff.lint.isort]
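
For context, UP006 is ruff's non-pep585-annotation rule (linked in the commit message above), and listing it under extend-safe-fixes treats its fix as safe, so running ruff check --fix can apply the rewrite automatically. A hypothetical example of what the rule flags and what the autofix produces (illustrative only, not taken from this diff):

from typing import Dict

def load(path: str) -> Dict[str, str]: ...   # annotation flagged by UP006

def load(path: str) -> dict[str, str]: ...   # form produced by the autofix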

scripts-dev/build_debian_packages.py

Lines changed: 2 additions & 2 deletions
@@ -18,7 +18,7 @@
 import threading
 from concurrent.futures import ThreadPoolExecutor
 from types import FrameType
-from typing import Collection, Optional, Sequence, Set
+from typing import Collection, Optional, Sequence

 # These are expanded inside the dockerfile to be a fully qualified image name.
 # e.g. docker.io/library/debian:bullseye
@@ -54,7 +54,7 @@ def __init__(
     ):
         self.redirect_stdout = redirect_stdout
         self._docker_build_args = tuple(docker_build_args or ())
-        self.active_containers: Set[str] = set()
+        self.active_containers: set[str] = set()
         self._lock = threading.Lock()
         self._failed = False

scripts-dev/check_locked_deps_have_sdists.py

Lines changed: 1 addition & 2 deletions
@@ -21,7 +21,6 @@
 #
 import sys
 from pathlib import Path
-from typing import Dict, List

 import tomli

@@ -33,7 +32,7 @@ def main() -> None:

     # Poetry 1.3+ lockfile format:
     # There's a `files` inline table in each [[package]]
-    packages_to_assets: Dict[str, List[Dict[str, str]]] = {
+    packages_to_assets: dict[str, list[dict[str, str]]] = {
         package["name"]: package["files"] for package in lockfile_content["package"]
     }
