[libcxx-commits] [clang] [libcxx] [lldb] [llvm] [polly] [python] remove Python 3.9 specific typing annotations (PR #156868)
Charles Zablit via libcxx-commits
libcxx-commits at lists.llvm.org
Thu Sep 4 05:02:20 PDT 2025
https://github.com/charles-zablit created https://github.com/llvm/llvm-project/pull/156868
This patch replaces any occurrence of lower-case generic type hints with their `typing` equivalents, e.g. `list[str]` becomes `List[str]`.
[Type hinting generics in standard collections were introduced in Python 3.9](https://peps.python.org/pep-0585/); however, the minimum supported Python version is 3.8. This patch helps maintain backwards compatibility with Python versions lower than 3.9 and will unblock the [bots for Ubuntu 20.04](https://ci.swift.org/view/Swift%20rebranch/job/oss-swift-rebranch-package-ubuntu-20_04/2847/consoleText), which ships with Python 3.8.
>From 4be53b48dd31ebaa18c1b9d34fac78d5ce831797 Mon Sep 17 00:00:00 2001
From: Charles Zablit <c_zablit at apple.com>
Date: Thu, 4 Sep 2025 12:56:53 +0100
Subject: [PATCH] [python] remove Python 3.9 specific typing annotations
---
.ci/compute_projects.py | 7 +-
.ci/metrics/metrics.py | 5 +-
.github/workflows/commit-access-review.py | 4 +-
clang/bindings/python/clang/cindex.py | 20 ++---
clang/docs/tools/dump_ast_matchers.py | 3 +-
.../dexter/dex/dextIR/DextIR.py | 2 +-
libcxx/utils/generate_escaped_output_table.py | 10 +--
...enerate_extended_grapheme_cluster_table.py | 12 +--
...generate_extended_grapheme_cluster_test.py | 10 +--
.../generate_indic_conjunct_break_table.py | 12 +--
.../utils/generate_width_estimation_table.py | 10 +--
lldb/examples/python/templates/parsed_cmd.py | 5 +-
.../Python/lldbsuite/test/decorators.py | 3 +-
.../test/tools/lldb-dap/dap_server.py | 74 +++++++++----------
.../test/tools/lldb-dap/lldbdap_testcase.py | 10 +--
.../TestBreakpointByLineAndColumn.py | 4 +-
.../API/python_api/target/TestTargetAPI.py | 4 +-
.../API/terminal/TestSTTYBeforeAndAfter.py | 6 +-
llvm/utils/UpdateTestChecks/common.py | 4 +-
llvm/utils/lldbDataFormatters.py | 5 +-
llvm/utils/spirv-sim/spirv-sim.py | 8 +-
polly/lib/External/isl/isl_test_python.py | 13 ++--
third-party/benchmark/tools/strip_asm.py | 3 +-
23 files changed, 125 insertions(+), 109 deletions(-)
diff --git a/.ci/compute_projects.py b/.ci/compute_projects.py
index 40dd0507a9eaf..c90ef223843f6 100644
--- a/.ci/compute_projects.py
+++ b/.ci/compute_projects.py
@@ -7,6 +7,7 @@
"""
from collections.abc import Set
+from typing import List
import pathlib
import platform
import sys
@@ -115,8 +116,6 @@
"lld": "check-lld",
"flang": "check-flang",
"libc": "check-libc",
- "lld": "check-lld",
- "lldb": "check-lldb",
"mlir": "check-mlir",
"openmp": "check-openmp",
"polly": "check-polly",
@@ -204,7 +203,7 @@ def _compute_runtime_check_targets(projects_to_test: Set[str]) -> Set[str]:
return check_targets
-def _get_modified_projects(modified_files: list[str]) -> Set[str]:
+def _get_modified_projects(modified_files: List[str]) -> Set[str]:
modified_projects = set()
for modified_file in modified_files:
path_parts = pathlib.Path(modified_file).parts
@@ -222,7 +221,7 @@ def _get_modified_projects(modified_files: list[str]) -> Set[str]:
return modified_projects
-def get_env_variables(modified_files: list[str], platform: str) -> Set[str]:
+def get_env_variables(modified_files: List[str], platform: str) -> Set[str]:
modified_projects = _get_modified_projects(modified_files)
projects_to_test = _compute_projects_to_test(modified_projects, platform)
projects_to_build = _compute_projects_to_build(projects_to_test)
diff --git a/.ci/metrics/metrics.py b/.ci/metrics/metrics.py
index 143e6ab4cf46a..cb6309bd91224 100644
--- a/.ci/metrics/metrics.py
+++ b/.ci/metrics/metrics.py
@@ -1,3 +1,4 @@
+from typing import List, Tuple, Set
import collections
import datetime
import github
@@ -72,8 +73,8 @@ class GaugeMetric:
def github_get_metrics(
- github_repo: github.Repository, last_workflows_seen_as_completed: set[int]
-) -> tuple[list[JobMetrics], int]:
+ github_repo: github.Repository, last_workflows_seen_as_completed: Set[int]
+) -> Tuple[List[JobMetrics], int]:
"""Gets the metrics for specified Github workflows.
This function takes in a list of workflows to track, and optionally the
diff --git a/.github/workflows/commit-access-review.py b/.github/workflows/commit-access-review.py
index 4f539fe98004a..600541556ceba 100644
--- a/.github/workflows/commit-access-review.py
+++ b/.github/workflows/commit-access-review.py
@@ -9,13 +9,13 @@
#
# ===------------------------------------------------------------------------===#
+from typing import List
import datetime
import github
import re
import requests
import time
import sys
-import re
class User:
@@ -64,7 +64,7 @@ def __repr__(self):
def check_manual_requests(
gh: github.Github, start_date: datetime.datetime
-) -> list[str]:
+) -> List[str]:
"""
Return a list of users who have been asked since ``start_date`` if they
want to keep their commit access or if they have applied for commit
diff --git a/clang/bindings/python/clang/cindex.py b/clang/bindings/python/clang/cindex.py
index 824674309d262..353b7c833f86f 100644
--- a/clang/bindings/python/clang/cindex.py
+++ b/clang/bindings/python/clang/cindex.py
@@ -93,6 +93,8 @@
Iterator,
Literal,
Optional,
+ List,
+ Tuple,
Sequence,
Type as TType,
TypeVar,
@@ -106,9 +108,9 @@
StrPath: TypeAlias = TUnion[str, os.PathLike[str]]
LibFunc: TypeAlias = TUnion[
- "tuple[str, Optional[list[Any]]]",
- "tuple[str, Optional[list[Any]], Any]",
- "tuple[str, Optional[list[Any]], Any, Callable[..., Any]]",
+ "Tuple[str, Optional[List[Any]]]",
+ "Tuple[str, Optional[List[Any]], Any]",
+ "Tuple[str, Optional[List[Any]], Any, Callable[..., Any]]",
]
TSeq = TypeVar("TSeq", covariant=True)
@@ -2216,7 +2218,7 @@ def get_children(self) -> Iterator[Cursor]:
"""Return an iterator for accessing the children of this cursor."""
# FIXME: Expose iteration from CIndex, PR6125.
- def visitor(child: Cursor, _: Cursor, children: list[Cursor]) -> int:
+ def visitor(child: Cursor, _: Cursor, children: List[Cursor]) -> int:
# FIXME: Document this assertion in API.
assert not child.is_null()
@@ -2225,7 +2227,7 @@ def visitor(child: Cursor, _: Cursor, children: list[Cursor]) -> int:
children.append(child)
return 1 # continue
- children: list[Cursor] = []
+ children: List[Cursor] = []
conf.lib.clang_visitChildren(self, cursor_visit_callback(visitor), children)
return iter(children)
@@ -2845,7 +2847,7 @@ def visitor(field: Cursor, _: Any) -> Literal[1]:
fields.append(field)
return 1 # continue
- fields: list[Cursor] = []
+ fields: List[Cursor] = []
conf.lib.clang_Type_visitFields(self, fields_visit_callback(visitor), fields)
return iter(fields)
@@ -2860,7 +2862,7 @@ def visitor(base: Cursor, _: Any) -> Literal[1]:
bases.append(base)
return 1 # continue
- bases: list[Cursor] = []
+ bases: List[Cursor] = []
conf.lib.clang_visitCXXBaseClasses(self, fields_visit_callback(visitor), bases)
return iter(bases)
@@ -2875,7 +2877,7 @@ def visitor(method: Cursor, _: Any) -> Literal[1]:
methods.append(method)
return 1 # continue
- methods: list[Cursor] = []
+ methods: List[Cursor] = []
conf.lib.clang_visitCXXMethods(self, fields_visit_callback(visitor), methods)
return iter(methods)
@@ -3992,7 +3994,7 @@ def set_property(self, property, value):
fields_visit_callback = CFUNCTYPE(c_int, Cursor, py_object)
# Functions strictly alphabetical order.
-FUNCTION_LIST: list[LibFunc] = [
+FUNCTION_LIST: List[LibFunc] = [
(
"clang_annotateTokens",
[TranslationUnit, POINTER(Token), c_uint, POINTER(Cursor)],
diff --git a/clang/docs/tools/dump_ast_matchers.py b/clang/docs/tools/dump_ast_matchers.py
index 46b7bb718ba08..a0e1882a962ec 100755
--- a/clang/docs/tools/dump_ast_matchers.py
+++ b/clang/docs/tools/dump_ast_matchers.py
@@ -6,6 +6,7 @@
import collections
import re
import os
+from typing import Dict
try:
from urllib.request import urlopen
@@ -185,7 +186,7 @@ def add_matcher(result_type, name, args, comment, is_dyncast=False):
lookup = result_type + name + esc(args)
if dict.get(lookup) is None or len(dict.get(lookup)) < len(matcher_html):
- dict[lookup] = matcher_html
+ dict[lookup] = matcher_html
def act_on_decl(declaration, comment, allowed_types):
diff --git a/cross-project-tests/debuginfo-tests/dexter/dex/dextIR/DextIR.py b/cross-project-tests/debuginfo-tests/dexter/dex/dextIR/DextIR.py
index 42500c4b9681d..98e8c93e0aa83 100644
--- a/cross-project-tests/debuginfo-tests/dexter/dex/dextIR/DextIR.py
+++ b/cross-project-tests/debuginfo-tests/dexter/dex/dextIR/DextIR.py
@@ -38,7 +38,7 @@ class DextIR:
determine the debugging score for a given test.
Args:
- commands: { name (str), commands (list[CommandIR])
+ commands: { name (str), commands (List[CommandIR])
"""
def __init__(
diff --git a/libcxx/utils/generate_escaped_output_table.py b/libcxx/utils/generate_escaped_output_table.py
index 59dd707ae6126..c618d4035064f 100755
--- a/libcxx/utils/generate_escaped_output_table.py
+++ b/libcxx/utils/generate_escaped_output_table.py
@@ -16,7 +16,7 @@
from io import StringIO
from pathlib import Path
from dataclasses import dataclass
-from typing import Optional
+from typing import Optional, List
import re
import sys
@@ -60,7 +60,7 @@ def parsePropertyLine(inputLine: str) -> Optional[PropertyRange]:
return None
-def compactPropertyRanges(input: list[PropertyRange]) -> list[PropertyRange]:
+def compactPropertyRanges(input: List[PropertyRange]) -> List[PropertyRange]:
"""
Merges overlapping and consecutive ranges to one range.
@@ -242,8 +242,8 @@ def compactPropertyRanges(input: list[PropertyRange]) -> list[PropertyRange]:
#endif // _LIBCPP___FORMAT_ESCAPED_OUTPUT_TABLE_H"""
-def property_ranges_to_table(ranges: list[PropertyRange]) -> list[Entry]:
- result = list[Entry]()
+def property_ranges_to_table(ranges: List[PropertyRange]) -> List[Entry]:
+ result = List[Entry]()
high = -1
for range in sorted(ranges, key=lambda x: x.lower):
# Validate overlapping ranges
@@ -265,7 +265,7 @@ def property_ranges_to_table(ranges: list[PropertyRange]) -> list[Entry]:
def generate_cpp_data(
- ranges: list[PropertyRange], unallocated: int, gap_lower: int, gap_upper: int
+ ranges: List[PropertyRange], unallocated: int, gap_lower: int, gap_upper: int
) -> str:
result = StringIO()
table = property_ranges_to_table(ranges)
diff --git a/libcxx/utils/generate_extended_grapheme_cluster_table.py b/libcxx/utils/generate_extended_grapheme_cluster_table.py
index eba88a4f48776..1bb598517fa7d 100755
--- a/libcxx/utils/generate_extended_grapheme_cluster_table.py
+++ b/libcxx/utils/generate_extended_grapheme_cluster_table.py
@@ -16,7 +16,7 @@
from io import StringIO
from pathlib import Path
from dataclasses import dataclass
-from typing import Optional
+from typing import Optional, List
import re
import sys
@@ -54,7 +54,7 @@ def parsePropertyLine(inputLine: str) -> Optional[PropertyRange]:
return None
-def compactPropertyRanges(input: list[PropertyRange]) -> list[PropertyRange]:
+def compactPropertyRanges(input: List[PropertyRange]) -> List[PropertyRange]:
"""
Merges consecutive ranges with the same property to one range.
@@ -238,10 +238,10 @@ def compactPropertyRanges(input: list[PropertyRange]) -> list[PropertyRange]:
def property_ranges_to_table(
- ranges: list[PropertyRange], props: list[str]
-) -> list[Entry]:
+ ranges: List[PropertyRange], props: List[str]
+) -> List[Entry]:
assert len(props) < 16
- result = list[Entry]()
+ result = List[Entry]()
high = -1
for range in sorted(ranges, key=lambda x: x.lower):
# Validate overlapping ranges
@@ -262,7 +262,7 @@ def property_ranges_to_table(
cpp_entrytemplate = " 0x{:08x}"
-def generate_cpp_data(prop_name: str, ranges: list[PropertyRange]) -> str:
+def generate_cpp_data(prop_name: str, ranges: List[PropertyRange]) -> str:
result = StringIO()
prop_values = sorted(set(x.prop for x in ranges))
table = property_ranges_to_table(ranges, prop_values)
diff --git a/libcxx/utils/generate_extended_grapheme_cluster_test.py b/libcxx/utils/generate_extended_grapheme_cluster_test.py
index e0a6003ecd53c..e83fa7e1ce722 100755
--- a/libcxx/utils/generate_extended_grapheme_cluster_test.py
+++ b/libcxx/utils/generate_extended_grapheme_cluster_test.py
@@ -15,17 +15,17 @@
from pathlib import Path
from dataclasses import dataclass, field
-from typing import Optional, TextIO
+from typing import Optional, TextIO, List
import sys
@dataclass
class BreakTestItem:
- code_points: list[int] = field(default_factory=list)
+ code_points: List[int] = field(default_factory=list)
encoded: str = ""
- breaks_utf8: list[int] = field(default_factory=list)
- breaks_utf16: list[int] = field(default_factory=list)
- breaks_utf32: list[int] = field(default_factory=list)
+ breaks_utf8: List[int] = field(default_factory=list)
+ breaks_utf16: List[int] = field(default_factory=list)
+ breaks_utf32: List[int] = field(default_factory=list)
class CommentLine:
diff --git a/libcxx/utils/generate_indic_conjunct_break_table.py b/libcxx/utils/generate_indic_conjunct_break_table.py
index 580d8157ffebf..fc1ab26c42a4c 100755
--- a/libcxx/utils/generate_indic_conjunct_break_table.py
+++ b/libcxx/utils/generate_indic_conjunct_break_table.py
@@ -16,7 +16,7 @@
from io import StringIO
from pathlib import Path
from dataclasses import dataclass
-from typing import Optional
+from typing import Optional, List
import re
import sys
@@ -54,7 +54,7 @@ def parsePropertyLine(inputLine: str) -> Optional[PropertyRange]:
-def compactPropertyRanges(input: list[PropertyRange]) -> list[PropertyRange]:
+def compactPropertyRanges(input: List[PropertyRange]) -> List[PropertyRange]:
"""
Merges consecutive ranges with the same property to one range.
@@ -231,10 +231,10 @@ def compactPropertyRanges(input: list[PropertyRange]) -> list[PropertyRange]:
def property_ranges_to_table(
- ranges: list[PropertyRange], props: list[str]
-) -> list[Entry]:
+ ranges: List[PropertyRange], props: List[str]
+) -> List[Entry]:
assert len(props) < 4
- result = list[Entry]()
+ result = List[Entry]()
high = -1
for range in sorted(ranges, key=lambda x: x.lower):
# Validate overlapping ranges
@@ -255,7 +255,7 @@ def property_ranges_to_table(
cpp_entrytemplate = " 0x{:08x}"
-def generate_cpp_data(prop_name: str, ranges: list[PropertyRange]) -> str:
+def generate_cpp_data(prop_name: str, ranges: List[PropertyRange]) -> str:
result = StringIO()
prop_values = sorted(set(x.prop for x in ranges))
table = property_ranges_to_table(ranges, prop_values)
diff --git a/libcxx/utils/generate_width_estimation_table.py b/libcxx/utils/generate_width_estimation_table.py
index f81f0ba77489e..c1b1aa683db63 100644
--- a/libcxx/utils/generate_width_estimation_table.py
+++ b/libcxx/utils/generate_width_estimation_table.py
@@ -16,7 +16,7 @@
from io import StringIO
from pathlib import Path
from dataclasses import dataclass
-from typing import Optional
+from typing import Optional, List
import re
import sys
@@ -75,7 +75,7 @@ def parsePropertyLine(inputLine: str) -> Optional[PropertyRange]:
return None
-def compactPropertyRanges(input: list[PropertyRange]) -> list[PropertyRange]:
+def compactPropertyRanges(input: List[PropertyRange]) -> List[PropertyRange]:
"""
Merges overlapping and consecutive ranges to one range.
@@ -268,14 +268,14 @@ def compactPropertyRanges(input: list[PropertyRange]) -> list[PropertyRange]:
#endif // _LIBCPP___FORMAT_WIDTH_ESTIMATION_TABLE_H"""
-def property_ranges_to_table(ranges: list[PropertyRange]) -> list[Entry]:
+def property_ranges_to_table(ranges: List[PropertyRange]) -> List[Entry]:
# The maximum value that can be encoded in the available bits in the
# __entries table.
upper_bound = 0x3FFFF
# The maximum offset in an __entries entry. Larger offsets will be
# splitted and stored in multiple entries.
chunk = 16384
- result = list[Entry]()
+ result = List[Entry]()
high = -1
for range in sorted(ranges, key=lambda x: x.lower):
# Validate overlapping ranges
@@ -297,7 +297,7 @@ def property_ranges_to_table(ranges: list[PropertyRange]) -> list[Entry]:
cpp_entrytemplate = " 0x{:08x} /* {:08x} - {:08x} [{:>5}] */"
-def generate_cpp_data(ranges: list[PropertyRange], upper_bound: int) -> str:
+def generate_cpp_data(ranges: List[PropertyRange], upper_bound: int) -> str:
result = StringIO()
table = property_ranges_to_table(ranges)
result.write(
diff --git a/lldb/examples/python/templates/parsed_cmd.py b/lldb/examples/python/templates/parsed_cmd.py
index 13d6eae405c08..56122881c5396 100644
--- a/lldb/examples/python/templates/parsed_cmd.py
+++ b/lldb/examples/python/templates/parsed_cmd.py
@@ -109,6 +109,7 @@ def handle_argument_completion(self, args, arg_pos, cursor_pos):
import inspect
import lldb
import sys
+from typing import Dict
from abc import abstractmethod
# Some methods to translate common value types. Should return a
@@ -350,9 +351,9 @@ def add_option(self, short_option, long_option, help, default,
"default" : default}
if enum_values:
- dict["enum_values"] = enum_values
+ dict["enum_values"] = enum_values
if groups:
- dict["groups"] = groups
+ dict["groups"] = groups
self.options_dict[long_option] = dict
diff --git a/lldb/packages/Python/lldbsuite/test/decorators.py b/lldb/packages/Python/lldbsuite/test/decorators.py
index a391319ca9b0e..ff96673b8e6e3 100644
--- a/lldb/packages/Python/lldbsuite/test/decorators.py
+++ b/lldb/packages/Python/lldbsuite/test/decorators.py
@@ -1,6 +1,7 @@
# System modules
from functools import wraps
from packaging import version
+from typing import List
import ctypes
import locale
import os
@@ -1148,7 +1149,7 @@ def is_feature_enabled():
return skipTestIfFn(is_feature_enabled)
-def skipIfBuildType(types: list[str]):
+def skipIfBuildType(types: List[str]):
"""Skip tests if built in a specific CMAKE_BUILD_TYPE.
Supported types include 'Release', 'RelWithDebInfo', 'Debug', 'MinSizeRel'.
diff --git a/lldb/packages/Python/lldbsuite/test/tools/lldb-dap/dap_server.py b/lldb/packages/Python/lldbsuite/test/tools/lldb-dap/dap_server.py
index 9786678aa53f9..3643c363bd6df 100644
--- a/lldb/packages/Python/lldbsuite/test/tools/lldb-dap/dap_server.py
+++ b/lldb/packages/Python/lldbsuite/test/tools/lldb-dap/dap_server.py
@@ -12,12 +12,12 @@
import sys
import threading
import time
-from typing import Any, Optional, Union, BinaryIO, TextIO
+from typing import Any, Optional, Union, BinaryIO, TextIO, List, Dict, Tuple
## DAP type references
-Event = dict[str, Any]
-Request = dict[str, Any]
-Response = dict[str, Any]
+Event = Dict[str, Any]
+Request = Dict[str, Any]
+Response = Dict[str, Any]
ProtocolMessage = Union[Event, Request, Response]
@@ -144,7 +144,7 @@ def __init__(
self,
recv: BinaryIO,
send: BinaryIO,
- init_commands: list[str],
+ init_commands: List[str],
log_file: Optional[TextIO] = None,
):
# For debugging test failures, try setting `trace_file = sys.stderr`.
@@ -152,20 +152,20 @@ def __init__(
self.log_file = log_file
self.send = send
self.recv = recv
- self.recv_packets: list[Optional[ProtocolMessage]] = []
+ self.recv_packets: List[Optional[ProtocolMessage]] = []
self.recv_condition = threading.Condition()
self.recv_thread = threading.Thread(target=self._read_packet_thread)
self.process_event_body = None
self.exit_status: Optional[int] = None
- self.capabilities: dict[str, Any] = {}
- self.progress_events: list[Event] = []
+ self.capabilities: Dict[str, Any] = {}
+ self.progress_events: List[Event] = []
self.reverse_requests = []
self.sequence = 1
self.threads = None
self.thread_stop_reasons = {}
self.recv_thread.start()
self.output_condition = threading.Condition()
- self.output: dict[str, list[str]] = {}
+ self.output: Dict[str, List[str]] = {}
self.configuration_done_sent = False
self.initialized = False
self.frame_scopes = {}
@@ -319,7 +319,7 @@ def _process_continued(self, all_threads_continued: bool):
if all_threads_continued:
self.thread_stop_reasons = {}
- def _update_verified_breakpoints(self, breakpoints: list[Event]):
+ def _update_verified_breakpoints(self, breakpoints: List[Event]):
for breakpoint in breakpoints:
if "id" in breakpoint:
self.resolved_breakpoints[str(breakpoint["id"])] = breakpoint.get(
@@ -347,7 +347,7 @@ def send_packet(self, command_dict: Request, set_sequence=True):
def recv_packet(
self,
filter_type: Optional[str] = None,
- filter_event: Optional[Union[str, list[str]]] = None,
+ filter_event: Optional[Union[str, List[str]]] = None,
timeout: Optional[float] = None,
) -> Optional[ProtocolMessage]:
"""Get a JSON packet from the VSCode debug adapter. This function
@@ -435,7 +435,7 @@ def send_recv(self, command):
return None
def wait_for_event(
- self, filter: Union[str, list[str]], timeout: Optional[float] = None
+ self, filter: Union[str, List[str]], timeout: Optional[float] = None
) -> Optional[Event]:
"""Wait for the first event that matches the filter."""
return self.recv_packet(
@@ -444,7 +444,7 @@ def wait_for_event(
def wait_for_stopped(
self, timeout: Optional[float] = None
- ) -> Optional[list[Event]]:
+ ) -> Optional[List[Event]]:
stopped_events = []
stopped_event = self.wait_for_event(
filter=["stopped", "exited"], timeout=timeout
@@ -463,7 +463,7 @@ def wait_for_stopped(
return stopped_events
def wait_for_breakpoint_events(self, timeout: Optional[float] = None):
- breakpoint_events: list[Event] = []
+ breakpoint_events: List[Event] = []
while True:
event = self.wait_for_event("breakpoint", timeout=timeout)
if not event:
@@ -472,7 +472,7 @@ def wait_for_breakpoint_events(self, timeout: Optional[float] = None):
return breakpoint_events
def wait_for_breakpoints_to_be_verified(
- self, breakpoint_ids: list[str], timeout: Optional[float] = None
+ self, breakpoint_ids: List[str], timeout: Optional[float] = None
):
"""Wait for all breakpoints to be verified. Return all unverified breakpoints."""
while any(id not in self.resolved_breakpoints for id in breakpoint_ids):
@@ -654,16 +654,16 @@ def request_attach(
program: Optional[str] = None,
pid: Optional[int] = None,
waitFor=False,
- initCommands: Optional[list[str]] = None,
- preRunCommands: Optional[list[str]] = None,
- attachCommands: Optional[list[str]] = None,
- postRunCommands: Optional[list[str]] = None,
- stopCommands: Optional[list[str]] = None,
- exitCommands: Optional[list[str]] = None,
- terminateCommands: Optional[list[str]] = None,
+ initCommands: Optional[List[str]] = None,
+ preRunCommands: Optional[List[str]] = None,
+ attachCommands: Optional[List[str]] = None,
+ postRunCommands: Optional[List[str]] = None,
+ stopCommands: Optional[List[str]] = None,
+ exitCommands: Optional[List[str]] = None,
+ terminateCommands: Optional[List[str]] = None,
coreFile: Optional[str] = None,
stopOnEntry=False,
- sourceMap: Optional[Union[list[tuple[str, str]], dict[str, str]]] = None,
+ sourceMap: Optional[Union[List[Tuple[str, str]], Dict[str, str]]] = None,
gdbRemotePort: Optional[int] = None,
gdbRemoteHostname: Optional[str] = None,
):
@@ -883,9 +883,9 @@ def request_launch(
self,
program: str,
*,
- args: Optional[list[str]] = None,
+ args: Optional[List[str]] = None,
cwd: Optional[str] = None,
- env: Optional[dict[str, str]] = None,
+ env: Optional[Dict[str, str]] = None,
stopOnEntry=False,
disableASLR=False,
disableSTDIO=False,
@@ -894,14 +894,14 @@ def request_launch(
enableAutoVariableSummaries=False,
displayExtendedBacktrace=False,
enableSyntheticChildDebugging=False,
- initCommands: Optional[list[str]] = None,
- preRunCommands: Optional[list[str]] = None,
- launchCommands: Optional[list[str]] = None,
- postRunCommands: Optional[list[str]] = None,
- stopCommands: Optional[list[str]] = None,
- exitCommands: Optional[list[str]] = None,
- terminateCommands: Optional[list[str]] = None,
- sourceMap: Optional[Union[list[tuple[str, str]], dict[str, str]]] = None,
+ initCommands: Optional[List[str]] = None,
+ preRunCommands: Optional[List[str]] = None,
+ launchCommands: Optional[List[str]] = None,
+ postRunCommands: Optional[List[str]] = None,
+ stopCommands: Optional[List[str]] = None,
+ exitCommands: Optional[List[str]] = None,
+ terminateCommands: Optional[List[str]] = None,
+ sourceMap: Optional[Union[List[Tuple[str, str]], Dict[str, str]]] = None,
sourcePath: Optional[str] = None,
debuggerRoot: Optional[str] = None,
commandEscapePrefix: Optional[str] = None,
@@ -1295,9 +1295,9 @@ def __init__(
self,
executable: Optional[str] = None,
connection: Optional[str] = None,
- init_commands: list[str] = [],
+ init_commands: List[str] = [],
log_file: Optional[TextIO] = None,
- env: Optional[dict[str, str]] = None,
+ env: Optional[Dict[str, str]] = None,
):
self.process = None
self.connection = None
@@ -1333,10 +1333,10 @@ def launch(
cls,
*,
executable: str,
- env: Optional[dict[str, str]] = None,
+ env: Optional[Dict[str, str]] = None,
log_file: Optional[TextIO] = None,
connection: Optional[str] = None,
- ) -> tuple[subprocess.Popen, Optional[str]]:
+ ) -> Tuple[subprocess.Popen, Optional[str]]:
adapter_env = os.environ.copy()
if env is not None:
adapter_env.update(env)
diff --git a/lldb/packages/Python/lldbsuite/test/tools/lldb-dap/lldbdap_testcase.py b/lldb/packages/Python/lldbsuite/test/tools/lldb-dap/lldbdap_testcase.py
index 3b54d598c3509..ccdb000c39d71 100644
--- a/lldb/packages/Python/lldbsuite/test/tools/lldb-dap/lldbdap_testcase.py
+++ b/lldb/packages/Python/lldbsuite/test/tools/lldb-dap/lldbdap_testcase.py
@@ -1,6 +1,6 @@
import os
import time
-from typing import Optional
+from typing import Optional, Dict, List
import uuid
import dap_server
@@ -18,7 +18,7 @@ class DAPTestCaseBase(TestBase):
def create_debug_adapter(
self,
- lldbDAPEnv: Optional[dict[str, str]] = None,
+ lldbDAPEnv: Optional[Dict[str, str]] = None,
connection: Optional[str] = None,
):
"""Create the Visual Studio Code debug adapter"""
@@ -36,7 +36,7 @@ def create_debug_adapter(
def build_and_create_debug_adapter(
self,
- lldbDAPEnv: Optional[dict[str, str]] = None,
+ lldbDAPEnv: Optional[Dict[str, str]] = None,
dictionary: Optional[dict] = None,
):
self.build(dictionary=dictionary)
@@ -110,7 +110,7 @@ def set_function_breakpoints(
return breakpoint_ids
def wait_for_breakpoints_to_resolve(
- self, breakpoint_ids: list[str], timeout: Optional[float] = DEFAULT_TIMEOUT
+ self, breakpoint_ids: List[str], timeout: Optional[float] = DEFAULT_TIMEOUT
):
unresolved_breakpoints = self.dap_server.wait_for_breakpoints_to_be_verified(
breakpoint_ids, timeout
@@ -476,7 +476,7 @@ def build_and_launch(
self,
program,
*,
- lldbDAPEnv: Optional[dict[str, str]] = None,
+ lldbDAPEnv: Optional[Dict[str, str]] = None,
**kwargs,
):
"""Build the default Makefile target, create the DAP debug adapter,
diff --git a/lldb/test/API/functionalities/breakpoint/breakpoint_by_line_and_column/TestBreakpointByLineAndColumn.py b/lldb/test/API/functionalities/breakpoint/breakpoint_by_line_and_column/TestBreakpointByLineAndColumn.py
index 5798c8ffa8220..f4158b83c2a8f 100644
--- a/lldb/test/API/functionalities/breakpoint/breakpoint_by_line_and_column/TestBreakpointByLineAndColumn.py
+++ b/lldb/test/API/functionalities/breakpoint/breakpoint_by_line_and_column/TestBreakpointByLineAndColumn.py
@@ -4,6 +4,8 @@
import re
import lldb
+from typing import List
+
from lldbsuite.test.decorators import *
from lldbsuite.test.lldbtest import *
from lldbsuite.test import lldbutil
@@ -102,7 +104,7 @@ def testBreakpointByLineAndColumnNearestCode(self):
list = target.FindCompileUnits(lldb.SBFileSpec("main.cpp", False))
# Executable has been built just from one source file 'main.cpp',
# so we may check only the first element of list.
- compile_unit = list[0].GetCompileUnit()
+ compile_unit = list[0].GetCompileUnit()
found = False
for line_entry in compile_unit:
diff --git a/lldb/test/API/python_api/target/TestTargetAPI.py b/lldb/test/API/python_api/target/TestTargetAPI.py
index d346563af18e2..5a76980e638fe 100644
--- a/lldb/test/API/python_api/target/TestTargetAPI.py
+++ b/lldb/test/API/python_api/target/TestTargetAPI.py
@@ -4,6 +4,8 @@
import os
import lldb
+from typing import List
+
from lldbsuite.test.decorators import *
from lldbsuite.test.lldbtest import *
from lldbsuite.test import lldbutil
@@ -293,7 +295,7 @@ def find_compile_units(self, exe):
# Executable has been built just from one source file 'main.c',
# so we may check only the first element of list.
self.assertEqual(
- list[0].GetCompileUnit().GetFileSpec().GetFilename(), source_name
+ list[0].GetCompileUnit().GetFileSpec().GetFilename(), source_name
)
def find_functions(self, exe_name):
diff --git a/lldb/test/API/terminal/TestSTTYBeforeAndAfter.py b/lldb/test/API/terminal/TestSTTYBeforeAndAfter.py
index 313a265319dba..8d3523463c794 100644
--- a/lldb/test/API/terminal/TestSTTYBeforeAndAfter.py
+++ b/lldb/test/API/terminal/TestSTTYBeforeAndAfter.py
@@ -5,6 +5,8 @@
import lldb
import io
import sys
+from typing import Tuple
+
from lldbsuite.test.decorators import *
from lldbsuite.test.lldbtest import *
from lldbsuite.test import lldbutil
@@ -99,6 +101,6 @@ def test_stty_dash_a_before_and_afetr_invoking_lldb_command(self):
if self.TraceOn():
print("tuple->%s" % str(tuple))
# Every line should compare equal until the first blank line.
- if len(tuple[0]) == 0:
+ if len(tuple[0]) == 0:
break
- self.assertEqual(tuple[0], tuple[1])
+ self.assertEqual(tuple[0], tuple[1])
diff --git a/llvm/utils/UpdateTestChecks/common.py b/llvm/utils/UpdateTestChecks/common.py
index 178c623e33e0e..d573748006967 100644
--- a/llvm/utils/UpdateTestChecks/common.py
+++ b/llvm/utils/UpdateTestChecks/common.py
@@ -9,7 +9,7 @@
import sys
import shlex
-from typing import List, Mapping, Set
+from typing import List, Mapping, Set, Tuple
##### Common utilities for update_*test_checks.py
@@ -849,7 +849,7 @@ def __init__(self, run_list, flags, scrubber_args, path, ginfo):
self._global_var_dict = {}
self._processed_prefixes = set()
for tuple in run_list:
- for prefix in tuple[0]:
+ for prefix in tuple[0]:
self._func_dict.update({prefix: dict()})
self._func_order.update({prefix: []})
self._global_var_dict.update({prefix: dict()})
diff --git a/llvm/utils/lldbDataFormatters.py b/llvm/utils/lldbDataFormatters.py
index 988827ab4aa50..381b8091b2879 100644
--- a/llvm/utils/lldbDataFormatters.py
+++ b/llvm/utils/lldbDataFormatters.py
@@ -8,6 +8,7 @@
import collections
import lldb
import json
+from typing import List, Dict
def __lldb_init_module(debugger, internal_dict):
@@ -327,7 +328,7 @@ class DenseMapSynthetic:
valobj: lldb.SBValue
# The indexes into `Buckets` that contain valid map entries.
- child_buckets: list[int]
+ child_buckets: List[int]
def __init__(self, valobj: lldb.SBValue, _) -> None:
self.valobj = valobj
@@ -370,7 +371,7 @@ def update(self):
# is repeated is either the empty key or the tombstone key.
# For each key, collect a list of buckets it appears in.
- key_buckets: dict[str, list[int]] = collections.defaultdict(list)
+ key_buckets: Dict[str, List[int]] = collections.defaultdict(list)
for index in range(num_buckets):
key = buckets.GetValueForExpressionPath(f"[{index}].first")
key_buckets[str(key.data)].append(index)
diff --git a/llvm/utils/spirv-sim/spirv-sim.py b/llvm/utils/spirv-sim/spirv-sim.py
index 428b0ca4eb796..6c16466dd5c7a 100755
--- a/llvm/utils/spirv-sim/spirv-sim.py
+++ b/llvm/utils/spirv-sim/spirv-sim.py
@@ -9,6 +9,8 @@
import inspect
import re
import sys
+from typing import Set
+
RE_EXPECTS = re.compile(r"^([0-9]+,)*[0-9]+$")
@@ -389,7 +391,7 @@ def dump(self, function_name: Optional[str] = None):
class ConvergenceRequirement:
mergeTarget: InstructionPointer
continueTarget: Optional[InstructionPointer]
- impactedLanes: set[int]
+ impactedLanes: Set[int]
Task = Dict[InstructionPointer, List[Lane]]
@@ -407,7 +409,7 @@ class Wave:
# E.g: the set of lanes required to merge before executing the merge block.
_convergence_requirements: List[ConvergenceRequirement]
# The indices of the active lanes for the current executing instruction.
- _active_lane_indices: set[int]
+ _active_lane_indices: Set[int]
def __init__(self, module, wave_size: int) -> None:
assert wave_size > 0
@@ -424,7 +426,7 @@ def __init__(self, module, wave_size: int) -> None:
# Returns True if the given IP can be executed for the given list of lanes.
def _is_task_candidate(self, ip: InstructionPointer, lanes: List[Lane]):
- merged_lanes: set[int] = set()
+ merged_lanes: Set[int] = set()
for lane in self._lanes:
if not lane.running():
merged_lanes.add(lane.tid())
diff --git a/polly/lib/External/isl/isl_test_python.py b/polly/lib/External/isl/isl_test_python.py
index 05bb0c8246421..30dd02789a8ea 100755
--- a/polly/lib/External/isl/isl_test_python.py
+++ b/polly/lib/External/isl/isl_test_python.py
@@ -6,6 +6,7 @@
import sys
import isl
+from typing import List
# Test that isl objects can be constructed.
#
@@ -210,12 +211,12 @@ def add(bs):
s.foreach_basic_set(add)
assert len(list) == 3
- assert list[0].is_subset(s)
- assert list[1].is_subset(s)
- assert list[2].is_subset(s)
- assert not list[0].is_equal(list[1])
- assert not list[0].is_equal(list[2])
- assert not list[1].is_equal(list[2])
+ assert list[0].is_subset(s)
+ assert list[1].is_subset(s)
+ assert list[2].is_subset(s)
+ assert not list[0].is_equal(list[1])
+ assert not list[0].is_equal(list[2])
+ assert not list[1].is_equal(list[2])
def fail(bs):
raise Exception("fail")
diff --git a/third-party/benchmark/tools/strip_asm.py b/third-party/benchmark/tools/strip_asm.py
index bc3a774a79320..a37f61aad6cde 100755
--- a/third-party/benchmark/tools/strip_asm.py
+++ b/third-party/benchmark/tools/strip_asm.py
@@ -8,6 +8,7 @@
import re
import sys
from argparse import ArgumentParser
+from typing import List
def find_used_labels(asm):
@@ -106,7 +107,7 @@ def process_asm(asm):
r"\s*\.(string|asciz|ascii|[1248]?byte|short|word|long|quad|value|zero)"
),
]
- keep_regexes: list[re.Pattern] = []
+ keep_regexes: List[re.Pattern] = []
fn_label_def = re.compile("^[a-zA-Z_][a-zA-Z0-9_.]*:")
for line in asm.splitlines():
# Remove Mach-O attribute
More information about the libcxx-commits
mailing list