[llvm] update_test_checks: keep meta variables stable by default (PR #76748)

Nicolai Hähnle via llvm-commits llvm-commits at lists.llvm.org
Mon Mar 4 10:57:54 PST 2024


================
@@ -1176,20 +1214,236 @@ def may_clash_with_default_check_prefix_name(check_prefix, var):
     )
 
 
+# Tag marking where a FileCheck variable occurs in a generalized check line.
+VARIABLE_TAG = "[[@@]]"
+# Matches a FileCheck meta variable, e.g. [[TMP0]] or [[TMP0:%.*]]; group 1
+# captures the variable name.
+METAVAR_RE = re.compile(r"\[\[([A-Z0-9_]+)(?::[^]]+)?\]\]")
+# Matches the (possibly empty) run of digits at the end of a variable name,
+# e.g. the "0" in TMP0.
+NUMERIC_SUFFIX_RE = re.compile(r"[0-9]*$")
+
+
+class CheckValueInfo:
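+    """A single occurrence of a FileCheck variable on one check line."""
+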
+    def __init__(
+        self,
+        nameless_value: NamelessValue,
+        var: str,
+        prefix: str,
+    ):
+        self.nameless_value = nameless_value
+        self.var = var
+        self.prefix = prefix
+
+
+class CheckLineInfo:
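+    """A check line together with the FileCheck variables it uses."""
+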
+    def __init__(self, line, values):
+        self.line: str = line
+        self.values: List[CheckValueInfo] = values
+
+    def __repr__(self):
+        return f"CheckLineInfo(line={self.line}, self.values={self.values})"
+
+
+def remap_metavar_names(
+    orig_line_infos: List[CheckLineInfo],
+    new_line_infos: List[CheckLineInfo],
+    committed_names: Set[str],
+) -> Mapping[str, str]:
+    """
+    Map all FileCheck variable names that appear in new_line_infos to new
+    FileCheck variable names in an attempt to reduce the diff from orig_line_infos
+    to new_line_infos.
+    """
+    # Initialize uncommitted identity mappings
+    new_mapping = {}
+    for line in new_line_infos:
+        for value in line.values:
+            new_mapping[value.var] = value.var
+
+    # Recursively commit to the identity mapping or find a better one
+    def recurse(
+        orig_line_infos: List[CheckLineInfo], new_line_infos: List[CheckLineInfo]
+    ):
+        if not new_line_infos or not orig_line_infos:
+            return
+
+        lines = set()
+
+        # Search for lines that are identical on both sides, including meta
+        # variable names, and commit to those names immediately
+        for line in orig_line_infos:
+            key = (line.line.strip(), tuple(value.var for value in line.values))
+            lines.add(key)
+
+        for line in new_line_infos:
+            key = (
+                line.line.strip(),
+                tuple(new_mapping[value.var] for value in line.values),
+            )
+            if key in lines:
+                for value in line.values:
+                    committed_names.add(new_mapping[value.var])
+
+        # Search for lines that are unique on both sides if we only consider
+        # variable names that have been committed.
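+        # For each key, lines[key] holds [orig_index, new_index], where None
+        # means the key has not been seen on that side yet and False means it
+        # occurred more than once.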
+        lines = collections.defaultdict(lambda: [None, None])
+        for i, line in enumerate(orig_line_infos):
+            key = (
+                line.line.strip(),
+                tuple(
+                    value.var for value in line.values if value.var in committed_names
+                ),
+            )
+            entry = lines[key]
+            if entry[0] is None:
+                entry[0] = i
+            else:
+                entry[0] = False
+
+        for i, line in enumerate(new_line_infos):
+            key = (
+                line.line.strip(),
+                tuple(
+                    new_mapping[value.var]
+                    for value in line.values
+                    if new_mapping[value.var] in committed_names
+                ),
+            )
+            entry = lines[key]
+            if entry[1] is None:
+                entry[1] = i
+            else:
+                entry[1] = False
+
+        unique_matches = []
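+        # Keep only the keys that occur exactly once on each side. Note that
+        # an index of 0 is falsy, hence the explicit `is not False` checks.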
+        for entry in lines.values():
+            if (
+                entry[0] is not None
+                and entry[0] is not False
+                and entry[1] is not None
+                and entry[1] is not False
+            ):
+                unique_matches.append((entry[0], entry[1]))
+
+        if not unique_matches:
+            # There are no unique matches. This is the recursion base case.
+            return
+
+        # Compute a maximal crossing-free matching via dynamic programming
+        unique_matches.sort(key=lambda entry: entry[0])
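+        # Sweep over the matches in order of original line index. As in
+        # patience sorting, `table` tracks, for each chain length, the match
+        # whose new index is smallest; `backlinks` records each match's
+        # predecessor so the longest chain can be reconstructed afterwards.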
+
+        backlinks = []
+        table = []
+        for _, new_idx in unique_matches:
+            ti = bisect.bisect_left(table, new_idx, key=lambda entry: entry[0])
+            if ti < len(table):
+                table[ti] = (new_idx, len(backlinks))
+            else:
+                table.append((new_idx, len(backlinks)))
+            if ti > 0:
+                backlinks.append(table[ti - 1][1])
+            else:
+                backlinks.append(None)
----------------
nhaehnle wrote:

Yeah, you're missing something because this isn't *quite* a dynamic programming algorithm. It started out as one, but it's really more of a line-sweeping algorithm. I'm going to add a ~40 line comment explaining this.
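
For reference, the sweep is essentially a longest increasing subsequence
computation over the matches' new-line indices. Here is a minimal
self-contained sketch of the idea (illustrative only; the name
`crossing_free_matching` is made up and not part of the patch):

```python
# Minimal sketch, not the patch itself: pick a maximal crossing-free subset of
# (orig_idx, new_idx) pairs by taking a longest increasing subsequence of the
# new indices, using a patience-sorting sweep with backlinks.
import bisect
from typing import List, Optional, Tuple


def crossing_free_matching(matches: List[Tuple[int, int]]) -> List[Tuple[int, int]]:
    matches = sorted(matches)
    table: List[int] = []  # smallest tail new_idx for each chain length
    tails: List[int] = []  # index into `matches` of each chain's tail
    backlinks: List[Optional[int]] = []  # predecessor of matches[i] in its chain
    for i, (_, new_idx) in enumerate(matches):
        ti = bisect.bisect_left(table, new_idx)
        if ti < len(table):
            table[ti] = new_idx
            tails[ti] = i
        else:
            table.append(new_idx)
            tails.append(i)
        backlinks.append(tails[ti - 1] if ti > 0 else None)
    # Walk the backlinks from the tail of the longest chain.
    result: List[Tuple[int, int]] = []
    cur: Optional[int] = tails[-1] if tails else None
    while cur is not None:
        result.append(matches[cur])
        cur = backlinks[cur]
    return result[::-1]


# (1, 3) crosses (2, 2), so only one of the two can be kept.
print(crossing_free_matching([(0, 0), (1, 3), (2, 2)]))  # [(0, 0), (2, 2)]
```

Keeping only the smallest tail index per chain length is what makes this a
single O(n log n) sweep rather than the quadratic dynamic program it started
out as.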

https://github.com/llvm/llvm-project/pull/76748


More information about the llvm-commits mailing list