[llvm] update_test_checks: keep meta variables stable by default (PR #76748)

Nicolai Hähnle via llvm-commits llvm-commits at lists.llvm.org
Thu Mar 7 13:50:30 PST 2024


https://github.com/nhaehnle updated https://github.com/llvm/llvm-project/pull/76748

>From fe92552b73580db422f2b23b34d8ab320be9dc08 Mon Sep 17 00:00:00 2001
From: Nicolai Hähnle <nicolai.haehnle at amd.com>
Date: Thu, 23 Nov 2023 05:34:05 +0100
Subject: [PATCH 1/8] update_test_checks: precommit a test case

The test case demonstrates how meta variables are needlessly renamed,
making diffs harder to read.
---
 .../Inputs/stable_ir_values.ll                | 22 ++++++++++++++++++
 .../Inputs/stable_ir_values.ll.expected       | 23 +++++++++++++++++++
 .../update_test_checks/stable_ir_values.test  |  2 ++
 3 files changed, 47 insertions(+)
 create mode 100644 llvm/test/tools/UpdateTestChecks/update_test_checks/Inputs/stable_ir_values.ll
 create mode 100644 llvm/test/tools/UpdateTestChecks/update_test_checks/Inputs/stable_ir_values.ll.expected
 create mode 100644 llvm/test/tools/UpdateTestChecks/update_test_checks/stable_ir_values.test

diff --git a/llvm/test/tools/UpdateTestChecks/update_test_checks/Inputs/stable_ir_values.ll b/llvm/test/tools/UpdateTestChecks/update_test_checks/Inputs/stable_ir_values.ll
new file mode 100644
index 00000000000000..8457bf7dc40a2e
--- /dev/null
+++ b/llvm/test/tools/UpdateTestChecks/update_test_checks/Inputs/stable_ir_values.ll
@@ -0,0 +1,22 @@
+; NOTE: Assertions have been autogenerated by utils/update_test_checks.py UTC_ARGS: --version 3
+; RUN: opt < %s -S | FileCheck %s
+
+; The assumption underlying this test is that there are pre-existing check lines
+; but something has changed, and we would like to avoid needless changes of
+; meta variable names so that diffs end up being easier to read, e.g. avoid
+; changing X_I33 into X_I34 or renumbering the various TMP variables.
+
+define i32 @func({i32, i32} %x, i32 %y) {
+; CHECK-LABEL: define i32 @func(
+; CHECK-SAME: { i32, i32 } [[X:%.*]], i32 [[Y:%.*]]) {
+; CHECK-NEXT:    [[X_I33:%.*]] = extractvalue { i32, i32 } [[X]], 0
+; CHECK-NEXT:    [[TMP1:%.*]] = add i32 [[X_I33]], [[Y]]
+; CHECK-NEXT:    [[TMP2:%.*]] = mul i32 [[TMP1]], 3
+; CHECK-NEXT:    ret i32 [[TMP2]]
+;
+  %x.i34 = extractvalue {i32, i32} %x, 0
+  %1 = add i32 %y, 1
+  %2 = add i32 %x.i34, %1
+  %3 = mul i32 %2, 3
+  ret i32 %3
+}
diff --git a/llvm/test/tools/UpdateTestChecks/update_test_checks/Inputs/stable_ir_values.ll.expected b/llvm/test/tools/UpdateTestChecks/update_test_checks/Inputs/stable_ir_values.ll.expected
new file mode 100644
index 00000000000000..5142e3ed32ba45
--- /dev/null
+++ b/llvm/test/tools/UpdateTestChecks/update_test_checks/Inputs/stable_ir_values.ll.expected
@@ -0,0 +1,23 @@
+; NOTE: Assertions have been autogenerated by utils/update_test_checks.py UTC_ARGS: --version 3
+; RUN: opt < %s -S | FileCheck %s
+
+; The assumption underlying this test is that there are pre-existing check lines
+; but something has changed, and we would like to avoid needless changes of
+; meta variable names so that diffs end up being easier to read, e.g. avoid
+; changing X_I33 into X_I34 or renumbering the various TMP variables.
+
+define i32 @func({i32, i32} %x, i32 %y) {
+; CHECK-LABEL: define i32 @func(
+; CHECK-SAME: { i32, i32 } [[X:%.*]], i32 [[Y:%.*]]) {
+; CHECK-NEXT:    [[X_I34:%.*]] = extractvalue { i32, i32 } [[X]], 0
+; CHECK-NEXT:    [[TMP1:%.*]] = add i32 [[Y]], 1
+; CHECK-NEXT:    [[TMP2:%.*]] = add i32 [[X_I34]], [[TMP1]]
+; CHECK-NEXT:    [[TMP3:%.*]] = mul i32 [[TMP2]], 3
+; CHECK-NEXT:    ret i32 [[TMP3]]
+;
+  %x.i34 = extractvalue {i32, i32} %x, 0
+  %1 = add i32 %y, 1
+  %2 = add i32 %x.i34, %1
+  %3 = mul i32 %2, 3
+  ret i32 %3
+}
diff --git a/llvm/test/tools/UpdateTestChecks/update_test_checks/stable_ir_values.test b/llvm/test/tools/UpdateTestChecks/update_test_checks/stable_ir_values.test
new file mode 100644
index 00000000000000..c6287a6b29ca92
--- /dev/null
+++ b/llvm/test/tools/UpdateTestChecks/update_test_checks/stable_ir_values.test
@@ -0,0 +1,2 @@
+# RUN: cp -f %S/Inputs/stable_ir_values.ll %t.ll && %update_test_checks %t.ll
+# RUN: diff -u %t.ll %S/Inputs/stable_ir_values.ll.expected

>From babfde543a3d9b68ba861723cc25670846cb7fd4 Mon Sep 17 00:00:00 2001
From: Nicolai Hähnle <nicolai.haehnle at amd.com>
Date: Sat, 23 Dec 2023 14:07:47 +0100
Subject: [PATCH 2/8] update_test_checks: simplify is_local_def_ir_value

The match argument is unused.
---
 llvm/utils/UpdateTestChecks/common.py | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)

diff --git a/llvm/utils/UpdateTestChecks/common.py b/llvm/utils/UpdateTestChecks/common.py
index 53777523ec2a58..72d21cf4ca7dbc 100644
--- a/llvm/utils/UpdateTestChecks/common.py
+++ b/llvm/utils/UpdateTestChecks/common.py
@@ -933,7 +933,7 @@ def __init__(
         self.variable_mapping = {}
 
     # Return true if this kind of IR value is "local", basically if it matches '%{{.*}}'.
-    def is_local_def_ir_value_match(self, match):
+    def is_local_def_ir_value(self):
         return self.ir_prefix == "%"
 
     # Return true if this kind of IR value is "global", basically if it matches '#{{.*}}'.
@@ -949,7 +949,7 @@ def get_ir_prefix_from_ir_value_match(self, match):
     # Return the IR regexp we use for this kind or IR value, e.g., [\w.-]+? for locals
     def get_ir_regex_from_ir_value_re_match(self, match):
         # for backwards compatibility we check locals with '.*'
-        if self.is_local_def_ir_value_match(match):
+        if self.is_local_def_ir_value():
             return ".*"
         return self.ir_regexp
 
@@ -990,7 +990,7 @@ def get_value_definition(self, var, match):
         else:
             regex = self.get_ir_regex_from_ir_value_re_match(match)
             capture_start = "[["
-        if self.is_local_def_ir_value_match(match):
+        if self.is_local_def_ir_value():
             return capture_start + varname + ":" + prefix + regex + "]]"
         return prefix + capture_start + varname + ":" + regex + "]]"
 
@@ -999,7 +999,7 @@ def get_value_use(self, var, match, var_prefix=None):
         if var_prefix is None:
             var_prefix = self.check_prefix
         capture_start = "[[#" if self.is_number else "[["
-        if self.is_local_def_ir_value_match(match):
+        if self.is_local_def_ir_value():
             return capture_start + self.get_value_name(var, var_prefix) + "]]"
         prefix = self.get_ir_prefix_from_ir_value_match(match)[0]
         return prefix + capture_start + self.get_value_name(var, var_prefix) + "]]"
@@ -1209,7 +1209,7 @@ def transform_line_vars(match):
                 " with scripted FileCheck name." % (var,)
             )
         key = (var, nameless_value.check_key)
-        is_local_def = nameless_value.is_local_def_ir_value_match(match)
+        is_local_def = nameless_value.is_local_def_ir_value()
         if is_local_def and key in vars_seen:
             rv = nameless_value.get_value_use(var, match)
         elif not is_local_def and key in global_vars_seen:

>From cf7f48db23f43e1d1c809616b2deff0b1ec29773 Mon Sep 17 00:00:00 2001
From: Nicolai Hähnle <nicolai.haehnle at amd.com>
Date: Sat, 23 Dec 2023 14:20:43 +0100
Subject: [PATCH 3/8] update_test_checks: simplify get_ir_regex

The match argument isn't used.
---
 llvm/utils/UpdateTestChecks/common.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/llvm/utils/UpdateTestChecks/common.py b/llvm/utils/UpdateTestChecks/common.py
index 72d21cf4ca7dbc..a3365fef5f6e7d 100644
--- a/llvm/utils/UpdateTestChecks/common.py
+++ b/llvm/utils/UpdateTestChecks/common.py
@@ -947,7 +947,7 @@ def get_ir_prefix_from_ir_value_match(self, match):
         return re.search(self.ir_prefix, match[0])[0], self.check_prefix
 
     # Return the IR regexp we use for this kind or IR value, e.g., [\w.-]+? for locals
-    def get_ir_regex_from_ir_value_re_match(self, match):
+    def get_ir_regex(self):
         # for backwards compatibility we check locals with '.*'
         if self.is_local_def_ir_value():
             return ".*"
@@ -988,7 +988,7 @@ def get_value_definition(self, var, match):
             regex = ""  # always capture a number in the default format
             capture_start = "[[#"
         else:
-            regex = self.get_ir_regex_from_ir_value_re_match(match)
+            regex = self.get_ir_regex()
             capture_start = "[["
         if self.is_local_def_ir_value():
             return capture_start + varname + ":" + prefix + regex + "]]"

>From 3bdf61e8693af08757a5c04cb74359f73756fb3e Mon Sep 17 00:00:00 2001
From: Nicolai Hähnle <nicolai.haehnle at amd.com>
Date: Thu, 23 Nov 2023 06:46:07 +0100
Subject: [PATCH 4/8] update_test_checks: keep meta variables stable by default

Prior to this change, running UTC on larger tests, especially tests
with unnamed IR values, often resulted in a spuriously large diff
because e.g. TMPnn variables in the CHECK lines were renumbered. This
change attempts to reduce the diff by keeping those variable names the
same.

There are cases in which this "drift" of variable names can end up being
more confusing. The old behavior can be re-enabled with the
--reset-variable-names command line argument.

The improvement may not be immediately apparent in the diff of this change.
The point is that the diff of stable_ir_values.ll against
stable_ir_values.ll.expected after this change is smaller.

Ideally, we'd also keep meta variables for "global" objects stable, e.g.
for attributes (#nn) and metadata (!nn). However, that would require a
much more substantial refactoring of how we generate check lines, so I
left it for future work.
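
To make the intended behavior concrete, here is a deliberately simplified
sketch of the renaming idea (not the code in this patch; the names
stable_rename, old_lines and new_lines are made up for illustration). It only
covers the easy case the real implementation handles first: when the
non-variable text of a check line is unchanged, the freshly generated
metavariable names can simply be mapped back to the names the old check line
used:

  import re

  # Matches FileCheck metavariables such as [[TMP1:%.*]] or [[X]].
  METAVAR = re.compile(r"\[\[([A-Z0-9_]+)(?::[^]]+)?\]\]")

  def stable_rename(old_lines, new_lines):
      def masked(line):
          # Hide the variable names so only the surrounding text is compared.
          return METAVAR.sub("[[@@]]", line)

      old_by_text = {}
      for line in old_lines:
          old_by_text.setdefault(masked(line), []).append(METAVAR.findall(line))

      mapping = {}
      for line in new_lines:
          candidates = old_by_text.get(masked(line))
          if not candidates:
              continue  # no textually identical counterpart in the old checks
          for new_var, old_var in zip(METAVAR.findall(line), candidates.pop(0)):
              mapping.setdefault(new_var, old_var)
      return mapping

The real implementation additionally matches lines that did change, respects
line order, and resolves name conflicts; that is what remap_metavar_names
below is responsible for.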
---
 .../Inputs/stable_ir_values.ll.expected       |  10 +-
 .../Inputs/stable_ir_values.ll.expected.reset |  23 +
 .../update_test_checks/stable_ir_values.test  |   3 +
 llvm/utils/UpdateTestChecks/common.py         | 486 +++++++++++++++++-
 llvm/utils/update_test_checks.py              |  22 +-
 5 files changed, 517 insertions(+), 27 deletions(-)
 create mode 100644 llvm/test/tools/UpdateTestChecks/update_test_checks/Inputs/stable_ir_values.ll.expected.reset

diff --git a/llvm/test/tools/UpdateTestChecks/update_test_checks/Inputs/stable_ir_values.ll.expected b/llvm/test/tools/UpdateTestChecks/update_test_checks/Inputs/stable_ir_values.ll.expected
index 5142e3ed32ba45..3549a4d76aa762 100644
--- a/llvm/test/tools/UpdateTestChecks/update_test_checks/Inputs/stable_ir_values.ll.expected
+++ b/llvm/test/tools/UpdateTestChecks/update_test_checks/Inputs/stable_ir_values.ll.expected
@@ -9,11 +9,11 @@
 define i32 @func({i32, i32} %x, i32 %y) {
 ; CHECK-LABEL: define i32 @func(
 ; CHECK-SAME: { i32, i32 } [[X:%.*]], i32 [[Y:%.*]]) {
-; CHECK-NEXT:    [[X_I34:%.*]] = extractvalue { i32, i32 } [[X]], 0
-; CHECK-NEXT:    [[TMP1:%.*]] = add i32 [[Y]], 1
-; CHECK-NEXT:    [[TMP2:%.*]] = add i32 [[X_I34]], [[TMP1]]
-; CHECK-NEXT:    [[TMP3:%.*]] = mul i32 [[TMP2]], 3
-; CHECK-NEXT:    ret i32 [[TMP3]]
+; CHECK-NEXT:    [[X_I33:%.*]] = extractvalue { i32, i32 } [[X]], 0
+; CHECK-NEXT:    [[TMP3:%.*]] = add i32 [[Y]], 1
+; CHECK-NEXT:    [[TMP1:%.*]] = add i32 [[X_I33]], [[TMP3]]
+; CHECK-NEXT:    [[TMP2:%.*]] = mul i32 [[TMP1]], 3
+; CHECK-NEXT:    ret i32 [[TMP2]]
 ;
   %x.i34 = extractvalue {i32, i32} %x, 0
   %1 = add i32 %y, 1
diff --git a/llvm/test/tools/UpdateTestChecks/update_test_checks/Inputs/stable_ir_values.ll.expected.reset b/llvm/test/tools/UpdateTestChecks/update_test_checks/Inputs/stable_ir_values.ll.expected.reset
new file mode 100644
index 00000000000000..5142e3ed32ba45
--- /dev/null
+++ b/llvm/test/tools/UpdateTestChecks/update_test_checks/Inputs/stable_ir_values.ll.expected.reset
@@ -0,0 +1,23 @@
+; NOTE: Assertions have been autogenerated by utils/update_test_checks.py UTC_ARGS: --version 3
+; RUN: opt < %s -S | FileCheck %s
+
+; The assumption underlying this test is that there are pre-existing check lines
+; but something has changed, and we would like to avoid needless changes of
+; meta variable names so that diffs end up being easier to read, e.g. avoid
+; changing X_I33 into X_I34 or renumbering the various TMP variables.
+
+define i32 @func({i32, i32} %x, i32 %y) {
+; CHECK-LABEL: define i32 @func(
+; CHECK-SAME: { i32, i32 } [[X:%.*]], i32 [[Y:%.*]]) {
+; CHECK-NEXT:    [[X_I34:%.*]] = extractvalue { i32, i32 } [[X]], 0
+; CHECK-NEXT:    [[TMP1:%.*]] = add i32 [[Y]], 1
+; CHECK-NEXT:    [[TMP2:%.*]] = add i32 [[X_I34]], [[TMP1]]
+; CHECK-NEXT:    [[TMP3:%.*]] = mul i32 [[TMP2]], 3
+; CHECK-NEXT:    ret i32 [[TMP3]]
+;
+  %x.i34 = extractvalue {i32, i32} %x, 0
+  %1 = add i32 %y, 1
+  %2 = add i32 %x.i34, %1
+  %3 = mul i32 %2, 3
+  ret i32 %3
+}
diff --git a/llvm/test/tools/UpdateTestChecks/update_test_checks/stable_ir_values.test b/llvm/test/tools/UpdateTestChecks/update_test_checks/stable_ir_values.test
index c6287a6b29ca92..4dfaf5d25c8a69 100644
--- a/llvm/test/tools/UpdateTestChecks/update_test_checks/stable_ir_values.test
+++ b/llvm/test/tools/UpdateTestChecks/update_test_checks/stable_ir_values.test
@@ -1,2 +1,5 @@
 # RUN: cp -f %S/Inputs/stable_ir_values.ll %t.ll && %update_test_checks %t.ll
 # RUN: diff -u %t.ll %S/Inputs/stable_ir_values.ll.expected
+# Now test that we can reset all the names
+# RUN: %update_test_checks %t.ll --reset-variable-names
+# RUN: diff -u %t.ll %S/Inputs/stable_ir_values.ll.expected.reset
diff --git a/llvm/utils/UpdateTestChecks/common.py b/llvm/utils/UpdateTestChecks/common.py
index a3365fef5f6e7d..5eaf225d490705 100644
--- a/llvm/utils/UpdateTestChecks/common.py
+++ b/llvm/utils/UpdateTestChecks/common.py
@@ -1,6 +1,8 @@
 from __future__ import print_function
 
 import argparse
+import bisect
+import collections
 import copy
 import glob
 import itertools
@@ -10,7 +12,7 @@
 import sys
 import shlex
 
-from typing import List
+from typing import List, Mapping, Set
 
 ##### Common utilities for update_*test_checks.py
 
@@ -420,6 +422,50 @@ def should_add_line_to_output(
     return True
 
 
+def collect_original_check_lines(ti: TestInfo, prefix_set: set):
+    """
+    Collect pre-existing check lines into a dictionary `result` which is
+    returned.
+
+    result[func_name][prefix] is filled with a list of right-hand-sides of check
+    lines.
+    """
+    result = {}
+
+    current_function = None
+    for input_line_info in ti.ro_iterlines():
+        input_line = input_line_info.line
+        if current_function is not None:
+            if input_line == "":
+                continue
+            if input_line.lstrip().startswith(";"):
+                m = CHECK_RE.match(input_line)
+                if (
+                    m is not None
+                    and m.group(1) in prefix_set
+                    and m.group(2) not in ["LABEL", "SAME"]
+                ):
+                    if m.group(1) not in current_function:
+                        current_function[m.group(1)] = []
+                    current_function[m.group(1)].append(
+                        input_line[m.end() :].strip()
+                    )
+                continue
+            current_function = None
+
+        m = IR_FUNCTION_RE.match(input_line)
+        if m is not None:
+            func_name = m.group(1)
+            if ti.args.function is not None and func_name != ti.args.function:
+                # When filtering on a specific function, skip all others.
+                continue
+
+            assert func_name not in result
+            current_function = result[func_name] = {}
+
+    return result
+
+
 # Perform lit-like substitutions
 def getSubstitutions(sourcepath):
     sourcedir = os.path.dirname(sourcepath)
@@ -491,7 +537,7 @@ def invoke_tool(exe, cmd_args, ir, preprocess_cmd=None, verbose=False):
 CHECK_PREFIX_RE = re.compile(r"--?check-prefix(?:es)?[= ](\S+)")
 PREFIX_RE = re.compile("^[a-zA-Z0-9_-]+$")
 CHECK_RE = re.compile(
-    r"^\s*(?://|[;#])\s*([^:]+?)(?:-NEXT|-NOT|-DAG|-LABEL|-SAME|-EMPTY)?:"
+    r"^\s*(?://|[;#])\s*([^:]+?)(?:-(NEXT|NOT|DAG|LABEL|SAME|EMPTY))?:"
 )
 CHECK_SAME_RE = re.compile(r"^\s*(?://|[;#])\s*([^:]+?)(?:-SAME)?:")
 
@@ -1187,20 +1233,296 @@ def may_clash_with_default_check_prefix_name(check_prefix, var):
     )
 
 
+VARIABLE_TAG = "[[@@]]"
+METAVAR_RE = re.compile(r"\[\[([A-Z0-9_]+)(?::[^]]+)?\]\]")
+NUMERIC_SUFFIX_RE = re.compile(r"[0-9]*$")
+
+
+class CheckValueInfo:
+    def __init__(
+        self,
+        nameless_value: NamelessValue,
+        var: str,
+        prefix: str,
+    ):
+        self.nameless_value = nameless_value
+        self.var = var
+        self.prefix = prefix
+
+
+# Represent a check line in a way that allows us to compare check lines while
+# ignoring some or all of the FileCheck variable names.
+class CheckLineInfo:
+    def __init__(self, line, values):
+        # Line with all FileCheck variable name occurrences replaced by VARIABLE_TAG
+        self.line: str = line
+
+        # Information on each FileCheck variable name occurrence in the line
+        self.values: List[CheckValueInfo] = values
+
+    def __repr__(self):
+        return f"CheckLineInfo(line={self.line}, self.values={self.values})"
+
+
+def remap_metavar_names(
+    orig_line_infos: List[CheckLineInfo],
+    new_line_infos: List[CheckLineInfo],
+    committed_names: Set[str],
+) -> Mapping[str, str]:
+    """
+    Map all FileCheck variable names that appear in new_line_infos to new
+    FileCheck variable names in an attempt to reduce the diff from orig_line_infos
+    to new_line_infos.
+
+    This is done by:
+    * Committing variable names that participate in fully unchanged lines
+      (regardless of how these lines are ordered).
+    * Attempting to cause additional lines to "become unchanged" by matching
+      similar lines using a diffing algorithm.
+    * Recursing into intervals between the matched lines.
+    """
+    # Initialize uncommitted identity mappings
+    new_mapping = {}
+    for line in new_line_infos:
+        for value in line.values:
+            new_mapping[value.var] = value.var
+
+    # Recursively commit to the identity mapping or find a better one
+    def recurse(
+        orig_line_infos: List[CheckLineInfo], new_line_infos: List[CheckLineInfo]
+    ):
+        if not new_line_infos or not orig_line_infos:
+            return
+
+        lines = set()
+
+        # Search for lines that are identical on both sides, including meta
+        # variable names, and commit to those names immediately
+        for line in orig_line_infos:
+            key = (line.line.strip(), tuple(value.var for value in line.values))
+            lines.add(key)
+
+        for line in new_line_infos:
+            key = (
+                line.line.strip(),
+                tuple(new_mapping[value.var] for value in line.values),
+            )
+            if key in lines:
+                for value in line.values:
+                    committed_names.add(new_mapping[value.var])
+
+        # Search for lines that are unique on both sides if we only consider
+        # variable names that have been committed.
+        lines = collections.defaultdict(lambda: [None, None])
+        for i, line in enumerate(orig_line_infos):
+            key = (
+                line.line.strip(),
+                tuple(
+                    value.var for value in line.values if value.var in committed_names
+                ),
+            )
+            entry = lines[key]
+            if entry[0] is None:
+                entry[0] = i
+            else:
+                entry[0] = False
+
+        for i, line in enumerate(new_line_infos):
+            key = (
+                line.line.strip(),
+                tuple(
+                    new_mapping[value.var]
+                    for value in line.values
+                    if new_mapping[value.var] in committed_names
+                ),
+            )
+            entry = lines[key]
+            if entry[1] is None:
+                entry[1] = i
+            else:
+                entry[1] = False
+
+        unique_matches = []
+        for entry in lines.values():
+            if (
+                entry[0] is not None
+                and entry[0] is not False
+                and entry[1] is not None
+                and entry[1] is not False
+            ):
+                unique_matches.append((entry[0], entry[1]))
+
+        if not unique_matches:
+            # There are no unique matches. This is the recursion base case.
+            return
+
+        # Compute a maximal crossing-free matching via an algorithm that is
+        # inspired by a mixture of dynamic programming and line-sweeping in
+        # discrete geometry.
+        #
+        # I would be surprised if this algorithm didn't exist somewhere in the
+        # literature, but I found it without consciously recalling any
+        # references, so you'll have to make do with the explanation below.
+        # Sorry.
+        #
+        # The underlying graph is bipartite:
+        #  - nodes on the LHS represent lines in the original check
+        #  - nodes on the RHS represent lines in the new (updated) check
+        #
+        # Nodes are implicitly sorted by the corresponding line number.
+        # Edges (unique_matches) are sorted by the line number on the LHS.
+        #
+        # Here's the geometric intuition for the algorithm.
+        #
+        #  * Plot the edges as points in the plane, with the original line
+        #    number on the X axis and the updated line number on the Y axis.
+        #  * The goal is to find a longest "chain" of points where each point
+        #    is strictly above and to the right of the previous point.
+        #  * The algorithm proceeds by sweeping a vertical line from left to
+        #    right.
+        #  * The algorithm maintains a table where `table[N]` answers the
+        #    question "What is currently the 'best' way to build a chain of N+1
+        #    points to the left of the vertical line". Here, 'best' means
+        #    that the last point of the chain is as low as possible (minimal
+        #    Y coordinate).
+        #   * `table[N]` is `(y, point_idx)` where `point_idx` is the index of
+        #     the last point in the chain and `y` is its Y coordinate
+        #   * A key invariant is that the Y values in the table are
+        #     monotonically increasing
+        #  * Thanks to these properties, the table can be used to answer the
+        #    question "What is the longest chain that can be built to the left
+        #    of the vertical line using only points below a certain Y value",
+        #    using a binary search over the table.
+        #  * The algorithm also builds a backlink structure in which every point
+        #    links back to the previous point on a best (longest) chain ending
+        #    at that point
+        #
+        # The core loop of the algorithm sweeps the line and updates the table
+        # and backlink structure for every point that we cross during the sweep.
+        # Therefore, the algorithm is trivially O(M log M) in the number of
+        # points. Since we only consider lines that are unique, it is log-linear
+        # in the problem size.
+        unique_matches.sort(key=lambda entry: entry[0])
+
+        backlinks = []
+        table = []
+        for _, new_idx in unique_matches:
+            ti = bisect.bisect_left(table, new_idx, key=lambda entry: entry[0])
+            if ti < len(table):
+                table[ti] = (new_idx, len(backlinks))
+            else:
+                table.append((new_idx, len(backlinks)))
+            if ti > 0:
+                backlinks.append(table[ti - 1][1])
+            else:
+                backlinks.append(None)
+
+        # Commit to names in the matching, re-checking compatibility as we go along
+        match_idx = table[-1][1]
+        matches = []
+        while match_idx is not None:
+            orig_idx, new_idx = unique_matches[match_idx]
+            local_commits = {}
+
+            for orig_value, new_value in zip(
+                orig_line_infos[orig_idx].values, new_line_infos[new_idx].values
+            ):
+                if new_mapping[new_value.var] in committed_names:
+                    # The new value has already been committed by a previous match we considered
+                    # during the outer loop. If it was mapped to the same name as the original value,
+                    # we can consider committing other values from this line. Otherwise, we should
+                    # ignore this line.
+                    if new_mapping[new_value.var] == orig_value.var:
+                        continue
+                    else:
+                        break
+
+                if new_value.var in local_commits:
+                    # Same, but for a possible commit happening on the same line
+                    if local_commits[new_value.var] == orig_value.var:
+                        continue
+                    else:
+                        break
+
+                if orig_value.var in committed_names:
+                    # We can't map this value because the name we would map it to has already been
+                    # committed for something else. Give up on this line.
+                    break
+
+                local_commits[new_value.var] = orig_value.var
+            else:
+                # No reason not to add any commitments for this line
+                for new_var, orig_var in local_commits.items():
+                    new_mapping[new_var] = orig_var
+                    committed_names.add(orig_var)
+
+                    if (
+                        orig_var != new_var
+                        and orig_var in new_mapping
+                        and new_mapping[orig_var] == orig_var
+                    ):
+                        new_mapping[orig_var] = "conflict_" + orig_var
+
+                matches.append((orig_idx, new_idx))
+
+            match_idx = backlinks[match_idx]
+
+        # Recurse into the intervals between matches. Add sentinels to simplify
+        # the iteration over intervals.
+        matches.append((-1, -1))
+        matches.reverse()
+        matches.append((len(orig_line_infos), len(new_line_infos)))
+
+        for ((left_orig_idx, left_new_idx), (right_orig_idx, right_new_idx)) in zip(matches, matches[1:]):
+            assert left_orig_idx < right_orig_idx
+            assert left_new_idx < right_new_idx
+            recurse(
+                orig_line_infos[left_orig_idx + 1 : right_orig_idx],
+                new_line_infos[left_new_idx + 1 : right_new_idx],
+            )
+
+    recurse(orig_line_infos, new_line_infos)
+
+    # Commit to remaining names and resolve conflicts
+    for new_name, mapped_name in new_mapping.items():
+        if mapped_name in committed_names:
+            continue
+        if not mapped_name.startswith("conflict_"):
+            assert mapped_name == new_name
+            committed_names.add(mapped_name)
+
+    for new_name, mapped_name in new_mapping.items():
+        if mapped_name in committed_names:
+            continue
+        assert mapped_name.startswith("conflict_")
+
+        m = NUMERIC_SUFFIX_RE.search(new_name)
+        base_name = new_name[: m.start()]
+        suffix = int(new_name[m.start() :]) if m.start() != m.end() else 1
+        while True:
+            candidate = f"{base_name}{suffix}"
+            if candidate not in committed_names:
+                new_mapping[new_name] = candidate
+                break
+            suffix += 1
+
+    return new_mapping
+
 def generalize_check_lines_common(
     lines,
     is_analyze,
     vars_seen,
     global_vars_seen,
     nameless_values,
-    nameless_value_regex,
+    nameless_value_regex: re.Pattern,
     is_asm,
     preserve_names,
+    original_check_lines: List[str] | None = None,
 ):
     # This gets called for each match that occurs in
     # a line. We transform variables we haven't seen
     # into defs, and variables we have seen into uses.
-    def transform_line_vars(match):
+    def transform_line_vars(match, transform_locals=True):
         var = get_name_from_ir_value_match(match)
         nameless_value = get_nameless_value_from_match(match, nameless_values)
         if may_clash_with_default_check_prefix_name(nameless_value.check_prefix, var):
@@ -1210,6 +1532,8 @@ def transform_line_vars(match):
             )
         key = (var, nameless_value.check_key)
         is_local_def = nameless_value.is_local_def_ir_value()
+        if is_local_def and not transform_locals:
+            return None
         if is_local_def and key in vars_seen:
             rv = nameless_value.get_value_use(var, match)
         elif not is_local_def and key in global_vars_seen:
@@ -1228,13 +1552,15 @@ def transform_line_vars(match):
         # including the commas and spaces.
         return match.group(1) + rv + match.group(match.lastindex)
 
-    lines_with_def = []
+    def transform_non_local_line_vars(match):
+        return transform_line_vars(match, False)
+
     multiple_braces_re = re.compile(r"({{+)|(}}+)")
     def escape_braces(match_obj):
         return '{{' + re.escape(match_obj.group(0)) + '}}'
 
-    for i, line in enumerate(lines):
-        if not is_asm and not is_analyze:
+    if not is_asm and not is_analyze:
+        for i, line in enumerate(lines):
             # An IR variable named '%.' matches the FileCheck regex string.
             line = line.replace("%.", "%dot")
             for regex in _global_hex_value_regex:
@@ -1252,25 +1578,136 @@ def escape_braces(match_obj):
             # Ignore any comments, since the check lines will too.
             scrubbed_line = SCRUB_IR_COMMENT_RE.sub(r"", line)
             lines[i] = scrubbed_line
-        if not preserve_names:
-            # It can happen that two matches are back-to-back and for some reason sub
-            # will not replace both of them. For now we work around this by
-            # substituting until there is no more match.
-            changed = True
-            while changed:
-                (lines[i], changed) = nameless_value_regex.subn(
-                    transform_line_vars, lines[i], count=1
-                )
-        if is_analyze:
+
+    if not preserve_names:
+        if is_asm:
+            for i, _ in enumerate(lines):
+                # It can happen that two matches are back-to-back and for some reason sub
+                # will not replace both of them. For now we work around this by
+                # substituting until there is no more match.
+                changed = True
+                while changed:
+                    (lines[i], changed) = nameless_value_regex.subn(
+                        transform_line_vars, lines[i], count=1
+                    )
+        else:
+            # LLVM IR case. Start by handling global meta variables (global IR variables,
+            # metadata, attributes)
+            for i, _ in enumerate(lines):
+                start = 0
+                while True:
+                    m = nameless_value_regex.search(lines[i][start:])
+                    if m is None:
+                        break
+                    start += m.start()
+                    sub = transform_non_local_line_vars(m)
+                    if sub is not None:
+                        lines[i] = (
+                            lines[i][:start] + sub + lines[i][start + len(m.group(0)) :]
+                        )
+                    start += 1
+
+            # Collect information about new check lines and original check lines (if any)
+            new_line_infos = []
+            for line in lines:
+                filtered_line = ""
+                values = []
+                while True:
+                    m = nameless_value_regex.search(line)
+                    if m is None:
+                        filtered_line += line
+                        break
+
+                    var = get_name_from_ir_value_match(m)
+                    nameless_value = get_nameless_value_from_match(m, nameless_values)
+                    var = nameless_value.get_value_name(
+                        var, nameless_value.check_prefix
+                    )
+
+                    # Replace with a [[@@]] tag, but be sure to keep the spaces and commas.
+                    filtered_line += (
+                        line[: m.start()]
+                        + m.group(1)
+                        + VARIABLE_TAG
+                        + m.group(m.lastindex)
+                    )
+                    line = line[m.end() :]
+                    values.append(
+                        CheckValueInfo(
+                            nameless_value=nameless_value,
+                            var=var,
+                            prefix=nameless_value.get_ir_prefix_from_ir_value_match(m)[
+                                0
+                            ],
+                        )
+                    )
+                new_line_infos.append(CheckLineInfo(filtered_line, values))
+
+            orig_line_infos = []
+            for line in original_check_lines or []:
+                filtered_line = ""
+                values = []
+                while True:
+                    m = METAVAR_RE.search(line)
+                    if m is None:
+                        filtered_line += line
+                        break
+
+                    # Replace with a [[@@]] tag, but be sure to keep the spaces and commas.
+                    filtered_line += line[: m.start()] + VARIABLE_TAG
+                    line = line[m.end() :]
+                    values.append(
+                        CheckValueInfo(
+                            nameless_value=None,
+                            var=m.group(1),
+                            prefix=None,
+                        )
+                    )
+                orig_line_infos.append(CheckLineInfo(filtered_line, values))
+
+            # Compute the variable name mapping
+            committed_names = set(vars_seen)
+
+            mapping = remap_metavar_names(
+                orig_line_infos, new_line_infos, committed_names
+            )
+
+            for i, line_info in enumerate(new_line_infos):
+                line_template = line_info.line
+                line = ""
+
+                for value in line_info.values:
+                    idx = line_template.find(VARIABLE_TAG)
+                    line += line_template[:idx]
+                    line_template = line_template[idx + len(VARIABLE_TAG) :]
+
+                    key = (mapping[value.var], value.nameless_value.check_key)
+                    is_local_def = value.nameless_value.is_local_def_ir_value()
+                    if is_local_def:
+                        if mapping[value.var] in vars_seen:
+                            line += f"[[{mapping[value.var]}]]"
+                        else:
+                            line += f"[[{mapping[value.var]}:{value.prefix}{value.nameless_value.get_ir_regex()}]]"
+                            vars_seen.add(mapping[value.var])
+                    else:
+                        raise RuntimeError("not implemented")
+
+                line += line_template
+
+                lines[i] = line
+
+    if is_analyze:
+        for i, _ in enumerate(lines):
             # Escape multiple {{ or }} as {{}} denotes a FileCheck regex.
             scrubbed_line = multiple_braces_re.sub(escape_braces, lines[i])
             lines[i] = scrubbed_line
+
     return lines
 
 
 # Replace IR value defs and uses with FileCheck variables.
 def generalize_check_lines(
-    lines, is_analyze, vars_seen, global_vars_seen, preserve_names
+    lines, is_analyze, vars_seen, global_vars_seen, preserve_names, original_check_lines
 ):
     return generalize_check_lines_common(
         lines,
@@ -1281,6 +1718,7 @@ def generalize_check_lines(
         IR_VALUE_RE,
         False,
         preserve_names,
+        original_check_lines=original_check_lines,
     )
 
 
@@ -1337,6 +1775,7 @@ def add_checks(
     global_vars_seen_dict,
     is_filtered,
     preserve_names=False,
+    original_check_lines: Mapping[str, List[str]] = {},
 ):
     # prefix_exclusions are prefixes we cannot use to print the function because it doesn't exist in run lines that use these prefixes as well.
     prefix_exclusions = set()
@@ -1409,6 +1848,7 @@ def add_checks(
                     vars_seen,
                     global_vars_seen,
                     preserve_names,
+                    original_check_lines=[],
                 )[0]
             func_name_separator = func_dict[checkprefix][func_name].func_name_separator
             if "[[" in args_and_sig:
@@ -1516,7 +1956,12 @@ def add_checks(
             # to variable naming fashions.
             else:
                 func_body = generalize_check_lines(
-                    func_body, False, vars_seen, global_vars_seen, preserve_names
+                    func_body,
+                    False,
+                    vars_seen,
+                    global_vars_seen,
+                    preserve_names,
+                    original_check_lines=original_check_lines.get(checkprefix),
                 )
 
                 # This could be selectively enabled with an optional invocation argument.
@@ -1578,6 +2023,7 @@ def add_ir_checks(
     version,
     global_vars_seen_dict,
     is_filtered,
+    original_check_lines={},
 ):
     # Label format is based on IR string.
     if function_sig and version > 1:
@@ -1602,6 +2048,7 @@ def add_ir_checks(
         global_vars_seen_dict,
         is_filtered,
         preserve_names,
+        original_check_lines=original_check_lines,
     )
 
 
@@ -1890,6 +2337,7 @@ def get_autogennote_suffix(parser, args):
             "llvm_bin",
             "verbose",
             "force_update",
+            "reset_variable_names",
         ):
             continue
         value = getattr(args, action.dest)
diff --git a/llvm/utils/update_test_checks.py b/llvm/utils/update_test_checks.py
index b5077d79351378..04808ce6bb1c6f 100755
--- a/llvm/utils/update_test_checks.py
+++ b/llvm/utils/update_test_checks.py
@@ -85,6 +85,12 @@ def main():
         choices=["none", "smart", "all"],
         help="Check global entries (global variables, metadata, attribute sets, ...) for functions",
     )
+    parser.add_argument(
+        "--reset-variable-names",
+        action="store_true",
+        help="Reset all variable names to correspond closely to the variable names in IR. "
+        "This tends to result in larger diffs.",
+    )
     parser.add_argument("tests", nargs="+")
     initial_args = common.parse_commandline_args(parser)
 
@@ -170,13 +176,19 @@ def main():
             )
             builder.processed_prefixes(prefixes)
 
+        prefix_set = set(
+            [prefix for prefixes, _, _ in prefix_list for prefix in prefixes]
+        )
+
+        if not ti.args.reset_variable_names:
+            original_check_lines = common.collect_original_check_lines(ti, prefix_set)
+        else:
+            original_check_lines = {}
+
         func_dict = builder.finish_and_get_func_dict()
         is_in_function = False
         is_in_function_start = False
         has_checked_pre_function_globals = False
-        prefix_set = set(
-            [prefix for prefixes, _, _ in prefix_list for prefix in prefixes]
-        )
         common.debug("Rewriting FileCheck prefixes:", str(prefix_set))
         output_lines = []
 
@@ -230,6 +242,7 @@ def main():
                         args.version,
                         global_vars_seen_dict,
                         is_filtered=builder.is_filtered(),
+                        original_check_lines=original_check_lines.get(func, {}),
                     ),
                 )
             )
@@ -261,6 +274,9 @@ def main():
                             args.version,
                             global_vars_seen_dict,
                             is_filtered=builder.is_filtered(),
+                            original_check_lines=original_check_lines.get(
+                                func_name, {}
+                            ),
                         )
                     )
                     is_in_function_start = False

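The long comment in remap_metavar_names above describes the chain-finding step
as a sweep over (original line, new line) index pairs. For reviewers who want
to experiment with that step in isolation, here is a minimal standalone sketch
(the names are illustrative; it drops the name-compatibility re-checking that
the patch performs while walking the backlinks, and like the patch it relies
on the key= argument of bisect, i.e. Python 3.10+):

  import bisect

  def longest_increasing_chain(points):
      # points: (orig_idx, new_idx) pairs, sorted by orig_idx.
      # Returns indices into points forming a longest chain whose new_idx
      # values are strictly increasing, i.e. a maximal crossing-free matching.
      table = []      # table[n]: (new_idx, point index) ending a best chain of n+1 points
      backlinks = []  # backlinks[i]: previous point on a best chain ending at point i
      for i, (_, new_idx) in enumerate(points):
          pos = bisect.bisect_left(table, new_idx, key=lambda entry: entry[0])
          if pos < len(table):
              table[pos] = (new_idx, i)
          else:
              table.append((new_idx, i))
          backlinks.append(table[pos - 1][1] if pos > 0 else None)
      chain = []
      idx = table[-1][1] if table else None
      while idx is not None:
          chain.append(idx)
          idx = backlinks[idx]
      return list(reversed(chain))

For example, longest_increasing_chain([(0, 2), (1, 0), (2, 1), (3, 3)]) returns
[1, 2, 3]: the pair (0, 2) is skipped because keeping it would rule out the
longer chain formed by the remaining pairs.
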
>From a7e3b372fd814d7590e85787070e4368d4bd5f08 Mon Sep 17 00:00:00 2001
From: Nicolai Hähnle <nicolai.haehnle at amd.com>
Date: Mon, 4 Mar 2024 20:40:15 +0100
Subject: [PATCH 5/8] squash! update_test_checks: precommit a test case

---
 .../Inputs/stable_ir_values2.ll               | 30 +++++++++++++
 .../Inputs/stable_ir_values2.ll.expected      | 26 ++++++++++++
 .../Inputs/stable_ir_values3.ll               | 38 +++++++++++++++++
 .../Inputs/stable_ir_values3.ll.expected      | 38 +++++++++++++++++
 .../Inputs/stable_ir_values4.ll               | 41 ++++++++++++++++++
 .../Inputs/stable_ir_values4.ll.expected      | 42 +++++++++++++++++++
 .../update_test_checks/stable_ir_values2.test |  2 +
 .../update_test_checks/stable_ir_values3.test |  2 +
 .../update_test_checks/stable_ir_values4.test |  2 +
 9 files changed, 221 insertions(+)
 create mode 100644 llvm/test/tools/UpdateTestChecks/update_test_checks/Inputs/stable_ir_values2.ll
 create mode 100644 llvm/test/tools/UpdateTestChecks/update_test_checks/Inputs/stable_ir_values2.ll.expected
 create mode 100644 llvm/test/tools/UpdateTestChecks/update_test_checks/Inputs/stable_ir_values3.ll
 create mode 100644 llvm/test/tools/UpdateTestChecks/update_test_checks/Inputs/stable_ir_values3.ll.expected
 create mode 100644 llvm/test/tools/UpdateTestChecks/update_test_checks/Inputs/stable_ir_values4.ll
 create mode 100644 llvm/test/tools/UpdateTestChecks/update_test_checks/Inputs/stable_ir_values4.ll.expected
 create mode 100644 llvm/test/tools/UpdateTestChecks/update_test_checks/stable_ir_values2.test
 create mode 100644 llvm/test/tools/UpdateTestChecks/update_test_checks/stable_ir_values3.test
 create mode 100644 llvm/test/tools/UpdateTestChecks/update_test_checks/stable_ir_values4.test

diff --git a/llvm/test/tools/UpdateTestChecks/update_test_checks/Inputs/stable_ir_values2.ll b/llvm/test/tools/UpdateTestChecks/update_test_checks/Inputs/stable_ir_values2.ll
new file mode 100644
index 00000000000000..d05c26241f87c1
--- /dev/null
+++ b/llvm/test/tools/UpdateTestChecks/update_test_checks/Inputs/stable_ir_values2.ll
@@ -0,0 +1,30 @@
+; NOTE: Assertions have been autogenerated by utils/update_test_checks.py UTC_ARGS: --version 3
+; RUN: opt < %s -S | FileCheck %s
+
+define i32 @func(i32 %x) {
+; CHECK-LABEL: define i32 @func(
+; CHECK-SAME: i32 [[X:%.*]]) {
+; CHECK-NEXT:    [[TMP1:%.*]] = icmp eq i32 [[X]], 0
+; CHECK-NEXT:    [[TMP2:%.*]] = call i32 @foo(i1 [[TMP1]])
+; CHECK-NEXT:    [[TMP3:%.*]] = icmp eq i32 [[X]], 1
+; CHECK-NEXT:    [[TMP4:%.*]] = call i32 @foo(i1 [[TMP3]])
+; CHECK-NEXT:    [[TMP5:%.*]] = icmp ne i32 [[TMP4]], 0
+; CHECK-NEXT:    [[TMP6:%.*]] = select i1 [[TMP5]], i32 [[TMP4]], i32 [[TMP2]]
+; CHECK-NEXT:    [[TMP7:%.*]] = icmp eq i32 [[X]], 2
+; CHECK-NEXT:    [[TMP8:%.*]] = call i32 @foo(i1 [[TMP7]])
+; CHECK-NEXT:    [[TMP9:%.*]] = icmp ne i32 [[TMP8]], 0
+; CHECK-NEXT:    [[TMP10:%.*]] = select i1 [[TMP9]], i32 [[TMP8]], i32 [[TMP6]]
+; CHECK-NEXT:    ret i32 [[TMP10]]
+;
+  %1 = icmp eq i32 %x, 0
+  %2 = call i32 @foo(i1 %1)
+
+  %3 = icmp eq i32 %x, 2
+  %4 = call i32 @foo(i1 %3)
+  %5 = icmp ne i32 %4, 0
+  %6 = select i1 %5, i32 %4, i32 %2
+
+  ret i32 %6
+}
+
+declare i32 @foo(i1)
diff --git a/llvm/test/tools/UpdateTestChecks/update_test_checks/Inputs/stable_ir_values2.ll.expected b/llvm/test/tools/UpdateTestChecks/update_test_checks/Inputs/stable_ir_values2.ll.expected
new file mode 100644
index 00000000000000..53f60bda8ee591
--- /dev/null
+++ b/llvm/test/tools/UpdateTestChecks/update_test_checks/Inputs/stable_ir_values2.ll.expected
@@ -0,0 +1,26 @@
+; NOTE: Assertions have been autogenerated by utils/update_test_checks.py UTC_ARGS: --version 3
+; RUN: opt < %s -S | FileCheck %s
+
+define i32 @func(i32 %x) {
+; CHECK-LABEL: define i32 @func(
+; CHECK-SAME: i32 [[X:%.*]]) {
+; CHECK-NEXT:    [[TMP1:%.*]] = icmp eq i32 [[X]], 0
+; CHECK-NEXT:    [[TMP2:%.*]] = call i32 @foo(i1 [[TMP1]])
+; CHECK-NEXT:    [[TMP3:%.*]] = icmp eq i32 [[X]], 2
+; CHECK-NEXT:    [[TMP4:%.*]] = call i32 @foo(i1 [[TMP3]])
+; CHECK-NEXT:    [[TMP5:%.*]] = icmp ne i32 [[TMP4]], 0
+; CHECK-NEXT:    [[TMP6:%.*]] = select i1 [[TMP5]], i32 [[TMP4]], i32 [[TMP2]]
+; CHECK-NEXT:    ret i32 [[TMP6]]
+;
+  %1 = icmp eq i32 %x, 0
+  %2 = call i32 @foo(i1 %1)
+
+  %3 = icmp eq i32 %x, 2
+  %4 = call i32 @foo(i1 %3)
+  %5 = icmp ne i32 %4, 0
+  %6 = select i1 %5, i32 %4, i32 %2
+
+  ret i32 %6
+}
+
+declare i32 @foo(i1)
diff --git a/llvm/test/tools/UpdateTestChecks/update_test_checks/Inputs/stable_ir_values3.ll b/llvm/test/tools/UpdateTestChecks/update_test_checks/Inputs/stable_ir_values3.ll
new file mode 100644
index 00000000000000..3b449291d0e7f8
--- /dev/null
+++ b/llvm/test/tools/UpdateTestChecks/update_test_checks/Inputs/stable_ir_values3.ll
@@ -0,0 +1,38 @@
+; NOTE: Assertions have been autogenerated by utils/update_test_checks.py UTC_ARGS: --version 3
+; RUN: opt < %s -S | FileCheck %s
+
+; Test that we don't regress diff quality by trying to keep variable names
+; stable (and messing up the matching).
+
+define i32 @func(i32 %x) {
+; CHECK-LABEL: define i32 @func(
+; CHECK-SAME: i32 [[X:%.*]]) {
+; CHECK-NEXT:    [[TMP1:%.*]] = icmp eq i32 [[X]], 0
+; CHECK-NEXT:    [[TMP2:%.*]] = call i32 @foo(i1 [[TMP1]])
+; CHECK-NEXT:    [[TMP3:%.*]] = icmp eq i32 [[X]], 1
+; CHECK-NEXT:    [[TMP4:%.*]] = call i32 @foo(i1 [[TMP3]])
+; CHECK-NEXT:    [[TMP5:%.*]] = icmp ne i32 [[TMP4]], 0
+; CHECK-NEXT:    [[TMP6:%.*]] = select i1 [[TMP5]], i32 [[TMP4]], i32 [[TMP2]]
+; CHECK-NEXT:    [[TMP7:%.*]] = icmp eq i32 [[X]], 2
+; CHECK-NEXT:    [[TMP8:%.*]] = call i32 @foo(i1 [[TMP7]])
+; CHECK-NEXT:    [[TMP9:%.*]] = icmp ne i32 [[TMP8]], 0
+; CHECK-NEXT:    [[TMP10:%.*]] = select i1 [[TMP9]], i32 [[TMP8]], i32 [[TMP6]]
+; CHECK-NEXT:    ret i32 [[TMP10]]
+;
+  %1 = icmp eq i32 %x, 0
+  %2 = call i32 @foo(i1 %1)
+
+  %3 = icmp eq i32 %x, 2
+  %4 = call i32 @foo(i1 %3)
+  %5 = icmp ne i32 %4, 0
+  %6 = select i1 %5, i32 %4, i32 %2
+
+  %7 = icmp eq i32 %x, 1
+  %8 = call i32 @foo(i1 %7)
+  %9 = icmp ne i32 %8, 0
+  %10 = select i1 %9, i32 %8, i32 %6
+
+  ret i32 %10
+}
+
+declare i32 @foo(i1)
diff --git a/llvm/test/tools/UpdateTestChecks/update_test_checks/Inputs/stable_ir_values3.ll.expected b/llvm/test/tools/UpdateTestChecks/update_test_checks/Inputs/stable_ir_values3.ll.expected
new file mode 100644
index 00000000000000..3d0f772505a659
--- /dev/null
+++ b/llvm/test/tools/UpdateTestChecks/update_test_checks/Inputs/stable_ir_values3.ll.expected
@@ -0,0 +1,38 @@
+; NOTE: Assertions have been autogenerated by utils/update_test_checks.py UTC_ARGS: --version 3
+; RUN: opt < %s -S | FileCheck %s
+
+; Test that we don't regress diff quality by trying to keep variable names
+; stable (and messing up the matching).
+
+define i32 @func(i32 %x) {
+; CHECK-LABEL: define i32 @func(
+; CHECK-SAME: i32 [[X:%.*]]) {
+; CHECK-NEXT:    [[TMP1:%.*]] = icmp eq i32 [[X]], 0
+; CHECK-NEXT:    [[TMP2:%.*]] = call i32 @foo(i1 [[TMP1]])
+; CHECK-NEXT:    [[TMP3:%.*]] = icmp eq i32 [[X]], 2
+; CHECK-NEXT:    [[TMP4:%.*]] = call i32 @foo(i1 [[TMP3]])
+; CHECK-NEXT:    [[TMP5:%.*]] = icmp ne i32 [[TMP4]], 0
+; CHECK-NEXT:    [[TMP6:%.*]] = select i1 [[TMP5]], i32 [[TMP4]], i32 [[TMP2]]
+; CHECK-NEXT:    [[TMP7:%.*]] = icmp eq i32 [[X]], 1
+; CHECK-NEXT:    [[TMP8:%.*]] = call i32 @foo(i1 [[TMP7]])
+; CHECK-NEXT:    [[TMP9:%.*]] = icmp ne i32 [[TMP8]], 0
+; CHECK-NEXT:    [[TMP10:%.*]] = select i1 [[TMP9]], i32 [[TMP8]], i32 [[TMP6]]
+; CHECK-NEXT:    ret i32 [[TMP10]]
+;
+  %1 = icmp eq i32 %x, 0
+  %2 = call i32 @foo(i1 %1)
+
+  %3 = icmp eq i32 %x, 2
+  %4 = call i32 @foo(i1 %3)
+  %5 = icmp ne i32 %4, 0
+  %6 = select i1 %5, i32 %4, i32 %2
+
+  %7 = icmp eq i32 %x, 1
+  %8 = call i32 @foo(i1 %7)
+  %9 = icmp ne i32 %8, 0
+  %10 = select i1 %9, i32 %8, i32 %6
+
+  ret i32 %10
+}
+
+declare i32 @foo(i1)
diff --git a/llvm/test/tools/UpdateTestChecks/update_test_checks/Inputs/stable_ir_values4.ll b/llvm/test/tools/UpdateTestChecks/update_test_checks/Inputs/stable_ir_values4.ll
new file mode 100644
index 00000000000000..e3d8452f963101
--- /dev/null
+++ b/llvm/test/tools/UpdateTestChecks/update_test_checks/Inputs/stable_ir_values4.ll
@@ -0,0 +1,41 @@
+; NOTE: Assertions have been autogenerated by utils/update_test_checks.py UTC_ARGS: --version 3
+; RUN: opt < %s -S | FileCheck %s
+
+; A test that hits the quadratic runtime prevention in the diff algorithm and
+; a more complex case of name conflict avoidance.
+
+define i32 @func(i32 %x) {
+; CHECK-LABEL: define i32 @func(
+; CHECK-SAME: i32 [[X:%.*]]) {
+; CHECK-NEXT:    [[TMP1:%.*]] = mul i32 [[X]], 3
+; CHECK-NEXT:    [[TMP2:%.*]] = add i32 [[TMP1]], 4
+; CHECK-NEXT:    [[TMP3:%.*]] = call i32 @foo(i32 [[TMP2]])
+; CHECK-NEXT:    [[TMP4:%.*]] = call i32 @foo(i32 [[TMP3]])
+; CHECK-NEXT:    [[TMP5:%.*]] = call i32 @foo(i32 [[TMP4]])
+; CHECK-NEXT:    [[TMP6:%.*]] = call i32 @foo(i32 [[TMP5]])
+; CHECK-NEXT:    [[TMP7:%.*]] = call i32 @foo(i32 [[TMP6]])
+; CHECK-NEXT:    [[TMP8:%.*]] = call i32 @foo(i32 [[TMP7]])
+; CHECK-NEXT:    [[TMP9:%.*]] = call i32 @foo(i32 [[TMP8]])
+; CHECK-NEXT:    [[TMP10:%.*]] = call i32 @foo(i32 [[TMP9]])
+; CHECK-NEXT:    [[TMP11:%.*]] = call i32 @foo(i32 [[TMP10]])
+; CHECK-NEXT:    [[TMP12:%.*]] = call i32 @foo(i32 [[TMP11]])
+; CHECK-NEXT:    ret i32 [[TMP12]]
+;
+  %1 = mul i32 %x, 3
+  %2 = call i32 @foo(i32 %1)
+  %3 = call i32 @foo(i32 %2)
+  %4 = call i32 @foo(i32 %3)
+  %5 = call i32 @foo(i32 %4)
+  %6 = call i32 @foo(i32 %5)
+  %7 = call i32 @foo(i32 %6)
+  %8 = xor i32 %7, 1
+  %9 = call i32 @foo(i32 %8)
+  %10 = add i32 %9, 1
+  %11 = call i32 @foo(i32 %10)
+  %12 = call i32 @foo(i32 %11)
+  %13 = call i32 @foo(i32 %12)
+
+  ret i32 %13
+}
+
+declare i32 @foo(i1)
diff --git a/llvm/test/tools/UpdateTestChecks/update_test_checks/Inputs/stable_ir_values4.ll.expected b/llvm/test/tools/UpdateTestChecks/update_test_checks/Inputs/stable_ir_values4.ll.expected
new file mode 100644
index 00000000000000..5962bdafd9ea0a
--- /dev/null
+++ b/llvm/test/tools/UpdateTestChecks/update_test_checks/Inputs/stable_ir_values4.ll.expected
@@ -0,0 +1,42 @@
+; NOTE: Assertions have been autogenerated by utils/update_test_checks.py UTC_ARGS: --version 3
+; RUN: opt < %s -S | FileCheck %s
+
+; A test that hits the quadratic runtime prevention in the diff algorithm and
+; a more complex case of name conflict avoidance.
+
+define i32 @func(i32 %x) {
+; CHECK-LABEL: define i32 @func(
+; CHECK-SAME: i32 [[X:%.*]]) {
+; CHECK-NEXT:    [[TMP1:%.*]] = mul i32 [[X]], 3
+; CHECK-NEXT:    [[TMP2:%.*]] = call i32 @foo(i32 [[TMP1]])
+; CHECK-NEXT:    [[TMP3:%.*]] = call i32 @foo(i32 [[TMP2]])
+; CHECK-NEXT:    [[TMP4:%.*]] = call i32 @foo(i32 [[TMP3]])
+; CHECK-NEXT:    [[TMP5:%.*]] = call i32 @foo(i32 [[TMP4]])
+; CHECK-NEXT:    [[TMP6:%.*]] = call i32 @foo(i32 [[TMP5]])
+; CHECK-NEXT:    [[TMP7:%.*]] = call i32 @foo(i32 [[TMP6]])
+; CHECK-NEXT:    [[TMP8:%.*]] = xor i32 [[TMP7]], 1
+; CHECK-NEXT:    [[TMP9:%.*]] = call i32 @foo(i32 [[TMP8]])
+; CHECK-NEXT:    [[TMP10:%.*]] = add i32 [[TMP9]], 1
+; CHECK-NEXT:    [[TMP11:%.*]] = call i32 @foo(i32 [[TMP10]])
+; CHECK-NEXT:    [[TMP12:%.*]] = call i32 @foo(i32 [[TMP11]])
+; CHECK-NEXT:    [[TMP13:%.*]] = call i32 @foo(i32 [[TMP12]])
+; CHECK-NEXT:    ret i32 [[TMP13]]
+;
+  %1 = mul i32 %x, 3
+  %2 = call i32 @foo(i32 %1)
+  %3 = call i32 @foo(i32 %2)
+  %4 = call i32 @foo(i32 %3)
+  %5 = call i32 @foo(i32 %4)
+  %6 = call i32 @foo(i32 %5)
+  %7 = call i32 @foo(i32 %6)
+  %8 = xor i32 %7, 1
+  %9 = call i32 @foo(i32 %8)
+  %10 = add i32 %9, 1
+  %11 = call i32 @foo(i32 %10)
+  %12 = call i32 @foo(i32 %11)
+  %13 = call i32 @foo(i32 %12)
+
+  ret i32 %13
+}
+
+declare i32 @foo(i1)
diff --git a/llvm/test/tools/UpdateTestChecks/update_test_checks/stable_ir_values2.test b/llvm/test/tools/UpdateTestChecks/update_test_checks/stable_ir_values2.test
new file mode 100644
index 00000000000000..3cebcd52f00521
--- /dev/null
+++ b/llvm/test/tools/UpdateTestChecks/update_test_checks/stable_ir_values2.test
@@ -0,0 +1,2 @@
+# RUN: cp -f %S/Inputs/stable_ir_values2.ll %t.ll && %update_test_checks %t.ll
+# RUN: diff -u %t.ll %S/Inputs/stable_ir_values2.ll.expected
diff --git a/llvm/test/tools/UpdateTestChecks/update_test_checks/stable_ir_values3.test b/llvm/test/tools/UpdateTestChecks/update_test_checks/stable_ir_values3.test
new file mode 100644
index 00000000000000..83bc80128541f3
--- /dev/null
+++ b/llvm/test/tools/UpdateTestChecks/update_test_checks/stable_ir_values3.test
@@ -0,0 +1,2 @@
+# RUN: cp -f %S/Inputs/stable_ir_values3.ll %t.ll && %update_test_checks %t.ll
+# RUN: diff -u %t.ll %S/Inputs/stable_ir_values3.ll.expected
diff --git a/llvm/test/tools/UpdateTestChecks/update_test_checks/stable_ir_values4.test b/llvm/test/tools/UpdateTestChecks/update_test_checks/stable_ir_values4.test
new file mode 100644
index 00000000000000..89f252f8078064
--- /dev/null
+++ b/llvm/test/tools/UpdateTestChecks/update_test_checks/stable_ir_values4.test
@@ -0,0 +1,2 @@
+# RUN: cp -f %S/Inputs/stable_ir_values4.ll %t.ll && %update_test_checks %t.ll
+# RUN: diff -u %t.ll %S/Inputs/stable_ir_values4.ll.expected

>From a30a49ae10e25578839a38e6b610f874b12b6537 Mon Sep 17 00:00:00 2001
From: Nicolai Hähnle <nicolai.haehnle at amd.com>
Date: Mon, 4 Mar 2024 21:03:15 +0100
Subject: [PATCH 6/8] squash! update_test_checks: keep meta variables stable by
 default

---
 .../Inputs/stable_ir_values2.ll.expected      |  12 +-
 .../Inputs/stable_ir_values3.ll               |   3 -
 .../Inputs/stable_ir_values3.ll.expected      |   3 -
 .../Inputs/stable_ir_values4.ll.expected      |  14 +-
 llvm/utils/UpdateTestChecks/common.py         | 327 ++++++++++--------
 5 files changed, 187 insertions(+), 172 deletions(-)

diff --git a/llvm/test/tools/UpdateTestChecks/update_test_checks/Inputs/stable_ir_values2.ll.expected b/llvm/test/tools/UpdateTestChecks/update_test_checks/Inputs/stable_ir_values2.ll.expected
index 53f60bda8ee591..6311a55a1f9de1 100644
--- a/llvm/test/tools/UpdateTestChecks/update_test_checks/Inputs/stable_ir_values2.ll.expected
+++ b/llvm/test/tools/UpdateTestChecks/update_test_checks/Inputs/stable_ir_values2.ll.expected
@@ -5,12 +5,12 @@ define i32 @func(i32 %x) {
 ; CHECK-LABEL: define i32 @func(
 ; CHECK-SAME: i32 [[X:%.*]]) {
 ; CHECK-NEXT:    [[TMP1:%.*]] = icmp eq i32 [[X]], 0
-; CHECK-NEXT:    [[TMP2:%.*]] = call i32 @foo(i1 [[TMP1]])
-; CHECK-NEXT:    [[TMP3:%.*]] = icmp eq i32 [[X]], 2
-; CHECK-NEXT:    [[TMP4:%.*]] = call i32 @foo(i1 [[TMP3]])
-; CHECK-NEXT:    [[TMP5:%.*]] = icmp ne i32 [[TMP4]], 0
-; CHECK-NEXT:    [[TMP6:%.*]] = select i1 [[TMP5]], i32 [[TMP4]], i32 [[TMP2]]
-; CHECK-NEXT:    ret i32 [[TMP6]]
+; CHECK-NEXT:    [[TMP6:%.*]] = call i32 @foo(i1 [[TMP1]])
+; CHECK-NEXT:    [[TMP7:%.*]] = icmp eq i32 [[X]], 2
+; CHECK-NEXT:    [[TMP8:%.*]] = call i32 @foo(i1 [[TMP7]])
+; CHECK-NEXT:    [[TMP9:%.*]] = icmp ne i32 [[TMP8]], 0
+; CHECK-NEXT:    [[TMP10:%.*]] = select i1 [[TMP9]], i32 [[TMP8]], i32 [[TMP6]]
+; CHECK-NEXT:    ret i32 [[TMP10]]
 ;
   %1 = icmp eq i32 %x, 0
   %2 = call i32 @foo(i1 %1)
diff --git a/llvm/test/tools/UpdateTestChecks/update_test_checks/Inputs/stable_ir_values3.ll b/llvm/test/tools/UpdateTestChecks/update_test_checks/Inputs/stable_ir_values3.ll
index 3b449291d0e7f8..a4f4fc67f78d3f 100644
--- a/llvm/test/tools/UpdateTestChecks/update_test_checks/Inputs/stable_ir_values3.ll
+++ b/llvm/test/tools/UpdateTestChecks/update_test_checks/Inputs/stable_ir_values3.ll
@@ -1,9 +1,6 @@
 ; NOTE: Assertions have been autogenerated by utils/update_test_checks.py UTC_ARGS: --version 3
 ; RUN: opt < %s -S | FileCheck %s
 
-; Test that we don't regress diff quality by trying to keep variable names
-; stable (and messing up the matching).
-
 define i32 @func(i32 %x) {
 ; CHECK-LABEL: define i32 @func(
 ; CHECK-SAME: i32 [[X:%.*]]) {
diff --git a/llvm/test/tools/UpdateTestChecks/update_test_checks/Inputs/stable_ir_values3.ll.expected b/llvm/test/tools/UpdateTestChecks/update_test_checks/Inputs/stable_ir_values3.ll.expected
index 3d0f772505a659..08d3c22172ee3f 100644
--- a/llvm/test/tools/UpdateTestChecks/update_test_checks/Inputs/stable_ir_values3.ll.expected
+++ b/llvm/test/tools/UpdateTestChecks/update_test_checks/Inputs/stable_ir_values3.ll.expected
@@ -1,9 +1,6 @@
 ; NOTE: Assertions have been autogenerated by utils/update_test_checks.py UTC_ARGS: --version 3
 ; RUN: opt < %s -S | FileCheck %s
 
-; Test that we don't regress diff quality by trying to keep variable names
-; stable (and messing up the matching).
-
 define i32 @func(i32 %x) {
 ; CHECK-LABEL: define i32 @func(
 ; CHECK-SAME: i32 [[X:%.*]]) {
diff --git a/llvm/test/tools/UpdateTestChecks/update_test_checks/Inputs/stable_ir_values4.ll.expected b/llvm/test/tools/UpdateTestChecks/update_test_checks/Inputs/stable_ir_values4.ll.expected
index 5962bdafd9ea0a..126807bed1cc6e 100644
--- a/llvm/test/tools/UpdateTestChecks/update_test_checks/Inputs/stable_ir_values4.ll.expected
+++ b/llvm/test/tools/UpdateTestChecks/update_test_checks/Inputs/stable_ir_values4.ll.expected
@@ -8,19 +8,19 @@ define i32 @func(i32 %x) {
 ; CHECK-LABEL: define i32 @func(
 ; CHECK-SAME: i32 [[X:%.*]]) {
 ; CHECK-NEXT:    [[TMP1:%.*]] = mul i32 [[X]], 3
-; CHECK-NEXT:    [[TMP2:%.*]] = call i32 @foo(i32 [[TMP1]])
-; CHECK-NEXT:    [[TMP3:%.*]] = call i32 @foo(i32 [[TMP2]])
+; CHECK-NEXT:    [[TMP3:%.*]] = call i32 @foo(i32 [[TMP1]])
 ; CHECK-NEXT:    [[TMP4:%.*]] = call i32 @foo(i32 [[TMP3]])
 ; CHECK-NEXT:    [[TMP5:%.*]] = call i32 @foo(i32 [[TMP4]])
 ; CHECK-NEXT:    [[TMP6:%.*]] = call i32 @foo(i32 [[TMP5]])
 ; CHECK-NEXT:    [[TMP7:%.*]] = call i32 @foo(i32 [[TMP6]])
-; CHECK-NEXT:    [[TMP8:%.*]] = xor i32 [[TMP7]], 1
-; CHECK-NEXT:    [[TMP9:%.*]] = call i32 @foo(i32 [[TMP8]])
-; CHECK-NEXT:    [[TMP10:%.*]] = add i32 [[TMP9]], 1
+; CHECK-NEXT:    [[TMP8:%.*]] = call i32 @foo(i32 [[TMP7]])
+; CHECK-NEXT:    [[TMP13:%.*]] = xor i32 [[TMP8]], 1
+; CHECK-NEXT:    [[TMP14:%.*]] = call i32 @foo(i32 [[TMP13]])
+; CHECK-NEXT:    [[TMP9:%.*]] = add i32 [[TMP14]], 1
+; CHECK-NEXT:    [[TMP10:%.*]] = call i32 @foo(i32 [[TMP9]])
 ; CHECK-NEXT:    [[TMP11:%.*]] = call i32 @foo(i32 [[TMP10]])
 ; CHECK-NEXT:    [[TMP12:%.*]] = call i32 @foo(i32 [[TMP11]])
-; CHECK-NEXT:    [[TMP13:%.*]] = call i32 @foo(i32 [[TMP12]])
-; CHECK-NEXT:    ret i32 [[TMP13]]
+; CHECK-NEXT:    ret i32 [[TMP12]]
 ;
   %1 = mul i32 %x, 3
   %2 = call i32 @foo(i32 %1)
diff --git a/llvm/utils/UpdateTestChecks/common.py b/llvm/utils/UpdateTestChecks/common.py
index 5eaf225d490705..3ae0752a39da25 100644
--- a/llvm/utils/UpdateTestChecks/common.py
+++ b/llvm/utils/UpdateTestChecks/common.py
@@ -1233,127 +1233,56 @@ def may_clash_with_default_check_prefix_name(check_prefix, var):
     )
 
 
-VARIABLE_TAG = "[[@@]]"
-METAVAR_RE = re.compile(r"\[\[([A-Z0-9_]+)(?::[^]]+)?\]\]")
-NUMERIC_SUFFIX_RE = re.compile(r"[0-9]*$")
-
-
-class CheckValueInfo:
-    def __init__(
-        self,
-        nameless_value: NamelessValue,
-        var: str,
-        prefix: str,
-    ):
-        self.nameless_value = nameless_value
-        self.var = var
-        self.prefix = prefix
-
-
-# Represent a check line in a way that allows us to compare check lines while
-# ignoring some or all of the FileCheck variable names.
-class CheckLineInfo:
-    def __init__(self, line, values):
-        # Line with all FileCheck variable name occurrences replaced by VARIABLE_TAG
-        self.line: str = line
-
-        # Information on each FileCheck variable name occurrences in the line
-        self.values: List[CheckValueInfo] = values
-
-    def __repr__(self):
-        return f"CheckLineInfo(line={self.line}, self.values={self.values})"
+def find_diff_matching(lhs: List[str], rhs: List[str]) -> List[int]:
+    """
+    Find a large ordered matching between strings in lhs and rhs.
 
+    Think of this as finding the *unchanged* lines in a diff, where the entries
+    of lhs and rhs are lines of the files being diffed.
 
-def remap_metavar_names(
-    orig_line_infos: List[CheckLineInfo],
-    new_line_infos: List[CheckLineInfo],
-    committed_names: Set[str],
-) -> Mapping[str, str]:
+    Returns a list of matched (lhs_idx, rhs_idx) pairs.
     """
-    Map all FileCheck variable names that appear in new_line_infos to new
-    FileCheck variable names in an attempt to reduce the diff from orig_line_infos
-    to new_line_infos.
 
-    This is done by:
-    * Committing variable names that participate in fully unchanged lines
-      (regardless of how these lines are ordered).
-    * Attempt to cause additional lines to "become unchanged" by matching
-      similar lines using a diffing algorithm.
-    * Recursing into intervals between the matched lines.
-    """
-    # Initialize uncommitted identity mappings
-    new_mapping = {}
-    for line in new_line_infos:
-        for value in line.values:
-            new_mapping[value.var] = value.var
+    # Collect matches in reverse order.
+    matches = []
 
-    # Recursively commit to the identity mapping or find a better one
-    def recurse(
-        orig_line_infos: List[CheckLineInfo], new_line_infos: List[CheckLineInfo]
-    ):
-        if not new_line_infos or not orig_line_infos:
+    def recurse(lhs_start, lhs_end, rhs_start, rhs_end):
+        if lhs_start == lhs_end or rhs_start == rhs_end:
             return
 
-        lines = set()
+        # First, collect a set of candidate matching edges. We limit this to a
+        # constant multiple of the input size to avoid quadratic runtime.
+        patterns = collections.defaultdict(lambda: ([], []))
 
-        # Search for lines that are identical on both sides, including meta
-        # variable names, and commit to those names immediately
-        for line in orig_line_infos:
-            key = (line.line.strip(), tuple(value.var for value in line.values))
-            lines.add(key)
+        for idx in range(lhs_start, lhs_end):
+            patterns[lhs[idx]][0].append(idx)
+        for idx in range(rhs_start, rhs_end):
+            patterns[rhs[idx]][1].append(idx)
 
-        for line in new_line_infos:
-            key = (
-                line.line.strip(),
-                tuple(new_mapping[value.var] for value in line.values),
-            )
-            if key in lines:
-                for value in line.values:
-                    committed_names.add(new_mapping[value.var])
-
-        # Search for lines that are unique on both sides if we only consider
-        # variable names that have been committed.
-        lines = collections.defaultdict(lambda: [None, None])
-        for i, line in enumerate(orig_line_infos):
-            key = (
-                line.line.strip(),
-                tuple(
-                    value.var for value in line.values if value.var in committed_names
-                ),
-            )
-            entry = lines[key]
-            if entry[0] is None:
-                entry[0] = i
-            else:
-                entry[0] = False
-
-        for i, line in enumerate(new_line_infos):
-            key = (
-                line.line.strip(),
-                tuple(
-                    new_mapping[value.var]
-                    for value in line.values
-                    if new_mapping[value.var] in committed_names
-                ),
-            )
-            entry = lines[key]
-            if entry[1] is None:
-                entry[1] = i
+        multiple_patterns = []
+
+        candidates = []
+        for pattern in patterns.values():
+            if not pattern[0] or not pattern[1]:
+                continue
+
+            if len(pattern[0]) == len(pattern[1]) == 1:
+                candidates.append((pattern[0][0], pattern[1][0]))
             else:
-                entry[1] = False
+                multiple_patterns.append(pattern)
 
-        unique_matches = []
-        for entry in lines.values():
-            if (
-                entry[0] is not None
-                and entry[0] is not False
-                and entry[1] is not None
-                and entry[1] is not False
-            ):
-                unique_matches.append((entry[0], entry[1]))
+        multiple_patterns.sort(key=lambda pattern: len(pattern[0]) * len(pattern[1]))
 
-        if not unique_matches:
-            # There are no unique matches. This is the recursion base case.
+        for pattern in multiple_patterns:
+            if len(candidates) + len(pattern[0]) * len(pattern[1]) > 2 * (len(lhs) + len(rhs)):
+                break
+            for lhs_idx in pattern[0]:
+                for rhs_idx in pattern[1]:
+                    candidates.append((lhs_idx, rhs_idx))
+
+        if not candidates:
+            # The LHS and RHS either share nothing in common, or lines are just too
+            # identical. In that case, let's give up and not match anything.
             return
 
         # Compute a maximal crossing-free matching via an algorithm that is
@@ -1402,86 +1331,177 @@ def recurse(
         # Therefore, the algorithm is trivially O(M log M) in the number of
         # points. Since we only consider lines that are unique, it is log-linear
         # in the problem size.
-        unique_matches.sort(key=lambda entry: entry[0])
+        candidates.sort(key=lambda candidate: (candidate[0], -candidate[1]))
 
         backlinks = []
         table = []
-        for _, new_idx in unique_matches:
-            ti = bisect.bisect_left(table, new_idx, key=lambda entry: entry[0])
+        for _, rhs_idx in candidates:
+            candidate_idx = len(backlinks)
+            ti = bisect.bisect_left(table, rhs_idx, key=lambda entry: entry[0])
             if ti < len(table):
-                table[ti] = (new_idx, len(backlinks))
+                table[ti] = (rhs_idx, candidate_idx)
             else:
-                table.append((new_idx, len(backlinks)))
+                table.append((rhs_idx, candidate_idx))
             if ti > 0:
                 backlinks.append(table[ti - 1][1])
             else:
                 backlinks.append(None)
 
-        # Commit to names in the matching, re-checking compatibility as we go along
+        # Commit to names in the matching by walking the backlinks. Recursively
+        # attempt to fill in more matches in between.
+        previous = (lhs_end, rhs_end)
         match_idx = table[-1][1]
-        matches = []
         while match_idx is not None:
-            orig_idx, new_idx = unique_matches[match_idx]
+            current = candidates[match_idx]
+            recurse(current[0] + 1, previous[0], current[1] + 1, previous[1])
+            matches.append(current)
+            previous = current
+            match_idx = backlinks[match_idx]
+        recurse(lhs_start, previous[0], rhs_start, previous[1])
+
+    recurse(0, len(lhs), 0, len(rhs))
+
+    matches.reverse()
+    return matches
+
+
+VARIABLE_TAG = "[[@@]]"
+METAVAR_RE = re.compile(r"\[\[([A-Z0-9_]+)(?::[^]]+)?\]\]")
+NUMERIC_SUFFIX_RE = re.compile(r"[0-9]*$")
+
+
+class CheckValueInfo:
+    def __init__(
+        self,
+        nameless_value: NamelessValue,
+        var: str,
+        prefix: str,
+    ):
+        self.nameless_value = nameless_value
+        self.var = var
+        self.prefix = prefix
+
+
+# Represent a check line in a way that allows us to compare check lines while
+# ignoring some or all of the FileCheck variable names.
+class CheckLineInfo:
+    def __init__(self, line, values):
+        # Line with all FileCheck variable name occurrences replaced by VARIABLE_TAG
+        self.line: str = line
+
+        # Information on each FileCheck variable name occurrence in the line
+        self.values: List[CheckValueInfo] = values
+
+    def __repr__(self):
+        return f"CheckLineInfo(line={self.line}, self.values={self.values})"
+
+
+def remap_metavar_names(
+    old_line_infos: List[CheckLineInfo],
+    new_line_infos: List[CheckLineInfo],
+    committed_names: Set[str],
+) -> Mapping[str, str]:
+    """
+    Map all FileCheck variable names that appear in new_line_infos to new
+    FileCheck variable names in an attempt to reduce the diff from old_line_infos
+    to new_line_infos.
+
+    This is done by:
+    * Matching old check lines and new check lines using a diffing algorithm
+      applied after replacing names with wildcards.
+    * Committing to variable names such that the matched lines become equal
+      (without wildcards) if possible
+    * This is done recursively to handle cases where many lines are equal
+      after wildcard replacement
+    """
+    # Initialize uncommitted identity mappings
+    new_mapping = {}
+    for line in new_line_infos:
+        for value in line.values:
+            new_mapping[value.var] = value.var
+
+    # Recursively commit to the identity mapping or find a better one
+    def recurse(old_begin, old_end, new_begin, new_end):
+        if old_begin == old_end or new_begin == new_end:
+            return
+
+        # Find a matching of lines where uncommitted names are replaced
+        # with a placeholder.
+        def diffify_line(line, mapper):
+            values = []
+            for value in line.values:
+                mapped = mapper(value.var)
+                values.append(mapped if mapped in committed_names else '?')
+            return line.line.strip() + ' @@@ ' + ' @ '.join(values)
+
+        lhs_lines = [
+            diffify_line(line, lambda x: x)
+            for line in old_line_infos[old_begin:old_end]
+        ]
+        rhs_lines = [
+            diffify_line(line, lambda x: new_mapping[x])
+            for line in new_line_infos[new_begin:new_end]
+        ]
+
+        candidate_matches = find_diff_matching(lhs_lines, rhs_lines)
+
+        # Apply commits greedily on a match-by-match basis
+        matches = [(-1,-1)]
+        committed_anything = False
+        for lhs_idx, rhs_idx in candidate_matches:
+            lhs_line = old_line_infos[lhs_idx]
+            rhs_line = new_line_infos[rhs_idx]
+
             local_commits = {}
 
-            for orig_value, new_value in zip(
-                orig_line_infos[orig_idx].values, new_line_infos[new_idx].values
-            ):
-                if new_mapping[new_value.var] in committed_names:
-                    # The new value has already been committed by a previous match we considered
-                    # during the outer loop. If it was mapped to the same name as the original value,
-                    # we can consider committing other values from this line. Otherwise, we should
-                    # ignore this line.
-                    if new_mapping[new_value.var] == orig_value.var:
+            for lhs_value, rhs_value in zip(lhs_line.values, rhs_line.values):
+                if new_mapping[rhs_value.var] in committed_names:
+                    # The new value has already been committed. If it was mapped
+                    # to the same name as the original value, we can consider
+                    # # committing other values from this line. Otherwise, we
+                    # should ignore this line.
+                    if new_mapping[rhs_value.var] == lhs_value.var:
                         continue
                     else:
                         break
 
-                if new_value.var in local_commits:
+                if rhs_value.var in local_commits:
                     # Same, but for a possible commit happening on the same line
-                    if local_commits[new_value.var] == orig_value.var:
+                    if local_commits[rhs_value.var] == lhs_value.var:
                         continue
                     else:
                         break
 
-                if orig_value.var in committed_names:
+                if lhs_value.var in committed_names:
                     # We can't map this value because the name we would map it to has already been
                     # committed for something else. Give up on this line.
                     break
 
-                local_commits[new_value.var] = orig_value.var
+                local_commits[rhs_value.var] = lhs_value.var
             else:
                 # No reason not to add any commitments for this line
-                for new_var, orig_var in local_commits.items():
-                    new_mapping[new_var] = orig_var
-                    committed_names.add(orig_var)
+                for rhs_var, lhs_var in local_commits.items():
+                    new_mapping[rhs_var] = lhs_var
+                    committed_names.add(lhs_var)
+                    committed_anything = True
 
                     if (
-                        orig_var != new_var
-                        and orig_var in new_mapping
-                        and new_mapping[orig_var] == orig_var
+                        lhs_var != rhs_var
+                        and lhs_var in new_mapping
+                        and new_mapping[lhs_var] == lhs_var
                     ):
-                        new_mapping[orig_var] = "conflict_" + orig_var
+                        new_mapping[lhs_var] = "conflict_" + lhs_var
 
-                matches.append((orig_idx, new_idx))
+                matches.append((lhs_idx, rhs_idx))
 
-            match_idx = backlinks[match_idx]
+        matches.append((old_end, new_end))
 
-        # Recurse into the intervals between matches. Add sentinels to simplify
-        # the iteration over intervals.
-        matches.append((-1, -1))
-        matches.reverse()
-        matches.append((len(orig_line_infos), len(new_line_infos)))
-
-        for ((left_orig_idx, left_new_idx), (right_orig_idx, right_new_idx)) in zip(matches, matches[1:]):
-            assert left_orig_idx < right_orig_idx
-            assert left_new_idx < right_new_idx
-            recurse(
-                orig_line_infos[left_orig_idx + 1 : right_orig_idx],
-                new_line_infos[left_new_idx + 1 : right_new_idx],
-            )
+        # Recursively handle sequences between matches
+        if committed_anything:
+            for ((lhs_prev, rhs_prev), (lhs_next, rhs_next)) in zip(matches, matches[1:]):
+                recurse(lhs_prev + 1, lhs_next, rhs_prev + 1, rhs_next)
 
-    recurse(orig_line_infos, new_line_infos)
+    recurse(0, len(old_line_infos), 0, len(new_line_infos))
 
     # Commit to remaining names and resolve conflicts
     for new_name, mapped_name in new_mapping.items():
@@ -1503,6 +1523,7 @@ def recurse(
             candidate = f"{base_name}{suffix}"
             if candidate not in committed_names:
                 new_mapping[new_name] = candidate
+                committed_names.add(candidate)
                 break
             suffix += 1
 
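As a side note on the matching step above: the crossing-free matching boils down to a longest-increasing-subsequence computation over the candidate (lhs_idx, rhs_idx) pairs, using a table plus backlinks exactly as in the comments. Below is a minimal standalone sketch of that step; the function name, toy input, and printed result are invented for illustration, and it assumes Python 3.10+ for the key= argument of bisect_left (the same construct the patch uses).

import bisect

def longest_chain(candidates):
    # candidates: (lhs_idx, rhs_idx) pairs; find a longest chain in which both
    # coordinates are strictly increasing, i.e. a crossing-free matching.
    # Sorting by lhs ascending and rhs descending ensures that two pairs sharing
    # an lhs index can never both end up in one strictly increasing chain.
    candidates = sorted(candidates, key=lambda c: (c[0], -c[1]))
    table = []      # table[n] = (rhs_idx, candidate_idx): lowest end of a chain of n+1 points
    backlinks = []  # backlinks[i] = previous candidate on a best chain ending at i
    for i, (_, rhs_idx) in enumerate(candidates):
        ti = bisect.bisect_left(table, rhs_idx, key=lambda entry: entry[0])
        if ti < len(table):
            table[ti] = (rhs_idx, i)
        else:
            table.append((rhs_idx, i))
        backlinks.append(table[ti - 1][1] if ti > 0 else None)
    # Recover the chain by walking the backlinks from the longest table entry.
    chain = []
    idx = table[-1][1] if table else None
    while idx is not None:
        chain.append(candidates[idx])
        idx = backlinks[idx]
    chain.reverse()
    return chain

print(longest_chain([(0, 1), (1, 0), (2, 2), (3, 3)]))  # [(1, 0), (2, 2), (3, 3)]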

>From e4df4d1f526fb1980b53090d846c950841f23ddb Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Nicolai=20H=C3=A4hnle?= <nicolai.haehnle at amd.com>
Date: Wed, 6 Mar 2024 20:31:25 +0100
Subject: [PATCH 7/8] squash! update_test_checks: keep meta variables stable by
 default

- Remove inner recursion
- Add some more comments
- Minor fixups
---
 .../Inputs/stable_ir_values4.ll.expected      |  10 +-
 llvm/utils/UpdateTestChecks/common.py         | 238 +++++++++---------
 2 files changed, 127 insertions(+), 121 deletions(-)

diff --git a/llvm/test/tools/UpdateTestChecks/update_test_checks/Inputs/stable_ir_values4.ll.expected b/llvm/test/tools/UpdateTestChecks/update_test_checks/Inputs/stable_ir_values4.ll.expected
index 126807bed1cc6e..e3fa51598c48e3 100644
--- a/llvm/test/tools/UpdateTestChecks/update_test_checks/Inputs/stable_ir_values4.ll.expected
+++ b/llvm/test/tools/UpdateTestChecks/update_test_checks/Inputs/stable_ir_values4.ll.expected
@@ -8,15 +8,15 @@ define i32 @func(i32 %x) {
 ; CHECK-LABEL: define i32 @func(
 ; CHECK-SAME: i32 [[X:%.*]]) {
 ; CHECK-NEXT:    [[TMP1:%.*]] = mul i32 [[X]], 3
-; CHECK-NEXT:    [[TMP3:%.*]] = call i32 @foo(i32 [[TMP1]])
+; CHECK-NEXT:    [[TMP2:%.*]] = call i32 @foo(i32 [[TMP1]])
+; CHECK-NEXT:    [[TMP3:%.*]] = call i32 @foo(i32 [[TMP2]])
 ; CHECK-NEXT:    [[TMP4:%.*]] = call i32 @foo(i32 [[TMP3]])
 ; CHECK-NEXT:    [[TMP5:%.*]] = call i32 @foo(i32 [[TMP4]])
 ; CHECK-NEXT:    [[TMP6:%.*]] = call i32 @foo(i32 [[TMP5]])
 ; CHECK-NEXT:    [[TMP7:%.*]] = call i32 @foo(i32 [[TMP6]])
-; CHECK-NEXT:    [[TMP8:%.*]] = call i32 @foo(i32 [[TMP7]])
-; CHECK-NEXT:    [[TMP13:%.*]] = xor i32 [[TMP8]], 1
-; CHECK-NEXT:    [[TMP14:%.*]] = call i32 @foo(i32 [[TMP13]])
-; CHECK-NEXT:    [[TMP9:%.*]] = add i32 [[TMP14]], 1
+; CHECK-NEXT:    [[TMP8:%.*]] = xor i32 [[TMP7]], 1
+; CHECK-NEXT:    [[TMP13:%.*]] = call i32 @foo(i32 [[TMP8]])
+; CHECK-NEXT:    [[TMP9:%.*]] = add i32 [[TMP13]], 1
 ; CHECK-NEXT:    [[TMP10:%.*]] = call i32 @foo(i32 [[TMP9]])
 ; CHECK-NEXT:    [[TMP11:%.*]] = call i32 @foo(i32 [[TMP10]])
 ; CHECK-NEXT:    [[TMP12:%.*]] = call i32 @foo(i32 [[TMP11]])
diff --git a/llvm/utils/UpdateTestChecks/common.py b/llvm/utils/UpdateTestChecks/common.py
index 3ae0752a39da25..668d5952333ca6 100644
--- a/llvm/utils/UpdateTestChecks/common.py
+++ b/llvm/utils/UpdateTestChecks/common.py
@@ -447,9 +447,7 @@ def collect_original_check_lines(ti: TestInfo, prefix_set: set):
                 ):
                     if m.group(1) not in current_function:
                         current_function[m.group(1)] = []
-                    current_function[m.group(1)].append(
-                        input_line[m.end() :].strip()
-                    )
+                    current_function[m.group(1)].append(input_line[m.end() :].strip())
                 continue
             current_function = None
 
@@ -462,7 +460,7 @@ def collect_original_check_lines(ti: TestInfo, prefix_set: set):
 
             assert func_name not in result
             current_function = result[func_name] = {}
-    
+
     return result
 
 
@@ -1233,7 +1231,7 @@ def may_clash_with_default_check_prefix_name(check_prefix, var):
     )
 
 
-def find_diff_matching(lhs: List[str], rhs: List[str]) -> List[int]:
+def find_diff_matching(lhs: List[str], rhs: List[str]) -> List[tuple]:
     """
     Find a large ordered matching between strings in lhs and rhs.
 
@@ -1243,123 +1241,130 @@ def find_diff_matching(lhs: List[str], rhs: List[str]) -> List[int]:
     Returns a list of matched (lhs_idx, rhs_idx) pairs.
     """
 
+    if not lhs or not rhs:
+        return []
+
     # Collect matches in reverse order.
     matches = []
 
-    def recurse(lhs_start, lhs_end, rhs_start, rhs_end):
-        if lhs_start == lhs_end or rhs_start == rhs_end:
-            return
-
-        # First, collect a set of candidate matching edges. We limit this to a
-        # constant multiple of the input size to avoid quadratic runtime.
-        patterns = collections.defaultdict(lambda: ([], []))
+    # First, collect a set of candidate matching edges. We limit this to a
+    # constant multiple of the input size to avoid quadratic runtime.
+    patterns = collections.defaultdict(lambda: ([], []))
 
-        for idx in range(lhs_start, lhs_end):
-            patterns[lhs[idx]][0].append(idx)
-        for idx in range(rhs_start, rhs_end):
-            patterns[rhs[idx]][1].append(idx)
+    for idx in range(len(lhs)):
+        patterns[lhs[idx]][0].append(idx)
+    for idx in range(len(rhs)):
+        patterns[rhs[idx]][1].append(idx)
 
-        multiple_patterns = []
+    multiple_patterns = []
 
-        candidates = []
-        for pattern in patterns.values():
-            if not pattern[0] or not pattern[1]:
-                continue
-
-            if len(pattern[0]) == len(pattern[1]) == 1:
-                candidates.append((pattern[0][0], pattern[1][0]))
-            else:
-                multiple_patterns.append(pattern)
+    candidates = []
+    for pattern in patterns.values():
+        if not pattern[0] or not pattern[1]:
+            continue
 
-        multiple_patterns.sort(key=lambda pattern: len(pattern[0]) * len(pattern[1]))
+        if len(pattern[0]) == len(pattern[1]) == 1:
+            candidates.append((pattern[0][0], pattern[1][0]))
+        else:
+            multiple_patterns.append(pattern)
 
-        for pattern in multiple_patterns:
-            if len(candidates) + len(pattern[0]) * len(pattern[1]) > 2 * (len(lhs) + len(rhs)):
-                break
-            for lhs_idx in pattern[0]:
-                for rhs_idx in pattern[1]:
-                    candidates.append((lhs_idx, rhs_idx))
+    multiple_patterns.sort(key=lambda pattern: len(pattern[0]) * len(pattern[1]))
 
-        if not candidates:
-            # The LHS and RHS either share nothing in common, or lines are just too
-            # identical. In that case, let's give up and not match anything.
-            return
+    for pattern in multiple_patterns:
+        if len(candidates) + len(pattern[0]) * len(pattern[1]) > 2 * (
+            len(lhs) + len(rhs)
+        ):
+            break
+        for lhs_idx in pattern[0]:
+            for rhs_idx in pattern[1]:
+                candidates.append((lhs_idx, rhs_idx))
+
+    if not candidates:
+        # The LHS and RHS either share nothing in common, or lines are just too
+        # identical. In that case, let's give up and not match anything.
+        return []
 
-        # Compute a maximal crossing-free matching via an algorithm that is
-        # inspired by a mixture of dynamic programming and line-sweeping in
-        # discrete geometry.
-        #
-        # I would be surprised if this algorithm didn't exist somewhere in the
-        # literature, but I found it without consciously recalling any
-        # references, so you'll have to make do with the explanation below.
-        # Sorry.
-        #
-        # The underlying graph is bipartite:
-        #  - nodes on the LHS represent lines in the original check
-        #  - nodes on the RHS represent lines in the new (updated) check
-        #
-        # Nodes are implicitly sorted by the corresponding line number.
-        # Edges (unique_matches) are sorted by the line number on the LHS.
-        #
-        # Here's the geometric intuition for the algorithm.
-        #
-        #  * Plot the edges as points in the plane, with the original line
-        #    number on the X axis and the updated line number on the Y axis.
-        #  * The goal is to find a longest "chain" of points where each point
-        #    is strictly above and to the right of the previous point.
-        #  * The algorithm proceeds by sweeping a vertical line from left to
-        #    right.
-        #  * The algorithm maintains a table where `table[N]` answers the
-        #    question "What is currently the 'best' way to build a chain of N+1
-        #    points to the left of the vertical line". Here, 'best' means
-        #    that the last point of the chain is a as low as possible (minimal
-        #    Y coordinate).
-        #   * `table[N]` is `(y, point_idx)` where `point_idx` is the index of
-        #     the last point in the chain and `y` is its Y coordinate
-        #   * A key invariant is that the Y values in the table are
-        #     monotonically increasing
-        #  * Thanks to these properties, the table can be used to answer the
-        #    question "What is the longest chain that can be built to the left
-        #    of the vertical line using only points below a certain Y value",
-        #    using a binary search over the table.
-        #  * The algorithm also builds a backlink structure in which every point
-        #    links back to the previous point on a best (longest) chain ending
-        #    at that point
+    # Compute a maximal crossing-free matching via an algorithm that is
+    # inspired by a mixture of dynamic programming and line-sweeping in
+    # discrete geometry.
+    #
+    # I would be surprised if this algorithm didn't exist somewhere in the
+    # literature, but I found it without consciously recalling any
+    # references, so you'll have to make do with the explanation below.
+    # Sorry.
+    #
+    # The underlying graph is bipartite:
+    #  - nodes on the LHS represent lines in the original check
+    #  - nodes on the RHS represent lines in the new (updated) check
+    #
+    # Nodes are implicitly sorted by the corresponding line number.
+    # Edges (candidates) are sorted by the line number on the LHS.
+    #
+    # Here's the geometric intuition for the algorithm.
+    #
+    #  * Plot the edges as points in the plane, with the original line
+    #    number on the X axis and the updated line number on the Y axis.
+    #  * The goal is to find a longest "chain" of points where each point
+    #    is strictly above and to the right of the previous point.
+    #  * The algorithm proceeds by sweeping a vertical line from left to
+    #    right.
+    #  * The algorithm maintains a table where `table[N]` answers the
+    #    question "What is currently the 'best' way to build a chain of N+1
+    #    points to the left of the vertical line". Here, 'best' means
+    #    that the last point of the chain is as low as possible (minimal
+    #    Y coordinate).
+    #   * `table[N]` is `(y, point_idx)` where `point_idx` is the index of
+    #     the last point in the chain and `y` is its Y coordinate
+    #   * A key invariant is that the Y values in the table are
+    #     monotonically increasing
+    #  * Thanks to these properties, the table can be used to answer the
+    #    question "What is the longest chain that can be built to the left
+    #    of the vertical line using only points below a certain Y value",
+    #    using a binary search over the table.
+    #  * The algorithm also builds a backlink structure in which every point
+    #    links back to the previous point on a best (longest) chain ending
+    #    at that point
+    #
+    # The core loop of the algorithm sweeps the line and updates the table
+    # and backlink structure for every point that we cross during the sweep.
+    # Therefore, the algorithm is trivially O(M log M) in the number of
+    # points.
+    candidates.sort(key=lambda candidate: (candidate[0], -candidate[1]))
+
+    backlinks = []
+    table = []
+    for _, rhs_idx in candidates:
+        candidate_idx = len(backlinks)
+        ti = bisect.bisect_left(table, rhs_idx, key=lambda entry: entry[0])
+
+        # Update the table to record a best chain ending in the current point.
+        # There always is one, and if any of the previously visited points had
+        # a higher Y coordinate, then there is always a previously recorded best
+        # chain that can be improved upon by using the current point.
         #
-        # The core loop of the algorithm sweeps the line and updates the table
-        # and backlink structure for every point that we cross during the sweep.
-        # Therefore, the algorithm is trivially O(M log M) in the number of
-        # points. Since we only consider lines that are unique, it is log-linear
-        # in the problem size.
-        candidates.sort(key=lambda candidate: (candidate[0], -candidate[1]))
-
-        backlinks = []
-        table = []
-        for _, rhs_idx in candidates:
-            candidate_idx = len(backlinks)
-            ti = bisect.bisect_left(table, rhs_idx, key=lambda entry: entry[0])
-            if ti < len(table):
-                table[ti] = (rhs_idx, candidate_idx)
-            else:
-                table.append((rhs_idx, candidate_idx))
-            if ti > 0:
-                backlinks.append(table[ti - 1][1])
-            else:
-                backlinks.append(None)
-
-        # Commit to names in the matching by walking the backlinks. Recursively
-        # attempt to fill in more matches in between.
-        previous = (lhs_end, rhs_end)
-        match_idx = table[-1][1]
-        while match_idx is not None:
-            current = candidates[match_idx]
-            recurse(current[0] + 1, previous[0], current[1] + 1, previous[1])
-            matches.append(current)
-            previous = current
-            match_idx = backlinks[match_idx]
-        recurse(lhs_start, previous[0], rhs_start, previous[1])
-
-    recurse(0, len(lhs), 0, len(rhs))
+        # There is only one case where there is some ambiguity. If the
+        # pre-existing entry table[ti] has the same Y coordinate / rhs_idx as
+        # the current point (this can only happen if the same line appeared
+        # multiple times on the LHS), then we could choose to keep the
+        # previously recorded best chain instead. That would bias the algorithm
+        # differently but should have no systematic impact on the quality of the
+        # result.
+        if ti < len(table):
+            table[ti] = (rhs_idx, candidate_idx)
+        else:
+            table.append((rhs_idx, candidate_idx))
+        if ti > 0:
+            backlinks.append(table[ti - 1][1])
+        else:
+            backlinks.append(None)
+
+    # Commit to names in the matching by walking the backlinks. Recursively
+    # attempt to fill in more matches in between.
+    match_idx = table[-1][1]
+    while match_idx is not None:
+        current = candidates[match_idx]
+        matches.append(current)
+        match_idx = backlinks[match_idx]
 
     matches.reverse()
     return matches
@@ -1431,8 +1436,8 @@ def diffify_line(line, mapper):
             values = []
             for value in line.values:
                 mapped = mapper(value.var)
-                values.append(mapped if mapped in committed_names else '?')
-            return line.line.strip() + ' @@@ ' + ' @ '.join(values)
+                values.append(mapped if mapped in committed_names else "?")
+            return line.line.strip() + " @@@ " + " @ ".join(values)
 
         lhs_lines = [
             diffify_line(line, lambda x: x)
@@ -1446,7 +1451,7 @@ def diffify_line(line, mapper):
         candidate_matches = find_diff_matching(lhs_lines, rhs_lines)
 
         # Apply commits greedily on a match-by-match basis
-        matches = [(-1,-1)]
+        matches = [(-1, -1)]
         committed_anything = False
         for lhs_idx, rhs_idx in candidate_matches:
             lhs_line = old_line_infos[lhs_idx]
@@ -1458,7 +1463,7 @@ def diffify_line(line, mapper):
                 if new_mapping[rhs_value.var] in committed_names:
                     # The new value has already been committed. If it was mapped
                     # to the same name as the original value, we can consider
-                    # # committing other values from this line. Otherwise, we
+                    # committing other values from this line. Otherwise, we
                     # should ignore this line.
                     if new_mapping[rhs_value.var] == lhs_value.var:
                         continue
@@ -1498,7 +1503,7 @@ def diffify_line(line, mapper):
 
         # Recursively handle sequences between matches
         if committed_anything:
-            for ((lhs_prev, rhs_prev), (lhs_next, rhs_next)) in zip(matches, matches[1:]):
+            for (lhs_prev, rhs_prev), (lhs_next, rhs_next) in zip(matches, matches[1:]):
                 recurse(lhs_prev + 1, lhs_next, rhs_prev + 1, rhs_next)
 
     recurse(0, len(old_line_infos), 0, len(new_line_infos))
@@ -1529,6 +1534,7 @@ def diffify_line(line, mapper):
 
     return new_mapping
 
+
 def generalize_check_lines_common(
     lines,
     is_analyze,

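To illustrate the wildcard replacement that feeds find_diff_matching: uncommitted variable names are collapsed to a placeholder so that renumbered temporaries no longer prevent two otherwise identical check lines from matching, while committed names still have to agree. A simplified sketch follows; the helper name and its inputs are invented for illustration, whereas the real diffify_line works on CheckLineInfo objects and the current name mapping.

def diffify_for_matching(line_text, var_names, committed_names):
    # Keep names we are already committed to; collapse everything else to "?".
    values = [name if name in committed_names else "?" for name in var_names]
    return line_text.strip() + " @@@ " + " @ ".join(values)

old = diffify_for_matching("[[@@]] = add i32 [[@@]], 1", ["TMP2", "X"], {"X"})
new = diffify_for_matching("[[@@]] = add i32 [[@@]], 1", ["TMP5", "X"], {"X"})
assert old == new  # TMP2 vs. TMP5 no longer matters; the committed X still does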
>From 07ea3adb5aa1852b2e16ae84ecf161bda0631305 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Nicolai=20H=C3=A4hnle?= <nicolai.haehnle at amd.com>
Date: Thu, 7 Mar 2024 22:49:20 +0100
Subject: [PATCH 8/8] squash! update_test_checks: keep meta variables stable by
 default

Attempt to fix the Windows builder -- my guess is that a different
Python version doesn't accept this construct (though I don't understand
why it only fails for Clang's update_cc_test_checks).
---
 llvm/utils/UpdateTestChecks/common.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/llvm/utils/UpdateTestChecks/common.py b/llvm/utils/UpdateTestChecks/common.py
index 668d5952333ca6..68ce4f9fe2dcd9 100644
--- a/llvm/utils/UpdateTestChecks/common.py
+++ b/llvm/utils/UpdateTestChecks/common.py
@@ -1544,7 +1544,7 @@ def generalize_check_lines_common(
     nameless_value_regex: re.Pattern,
     is_asm,
     preserve_names,
-    original_check_lines: List[str] | None = None,
+    original_check_lines = None,
 ):
     # This gets called for each match that occurs in
     # a line. We transform variables we haven't seen

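For context on the construct being dropped above: evaluating "List[str] | None" in an annotation raises a TypeError at function-definition time on Python versions before 3.10 (unless annotations are deferred with "from __future__ import annotations"), which may be what the Windows builder ran into. If a type hint is still wanted on older interpreters, the typing-module spelling works; a hedged sketch with an invented function name:

from typing import List, Optional

def process_check_lines(original_check_lines: Optional[List[str]] = None) -> None:
    # Equivalent to "List[str] | None" on Python 3.10+, but also accepted by
    # older interpreters when the annotation is evaluated.
    if original_check_lines is None:
        original_check_lines = []
    print(len(original_check_lines))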


More information about the llvm-commits mailing list