<table border="1" cellspacing="0" cellpadding="8">
    <tr>
        <th>Issue</th>
        <td>
            <a href="https://github.com/llvm/llvm-project/issues/57852">57852</a>
        </td>
    </tr>

    <tr>
        <th>Summary</th>
        <td>
            [Coroutines] clang fails to apply HALO on a tuple of coroutine objects.
        </td>
    </tr>

    <tr>
      <th>Labels</th>
      <td>
            new issue
      </td>
    </tr>

    <tr>
      <th>Assignees</th>
      <td>
      </td>
    </tr>

    <tr>
      <th>Reporter</th>
      <td>
          snapshotpp
      </td>
    </tr>
</table>

<pre>
    https://godbolt.org/z/a94rbET8Y

```
#include <atomic>
#include <coroutine>
#include <exception>
#include <variant>
#include <tuple>
#include <utility>
#include <cassert> // for assert() in get_result()

// when_all implementation
template <typename T>
class when_all_task;

class when_all_task_counter {
public:
    // Counter starts at sz + 1: one decrement per task plus one from the awaiting coroutine.
    when_all_task_counter(std::size_t sz) : count_{ sz + 1 }, awaiting_{} {}
    void register_coroutine(std::coroutine_handle<> h) { awaiting_ = h; }
    bool ready() { return count_.fetch_sub(1, std::memory_order_acq_rel) > 1; }
    std::coroutine_handle<> count_down() {
        if (count_.fetch_sub(1, std::memory_order_acq_rel) == 1) {
            return awaiting_;
        }
        return std::noop_coroutine();
    }

private:
    std::atomic<std::size_t> count_{};
    std::coroutine_handle<> awaiting_;
};

template <typename T>
class when_all_task_promise;

template <typename T>
class when_all_task_promise {
public:
    auto get_return_object() {
        return std::coroutine_handle<when_all_task_promise>::from_promise(
            *this);
    }

    static void* operator new(std::size_t sz); // this triggers linker error if HALO fails.

    std::suspend_always initial_suspend() noexcept { return {}; }

    auto final_suspend() noexcept {
        struct awaiter {
            bool await_ready() noexcept { return false; }
            std::coroutine_handle<> await_suspend(
                std::coroutine_handle<when_all_task_promise> handle) noexcept {
                return handle.promise().counter_->count_down();
            }
            void await_resume() noexcept {}
        };
        return awaiter{};
    }

    void unhandled_exception() { result_.template emplace<2>(std::current_exception()); }

    auto yield_value(T&& result) noexcept {
        result_ = std::addressof(result);
        return final_suspend();
    }

    void return_void() {}

    T& get_result() {
        assert(result_.index() != 0);
        if (result_.index() == 2) {
            std::rethrow_exception(std::get<2>(result_));
        }
        return *std::get<1>(result_);
    }

    void start(when_all_task_counter& counter) {
        counter_ = &counter;
        std::coroutine_handle<when_all_task_promise>::from_promise(*this)
            .resume();
    }

private:
    when_all_task_counter* counter_{};
    std::variant<std::monostate, std::add_pointer_t<T>, std::exception_ptr>
        result_{};
};

template <typename T>
class when_all_task {
public:
    using promise_type = when_all_task_promise<T>;

    when_all_task(std::coroutine_handle<promise_type> h) : handle_{ h } {}

    when_all_task(when_all_task&& other) noexcept
        : handle_{ std::exchange(other.handle_, nullptr) } {}

    when_all_task& operator=(when_all_task&& other) noexcept {
        std::swap(handle_, other.handle_);
        return *this;
    }

    when_all_task(when_all_task const&) = delete;
    when_all_task& operator=(when_all_task const&) = delete;

    ~when_all_task() {
        if (handle_) {
            handle_.destroy();
        }
    }

    void start(when_all_task_counter& counter) {
        handle_.promise().start(counter);
    }

    decltype(auto) get_result() {
        return handle_.promise().get_result();
    }

private:
    std::coroutine_handle<promise_type> handle_;
};

template <typename T>
class when_all_awaitable;

template <typename... Ts>
class when_all_awaitable<std::tuple<Ts...>> {
public:
    when_all_awaitable(std::tuple<Ts...>&& tasks)
        : tasks_{ std::move(tasks) }, counter_{ sizeof...(Ts) } {}

    struct awaiter {
        awaiter(when_all_awaitable& a) : awaitable{ a } {}
        bool await_ready() { return false; }

        bool await_suspend(std::coroutine_handle<> awaiting) {
            awaitable.start_tasks(awaiting);
            return awaitable.counter_.ready();
        }

        std::tuple<std::decay_t<decltype(std::declval<Ts>().get_result())>...>
            await_resume() {
            return std::apply(
                [](auto&&... tasks) {
                    return std::make_tuple(tasks.get_result()...);
                },
                awaitable.tasks_);
        }

        when_all_awaitable& awaitable;
    };
    auto operator co_await() { return awaiter{ *this }; }

private:
    void start_tasks(std::coroutine_handle<> awaiting) {
        counter_.register_coroutine(awaiting);
        start_tasks_impl(std::make_index_sequence<sizeof...(Ts)>{});
    }
    template <std::size_t... Is>
    void start_tasks_impl(std::index_sequence<Is...>) {
        (void)std::initializer_list<int>{
            (std::get<Is>(tasks_).start(counter_), 0)...};
    }

private:
    when_all_task_counter counter_;
    std::tuple<Ts...> tasks_;
};

// metafunction helper to extract the result type of an awaiter
template<typename T>
struct await_result {
    using type = decltype(std::declval<T>().await_resume());
};

template<typename T>
using await_result_t = typename await_result<T>::type;
// metafunction helper to extract the result type of an awaiter ends here

template <typename Awaitable>
when_all_task<await_result_t<Awaitable&&>>
make_when_all_task(Awaitable awaitable) {
    co_yield co_await static_cast<Awaitable&&>(awaitable);
}

template <typename... Awaitables>
auto when_all(Awaitables&&... awaitables) {
    return when_all_awaitable<
        std::tuple<when_all_task<await_result_t<Awaitables>>...>>{
        std::make_tuple(make_when_all_task(std::move(awaitables))...)};
}
// when_all implementation ends here

// simple awaitable
struct awaitable {
    bool await_ready() noexcept {return true;}
    void await_suspend(std::coroutine_handle<> h) noexcept {}
    int await_resume() noexcept { return 96; }
};
// simple awaitable ends here

int main() {
    // HALO tests
    // successfully elides if we're calling when_all on (n <= 2) awaitables
    {
        auto whenAllTask_1 = when_all(awaitable{});
        auto whenAllTask_2 = when_all(awaitable{}, awaitable{});
    }
    // fails to elide (n > 2) awaitables
    {
        auto whenAllTask_3 = when_all(awaitable{}, awaitable{}, awaitable{}); // no linker error if we remove this line
    }
}
```

The code snippet above is a simple implementation of when_all(). It first wraps each awaitable passed to when_all() in a when_all_task<T> coroutine, then assembles the tuple of tasks into a when_all_awaitable<std::tuple<when_all_task<T>...>>.
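
For reference, a small illustrative check (not part of the original repro) spelling out the type that the three-argument call deduces under the definitions above; it would need to sit at namespace scope after those definitions, and needs <type_traits> for std::is_same_v:

```
// Illustrative only, relying on the repro above: each awaitable{} is wrapped
// by make_when_all_task() into a when_all_task<int>, and the three tasks are
// gathered into a single when_all_awaitable over a tuple.
static_assert(std::is_same_v<
    decltype(when_all(awaitable{}, awaitable{}, awaitable{})),
    when_all_awaitable<std::tuple<when_all_task<int>,
                                  when_all_task<int>,
                                  when_all_task<int>>>>);
```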

It should be easy enough to perform the optimization on the code above, but Clang fails to perform HALO when more than two awaitables are passed to when_all().
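
For comparison, here is a minimal sketch of the detection trick used above, isolated from the when_all machinery (the task type and coro() below are made-up stand-ins for illustration, not part of the original repro): declare but never define operator new on the promise, so the program only links if the frame allocation is elided.

```
// Minimal sketch of the linker-error trick. If HALO removes the coroutine
// frame allocation, promise_type::operator new is never referenced and the
// program links; otherwise linking fails with an undefined reference.
#include <coroutine>
#include <cstddef>

struct task {
    struct promise_type {
        // Declared but never defined: referenced only if the coroutine frame
        // is actually heap-allocated, i.e. only if HALO fails.
        static void* operator new(std::size_t);
        task get_return_object() { return {}; }
        std::suspend_never initial_suspend() noexcept { return {}; }
        std::suspend_never final_suspend() noexcept { return {}; }
        void return_void() {}
        void unhandled_exception() {}
    };
};

task coro() { co_return; }

int main() {
    coro(); // with optimizations enabled, Clang can typically elide this frame
}
```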
</pre>