[llvm] [Offload] Implement the remaining initial Offload API (PR #122106)
Callum Fare via llvm-commits
llvm-commits at lists.llvm.org
Tue Feb 11 09:50:36 PST 2025
================
@@ -245,3 +308,300 @@ ol_impl_result_t olGetDeviceInfoSize_impl(ol_device_handle_t Device,
size_t *PropSizeRet) {
return olGetDeviceInfoImplDetail(Device, PropName, 0, nullptr, PropSizeRet);
}
+
+TargetAllocTy convertOlToPluginAllocTy(ol_alloc_type_t Type) {
+ switch (Type) {
+ case OL_ALLOC_TYPE_DEVICE:
+ return TARGET_ALLOC_DEVICE;
+ case OL_ALLOC_TYPE_HOST:
+ return TARGET_ALLOC_HOST;
+ case OL_ALLOC_TYPE_SHARED:
+ default:
+ return TARGET_ALLOC_SHARED;
+ }
+}
+
+ol_impl_result_t olMemAlloc_impl(ol_device_handle_t Device,
+ ol_alloc_type_t Type, size_t Size,
+ void **AllocationOut) {
+ auto Alloc =
+ Device->Device.dataAlloc(Size, nullptr, convertOlToPluginAllocTy(Type));
+ if (!Alloc) {
+ return {OL_ERRC_OUT_OF_RESOURCES,
+ formatv("Could not create allocation on device {0}", Device).str()};
+ }
+
+ *AllocationOut = *Alloc;
+ return OL_SUCCESS;
+}
+
+ol_impl_result_t olMemFree_impl(ol_device_handle_t Device, ol_alloc_type_t Type,
+ void *Address) {
+ auto Res = Device->Device.dataDelete(Address, convertOlToPluginAllocTy(Type));
+ if (Res) {
+ return {OL_ERRC_OUT_OF_RESOURCES, "Could not free allocation"};
+ }
+ return OL_SUCCESS;
+}
+
+ol_impl_result_t olCreateQueue_impl(ol_device_handle_t Device,
+ ol_queue_handle_t *Queue) {
+ auto CreatedQueue = std::make_unique<ol_queue_handle_t_>();
+ auto Err = Device->Device.initAsyncInfo(&(CreatedQueue->AsyncInfo));
+ if (Err) {
+ return OL_ERRC_OUT_OF_RESOURCES;
+ }
+ // TODO: Check error
+ CreatedQueue->Device = Device;
+ CreatedQueue->RefCount = 1;
+ *Queue = CreatedQueue.release();
----------------
callumfare wrote:
It's useful when you need the allocation to happen before multiple possible early exits in the function — it means you don't need to remember to free the allocation on each exit path. But I can change this if you'd rather it happen explicitly.
https://github.com/llvm/llvm-project/pull/122106
More information about the llvm-commits
mailing list