
; Tests for the NVPTX prefetch / prefetchu intrinsics, including the
; prefetch.tensormap variants (generic, const, and param address spaces).
; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py UTC_ARGS: --version 5
; RUN: llc < %s -mtriple=nvptx64 -mcpu=sm_90 -mattr=+ptx80 | FileCheck --check-prefixes=CHECK-PTX64 %s
; RUN: %if ptxas-12.3 %{ llc < %s -mtriple=nvptx64 -mcpu=sm_90 -mattr=+ptx80 | %ptxas-verify -arch=sm_90 %}
target triple = "nvptx64-nvidia-cuda"
|
|
|
|
declare void @llvm.nvvm.prefetch.global.L1(ptr addrspace(1) %global_ptr)
|
|
declare void @llvm.nvvm.prefetch.global.L2(ptr addrspace(1) %global_ptr)
|
|
declare void @llvm.nvvm.prefetch.local.L1(ptr addrspace(5) %local_ptr)
|
|
declare void @llvm.nvvm.prefetch.local.L2(ptr addrspace(5) %local_ptr)
|
|
|
|
declare void @llvm.nvvm.prefetch.L1(ptr %ptr)
|
|
declare void @llvm.nvvm.prefetch.L2(ptr %ptr)
|
|
|
|
declare void @llvm.nvvm.prefetch.tensormap.p0(ptr %ptr)
|
|
declare void @llvm.nvvm.prefetch.tensormap.p4(ptr addrspace(4) %const_ptr)
|
|
declare void @llvm.nvvm.prefetch.tensormap.p101(ptr addrspace(101) %param_ptr)
|
|
|
|
declare void @llvm.nvvm.prefetch.global.L2.evict.normal(ptr addrspace(1) %global_ptr)
|
|
declare void @llvm.nvvm.prefetch.global.L2.evict.last(ptr addrspace(1) %global_ptr)
|
|
|
|
declare void @llvm.nvvm.prefetchu.L1(ptr %ptr)
|
|
|
|
define void @prefetch_local(ptr addrspace(5) %local_ptr) {
|
|
; CHECK-PTX64-LABEL: prefetch_local(
|
|
; CHECK-PTX64: {
|
|
; CHECK-PTX64-NEXT: .reg .b64 %rd<2>;
|
|
; CHECK-PTX64-EMPTY:
|
|
; CHECK-PTX64-NEXT: // %bb.0:
|
|
; CHECK-PTX64-NEXT: ld.param.b64 %rd1, [prefetch_local_param_0];
|
|
; CHECK-PTX64-NEXT: prefetch.local.L1 [%rd1];
|
|
; CHECK-PTX64-NEXT: prefetch.local.L2 [%rd1];
|
|
; CHECK-PTX64-NEXT: ret;
|
|
tail call void @llvm.nvvm.prefetch.local.L1(ptr addrspace(5) %local_ptr)
|
|
tail call void @llvm.nvvm.prefetch.local.L2(ptr addrspace(5) %local_ptr)
|
|
ret void
|
|
}
|
|
|
|
define void @prefetch_global(ptr addrspace(1) %global_ptr) {
|
|
; CHECK-PTX64-LABEL: prefetch_global(
|
|
; CHECK-PTX64: {
|
|
; CHECK-PTX64-NEXT: .reg .b64 %rd<2>;
|
|
; CHECK-PTX64-EMPTY:
|
|
; CHECK-PTX64-NEXT: // %bb.0:
|
|
; CHECK-PTX64-NEXT: ld.param.b64 %rd1, [prefetch_global_param_0];
|
|
; CHECK-PTX64-NEXT: prefetch.global.L1 [%rd1];
|
|
; CHECK-PTX64-NEXT: prefetch.global.L2 [%rd1];
|
|
; CHECK-PTX64-NEXT: prefetch.global.L2::evict_normal [%rd1];
|
|
; CHECK-PTX64-NEXT: prefetch.global.L2::evict_last [%rd1];
|
|
; CHECK-PTX64-NEXT: ret;
|
|
tail call void @llvm.nvvm.prefetch.global.L1(ptr addrspace(1) %global_ptr)
|
|
tail call void @llvm.nvvm.prefetch.global.L2(ptr addrspace(1) %global_ptr)
|
|
tail call void @llvm.nvvm.prefetch.global.L2.evict.normal(ptr addrspace(1) %global_ptr)
|
|
tail call void @llvm.nvvm.prefetch.global.L2.evict.last(ptr addrspace(1) %global_ptr)
|
|
ret void
|
|
}
|
|
|
|
|
|
define void @prefetch_(ptr %ptr) {
|
|
; CHECK-PTX64-LABEL: prefetch_(
|
|
; CHECK-PTX64: {
|
|
; CHECK-PTX64-NEXT: .reg .b64 %rd<2>;
|
|
; CHECK-PTX64-EMPTY:
|
|
; CHECK-PTX64-NEXT: // %bb.0:
|
|
; CHECK-PTX64-NEXT: ld.param.b64 %rd1, [prefetch__param_0];
|
|
; CHECK-PTX64-NEXT: prefetch.L1 [%rd1];
|
|
; CHECK-PTX64-NEXT: prefetch.L2 [%rd1];
|
|
; CHECK-PTX64-NEXT: ret;
|
|
tail call void @llvm.nvvm.prefetch.L1(ptr %ptr)
|
|
tail call void @llvm.nvvm.prefetch.L2(ptr %ptr)
|
|
ret void
|
|
}
|
|
|
|
define void @prefetchu_l1(ptr %ptr) {
|
|
; CHECK-PTX64-LABEL: prefetchu_l1(
|
|
; CHECK-PTX64: {
|
|
; CHECK-PTX64-NEXT: .reg .b64 %rd<2>;
|
|
; CHECK-PTX64-EMPTY:
|
|
; CHECK-PTX64-NEXT: // %bb.0:
|
|
; CHECK-PTX64-NEXT: ld.param.b64 %rd1, [prefetchu_l1_param_0];
|
|
; CHECK-PTX64-NEXT: prefetchu.L1 [%rd1];
|
|
; CHECK-PTX64-NEXT: ret;
|
|
tail call void @llvm.nvvm.prefetchu.L1(ptr %ptr)
|
|
ret void
|
|
}
|
|
|
|
define void @prefetch_tensormap(ptr %ptr) {
|
|
; CHECK-PTX64-LABEL: prefetch_tensormap(
|
|
; CHECK-PTX64: {
|
|
; CHECK-PTX64-NEXT: .reg .b64 %rd<2>;
|
|
; CHECK-PTX64-EMPTY:
|
|
; CHECK-PTX64-NEXT: // %bb.0:
|
|
; CHECK-PTX64-NEXT: ld.param.b64 %rd1, [prefetch_tensormap_param_0];
|
|
; CHECK-PTX64-NEXT: prefetch.tensormap [%rd1];
|
|
; CHECK-PTX64-NEXT: ret;
|
|
tail call void @llvm.nvvm.prefetch.tensormap.p0(ptr %ptr)
|
|
ret void
|
|
}
|
|
|
|
define void @prefetch_const_tensormap(ptr addrspace(4) %const_ptr) {
|
|
; CHECK-PTX64-LABEL: prefetch_const_tensormap(
|
|
; CHECK-PTX64: {
|
|
; CHECK-PTX64-NEXT: .reg .b64 %rd<2>;
|
|
; CHECK-PTX64-EMPTY:
|
|
; CHECK-PTX64-NEXT: // %bb.0:
|
|
; CHECK-PTX64-NEXT: ld.param.b64 %rd1, [prefetch_const_tensormap_param_0];
|
|
; CHECK-PTX64-NEXT: prefetch.const.tensormap [%rd1];
|
|
; CHECK-PTX64-NEXT: ret;
|
|
tail call void @llvm.nvvm.prefetch.tensormap.p4(ptr addrspace(4) %const_ptr)
|
|
ret void
|
|
}
|
|
|
|
define void @prefetch_param_tensormap(ptr addrspace(101) %param_ptr) {
|
|
; CHECK-PTX64-LABEL: prefetch_param_tensormap(
|
|
; CHECK-PTX64: {
|
|
; CHECK-PTX64-NEXT: .reg .b64 %rd<2>;
|
|
; CHECK-PTX64-EMPTY:
|
|
; CHECK-PTX64-NEXT: // %bb.0:
|
|
; CHECK-PTX64-NEXT: ld.param.b64 %rd1, [prefetch_param_tensormap_param_0];
|
|
; CHECK-PTX64-NEXT: prefetch.param.tensormap [%rd1];
|
|
; CHECK-PTX64-NEXT: ret;
|
|
tail call void @llvm.nvvm.prefetch.tensormap.p101(ptr addrspace(101) %param_ptr)
|
|
ret void
|
|
} |