; RUN: llc -O0 -mtriple=amdgcn-mesa-mesa3d -mcpu=bonaire -mattr=-promote-alloca < %s | FileCheck -check-prefix=CHECK -check-prefix=CHECK-NO-PROMOTE %s
; RUN: llc -O0 -mtriple=amdgcn-mesa-mesa3d -mcpu=bonaire -mattr=+promote-alloca < %s | FileCheck -check-prefix=CHECK -check-prefix=CHECK-PROMOTE %s
; RUN: llc -O0 -mtriple=amdgcn-mesa-mesa3d -mcpu=tonga -mattr=-promote-alloca < %s | FileCheck -check-prefix=CHECK -check-prefix=CHECK-NO-PROMOTE %s
; RUN: llc -O0 -mtriple=amdgcn-mesa-mesa3d -mcpu=tonga -mattr=+promote-alloca < %s | FileCheck -check-prefix=CHECK -check-prefix=CHECK-PROMOTE %s

; Disable optimizations (-O0) in case optimizations are added that
; specialize away generic pointer accesses.

; These testcases may become useless once optimizations that remove
; generic pointers are added.
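
; In this target's address space mapping, addrspace(4) is the flat
; (generic) address space, so the addrspacecasts from the global address
; space (addrspace(1)) below are expected to select flat_* instructions.
; Flat addresses and stored data must live in VGPRs, hence the
; v_mov_b32 copies from the scalar kernel arguments checked in the
; first test.
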
; CHECK-LABEL: {{^}}store_flat_i32:
; CHECK-DAG: s_load_dwordx2 s{{\[}}[[LO_SREG:[0-9]+]]:[[HI_SREG:[0-9]+]]],
; CHECK-DAG: s_load_dword s[[SDATA:[0-9]+]],
; CHECK: s_waitcnt lgkmcnt(0)
; CHECK-DAG: v_mov_b32_e32 v[[DATA:[0-9]+]], s[[SDATA]]
; CHECK-DAG: v_mov_b32_e32 v[[LO_VREG:[0-9]+]], s[[LO_SREG]]
; CHECK-DAG: v_mov_b32_e32 v[[HI_VREG:[0-9]+]], s[[HI_SREG]]
; CHECK: flat_store_dword v{{\[}}[[LO_VREG]]:[[HI_VREG]]{{\]}}, v[[DATA]]
define void @store_flat_i32(i32 addrspace(1)* %gptr, i32 %x) #0 {
  %fptr = addrspacecast i32 addrspace(1)* %gptr to i32 addrspace(4)*
  store i32 %x, i32 addrspace(4)* %fptr, align 4
  ret void
}

; CHECK-LABEL: {{^}}store_flat_i64:
; CHECK: flat_store_dwordx2
define void @store_flat_i64(i64 addrspace(1)* %gptr, i64 %x) #0 {
  %fptr = addrspacecast i64 addrspace(1)* %gptr to i64 addrspace(4)*
  store i64 %x, i64 addrspace(4)* %fptr, align 8
  ret void
}

; CHECK-LABEL: {{^}}store_flat_v4i32:
; CHECK: flat_store_dwordx4
define void @store_flat_v4i32(<4 x i32> addrspace(1)* %gptr, <4 x i32> %x) #0 {
  %fptr = addrspacecast <4 x i32> addrspace(1)* %gptr to <4 x i32> addrspace(4)*
  store <4 x i32> %x, <4 x i32> addrspace(4)* %fptr, align 16
  ret void
}

; CHECK-LABEL: {{^}}store_flat_trunc_i16:
; CHECK: flat_store_short
define void @store_flat_trunc_i16(i16 addrspace(1)* %gptr, i32 %x) #0 {
  %fptr = addrspacecast i16 addrspace(1)* %gptr to i16 addrspace(4)*
  %y = trunc i32 %x to i16
  store i16 %y, i16 addrspace(4)* %fptr, align 2
  ret void
}

; CHECK-LABEL: {{^}}store_flat_trunc_i8:
; CHECK: flat_store_byte
define void @store_flat_trunc_i8(i8 addrspace(1)* %gptr, i32 %x) #0 {
  %fptr = addrspacecast i8 addrspace(1)* %gptr to i8 addrspace(4)*
  %y = trunc i32 %x to i8
  store i8 %y, i8 addrspace(4)* %fptr, align 2
  ret void
}

; CHECK-LABEL: load_flat_i32:
; CHECK: flat_load_dword
define void @load_flat_i32(i32 addrspace(1)* noalias %out, i32 addrspace(1)* noalias %gptr) #0 {
  %fptr = addrspacecast i32 addrspace(1)* %gptr to i32 addrspace(4)*
  %fload = load i32, i32 addrspace(4)* %fptr, align 4
  store i32 %fload, i32 addrspace(1)* %out, align 4
  ret void
}

; CHECK-LABEL: load_flat_i64:
; CHECK: flat_load_dwordx2
define void @load_flat_i64(i64 addrspace(1)* noalias %out, i64 addrspace(1)* noalias %gptr) #0 {
  %fptr = addrspacecast i64 addrspace(1)* %gptr to i64 addrspace(4)*
  %fload = load i64, i64 addrspace(4)* %fptr, align 8
  store i64 %fload, i64 addrspace(1)* %out, align 8
  ret void
}

; CHECK-LABEL: load_flat_v4i32:
; CHECK: flat_load_dwordx4
define void @load_flat_v4i32(<4 x i32> addrspace(1)* noalias %out, <4 x i32> addrspace(1)* noalias %gptr) #0 {
  %fptr = addrspacecast <4 x i32> addrspace(1)* %gptr to <4 x i32> addrspace(4)*
  %fload = load <4 x i32>, <4 x i32> addrspace(4)* %fptr, align 32
  store <4 x i32> %fload, <4 x i32> addrspace(1)* %out, align 8
  ret void
}

; CHECK-LABEL: sextload_flat_i8:
; CHECK: flat_load_sbyte
define void @sextload_flat_i8(i32 addrspace(1)* noalias %out, i8 addrspace(1)* noalias %gptr) #0 {
  %fptr = addrspacecast i8 addrspace(1)* %gptr to i8 addrspace(4)*
  %fload = load i8, i8 addrspace(4)* %fptr, align 4
  %ext = sext i8 %fload to i32
  store i32 %ext, i32 addrspace(1)* %out, align 4
  ret void
}

; CHECK-LABEL: zextload_flat_i8:
; CHECK: flat_load_ubyte
define void @zextload_flat_i8(i32 addrspace(1)* noalias %out, i8 addrspace(1)* noalias %gptr) #0 {
  %fptr = addrspacecast i8 addrspace(1)* %gptr to i8 addrspace(4)*
  %fload = load i8, i8 addrspace(4)* %fptr, align 4
  %ext = zext i8 %fload to i32
  store i32 %ext, i32 addrspace(1)* %out, align 4
  ret void
}

; CHECK-LABEL: sextload_flat_i16:
; CHECK: flat_load_sshort
define void @sextload_flat_i16(i32 addrspace(1)* noalias %out, i16 addrspace(1)* noalias %gptr) #0 {
  %fptr = addrspacecast i16 addrspace(1)* %gptr to i16 addrspace(4)*
  %fload = load i16, i16 addrspace(4)* %fptr, align 4
  %ext = sext i16 %fload to i32
  store i32 %ext, i32 addrspace(1)* %out, align 4
  ret void
}

; CHECK-LABEL: zextload_flat_i16:
; CHECK: flat_load_ushort
define void @zextload_flat_i16(i32 addrspace(1)* noalias %out, i16 addrspace(1)* noalias %gptr) #0 {
  %fptr = addrspacecast i16 addrspace(1)* %gptr to i16 addrspace(4)*
  %fload = load i16, i16 addrspace(4)* %fptr, align 4
  %ext = zext i16 %fload to i32
  store i32 %ext, i32 addrspace(1)* %out, align 4
  ret void
}

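; An i32 access through a flat pointer known only to be align 1 cannot
; use a single dword access, so it is split into four single-byte flat
; accesses, as the checks below expect. The alloca lives in the private
; (scratch) address space and is cast to a flat pointer.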
; CHECK-LABEL: flat_scratch_unaligned_load:
; CHECK: flat_load_ubyte
; CHECK: flat_load_ubyte
; CHECK: flat_load_ubyte
; CHECK: flat_load_ubyte
define void @flat_scratch_unaligned_load() {
  %scratch = alloca i32
  %fptr = addrspacecast i32* %scratch to i32 addrspace(4)*
  %ld = load volatile i32, i32 addrspace(4)* %fptr, align 1
  ret void
}

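; Same as above, but for the store side: the align-1 i32 store is split
; into four single-byte stores.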
; CHECK-LABEL: flat_scratch_unaligned_store:
; CHECK: flat_store_byte
; CHECK: flat_store_byte
; CHECK: flat_store_byte
; CHECK: flat_store_byte
define void @flat_scratch_unaligned_store() {
  %scratch = alloca i32
  %fptr = addrspacecast i32* %scratch to i32 addrspace(4)*
  store volatile i32 0, i32 addrspace(4)* %fptr, align 1
  ret void
}

attributes #0 = { nounwind }
attributes #1 = { nounwind convergent }
attributes #3 = { nounwind readnone }