; RUN: llc -march=r600 -mcpu=SI < %s | FileCheck -check-prefix=SI %s

; Round-trip an i1 through global memory: byte load, mask to bit 0, byte store.
; SI-LABEL: @global_copy_i1_to_i1
; SI: BUFFER_LOAD_UBYTE
; SI: V_AND_B32_e32 v{{[0-9]+}}, 1
; SI: BUFFER_STORE_BYTE
; SI: S_ENDPGM
define void @global_copy_i1_to_i1(i1 addrspace(1)* %out, i1 addrspace(1)* %in) nounwind {
  %val = load i1 addrspace(1)* %in
  store i1 %val, i1 addrspace(1)* %out, align 1
  ret void
}
| 14 | |
; Sign-extending i1 load from global memory to i32.
; XSI line records the eventually-desired signed byte load (currently disabled).
; SI-LABEL: @global_sextload_i1_to_i32
; XSI: BUFFER_LOAD_BYTE
; SI: BUFFER_STORE_DWORD
; SI: S_ENDPGM
define void @global_sextload_i1_to_i32(i32 addrspace(1)* %out, i1 addrspace(1)* %in) nounwind {
  %bit = load i1 addrspace(1)* %in
  %sext = sext i1 %bit to i32
  store i32 %sext, i32 addrspace(1)* %out, align 4
  ret void
}
| 25 | |
; Zero-extending i1 load from global memory to i32.
; SI-LABEL: @global_zextload_i1_to_i32
; SI: BUFFER_LOAD_UBYTE
; SI: BUFFER_STORE_DWORD
; SI: S_ENDPGM
define void @global_zextload_i1_to_i32(i32 addrspace(1)* %out, i1 addrspace(1)* %in) nounwind {
  %bit = load i1 addrspace(1)* %in
  %zext = zext i1 %bit to i32
  store i32 %zext, i32 addrspace(1)* %out, align 4
  ret void
}
| 36 | |
; Sign-extending i1 load from global memory to i64 (64-bit store pair).
; XSI line records the eventually-desired signed byte load (currently disabled).
; SI-LABEL: @global_sextload_i1_to_i64
; XSI: BUFFER_LOAD_BYTE
; SI: BUFFER_STORE_DWORDX2
; SI: S_ENDPGM
define void @global_sextload_i1_to_i64(i64 addrspace(1)* %out, i1 addrspace(1)* %in) nounwind {
  %bit = load i1 addrspace(1)* %in
  %sext = sext i1 %bit to i64
  store i64 %sext, i64 addrspace(1)* %out, align 4
  ret void
}
| 47 | |
; Zero-extending i1 load from global memory to i64 (64-bit store pair).
; SI-LABEL: @global_zextload_i1_to_i64
; SI: BUFFER_LOAD_UBYTE
; SI: BUFFER_STORE_DWORDX2
; SI: S_ENDPGM
define void @global_zextload_i1_to_i64(i64 addrspace(1)* %out, i1 addrspace(1)* %in) nounwind {
  %bit = load i1 addrspace(1)* %in
  %zext = zext i1 %bit to i64
  store i64 %zext, i64 addrspace(1)* %out, align 4
  ret void
}
| 58 | |
; i1 kernel argument stored directly: loaded as a byte, masked to bit 0.
; SI-LABEL: @i1_arg
; SI: BUFFER_LOAD_UBYTE
; SI: V_AND_B32_e32
; SI: BUFFER_STORE_BYTE
; SI: S_ENDPGM
define void @i1_arg(i1 addrspace(1)* %out, i1 %x) nounwind {
  store i1 %x, i1 addrspace(1)* %out, align 1
  ret void
}
| 68 | |
; i1 kernel argument zero-extended to i32 before storing.
; SI-LABEL: @i1_arg_zext_i32
; SI: BUFFER_LOAD_UBYTE
; SI: BUFFER_STORE_DWORD
; SI: S_ENDPGM
define void @i1_arg_zext_i32(i32 addrspace(1)* %out, i1 %x) nounwind {
  %zext = zext i1 %x to i32
  store i32 %zext, i32 addrspace(1)* %out, align 4
  ret void
}
| 78 | |
; i1 kernel argument zero-extended to i64 before storing.
; SI-LABEL: @i1_arg_zext_i64
; SI: BUFFER_LOAD_UBYTE
; SI: BUFFER_STORE_DWORDX2
; SI: S_ENDPGM
define void @i1_arg_zext_i64(i64 addrspace(1)* %out, i1 %x) nounwind {
  %zext = zext i1 %x to i64
  store i64 %zext, i64 addrspace(1)* %out, align 8
  ret void
}
| 88 | |
; i1 kernel argument sign-extended to i32 before storing.
; XSI line records the eventually-desired signed byte load (currently disabled).
; SI-LABEL: @i1_arg_sext_i32
; XSI: BUFFER_LOAD_BYTE
; SI: BUFFER_STORE_DWORD
; SI: S_ENDPGM
define void @i1_arg_sext_i32(i32 addrspace(1)* %out, i1 %x) nounwind {
  %ext = sext i1 %x to i32
  ; Fixed: pointer type was written "i32addrspace(1)*" (missing space), which
  ; is not valid LLVM IR and would make the whole test file fail to parse.
  store i32 %ext, i32 addrspace(1)* %out, align 4
  ret void
}
| 98 | |
; i1 kernel argument sign-extended to i64 before storing.
; XSI line records the eventually-desired signed byte load (currently disabled).
; SI-LABEL: @i1_arg_sext_i64
; XSI: BUFFER_LOAD_BYTE
; SI: BUFFER_STORE_DWORDX2
; SI: S_ENDPGM
define void @i1_arg_sext_i64(i64 addrspace(1)* %out, i1 %x) nounwind {
  %sext = sext i1 %x to i64
  store i64 %sext, i64 addrspace(1)* %out, align 8
  ret void
}