; RUN: llc -march=r600 -mcpu=SI -verify-machineinstrs < %s | FileCheck -check-prefix=SI %s
; RUN: llc -march=r600 -mcpu=cypress < %s | FileCheck -check-prefix=EG %s

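; Check that truncating an i64 kernel argument to i32 only touches the low
; 32 bits: a single dword is loaded, moved into a VGPR, and stored.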
define void @trunc_i64_to_i32_store(i32 addrspace(1)* %out, i64 %in) {
; SI-LABEL: @trunc_i64_to_i32_store
; SI: S_LOAD_DWORD s0, s[0:1], 11
; SI: V_MOV_B32_e32 v0, s0
; SI: BUFFER_STORE_DWORD v0

; EG-LABEL: @trunc_i64_to_i32_store
; EG: MEM_RAT_CACHELESS STORE_RAW T0.X, T1.X, 1
; EG: LSHR
; EG-NEXT: 2(

  %result = trunc i64 %in to i32
  store i32 %result, i32 addrspace(1)* %out, align 4
  ret void
}

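; Check that (trunc (shl i64)) is narrowed to a 32-bit scalar shift when only
; the low 32 bits of the result are stored.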
; SI-LABEL: @trunc_load_shl_i64:
; SI-DAG: S_LOAD_DWORDX2
; SI-DAG: S_LOAD_DWORD [[SREG:s[0-9]+]],
; SI: S_LSHL_B32 [[SHL:s[0-9]+]], [[SREG]], 2
; SI: V_MOV_B32_e32 [[VSHL:v[0-9]+]], [[SHL]]
; SI: BUFFER_STORE_DWORD [[VSHL]],
define void @trunc_load_shl_i64(i32 addrspace(1)* %out, i64 %a) {
  %b = shl i64 %a, 2
  %result = trunc i64 %b to i32
  store i32 %result, i32 addrspace(1)* %out, align 4
  ret void
}

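; Check that when the shifted value comes from a 64-bit add (which, per the
; inline comment, prevents the store from being shrunk), the stored value is
; the low half of the V_LSHL_B64 result.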
; SI-LABEL: @trunc_shl_i64:
; SI: S_LOAD_DWORDX2 s{{\[}}[[LO_SREG:[0-9]+]]:{{[0-9]+\]}},
; SI: V_ADD_I32_e32 v[[LO_ADD:[0-9]+]], s[[LO_SREG]],
; SI: V_LSHL_B64 v{{\[}}[[LO_VREG:[0-9]+]]:{{[0-9]+\]}}, v{{\[}}[[LO_ADD]]:{{[0-9]+\]}}, 2
; SI: BUFFER_STORE_DWORD v[[LO_VREG]],
define void @trunc_shl_i64(i32 addrspace(1)* %out, i64 %a) {
  %aa = add i64 %a, 234 ; Prevent shrinking store.
  %b = shl i64 %aa, 2
  %result = trunc i64 %b to i32
  store i32 %result, i32 addrspace(1)* %out, align 4
  ret void
}

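; Check that truncating i32 to i1 masks the value with an AND before the
; compare.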
; SI-LABEL: @trunc_i32_to_i1:
; SI: V_AND_B32
; SI: V_CMP_EQ_I32
define void @trunc_i32_to_i1(i32 addrspace(1)* %out, i32 %a) {
  %trunc = trunc i32 %a to i1
  %result = select i1 %trunc, i32 1, i32 0
  store i32 %result, i32 addrspace(1)* %out, align 4
  ret void
}