; RUN: llc -march=amdgcn -verify-machineinstrs < %s | FileCheck -check-prefix=GCN -check-prefix=SI %s
; RUN: llc -march=amdgcn -mcpu=tonga -verify-machineinstrs < %s | FileCheck -check-prefix=GCN -check-prefix=VI %s

; Make sure we don't crash or assert on spir_kernel calling convention.

; A spir_kernel is lowered as an AMDGPU kernel entry point, so the
; program must end with s_endpgm on both SI and VI.
; GCN-LABEL: {{^}}kernel:
; GCN: s_endpgm
define spir_kernel void @kernel(i32 addrspace(1)* %out) {
entry:
  store i32 0, i32 addrspace(1)* %out
  ret void
}

; FIXME: This is treated like a kernel
; GCN-LABEL: {{^}}func:
; GCN: s_endpgm
define spir_func void @func(i32 addrspace(1)* %out) {
entry:
  store i32 0, i32 addrspace(1)* %out
  ret void
}

; amdgpu_ps with a half return: SI has no f16 arithmetic, so the value
; round-trips through f32 conversions; VI adds directly in f16.
; GCN-LABEL: {{^}}ps_ret_cc_f16:
; SI: v_cvt_f16_f32_e32 v0, v0
; SI: v_cvt_f32_f16_e32 v0, v0
; SI: v_add_f32_e32 v0, 1.0, v0

; VI: v_add_f16_e32 v0, 1.0, v0
; VI: ; return
define amdgpu_ps half @ps_ret_cc_f16(half %arg0) {
  %add = fadd half %arg0, 1.0
  ret half %add
}

; Same as ps_ret_cc_f16 but with the argument passed inreg (in an SGPR),
; so the source operand of the first instruction is s0 instead of v0.
; GCN-LABEL: {{^}}ps_ret_cc_inreg_f16:
; SI: v_cvt_f16_f32_e32 v0, s0
; SI: v_cvt_f32_f16_e32 v0, v0
; SI: v_add_f32_e32 v0, 1.0, v0

; VI: v_add_f16_e64 v0, s0, 1.0
; VI: ; return
define amdgpu_ps half @ps_ret_cc_inreg_f16(half inreg %arg0) {
  %add = fadd half %arg0, 1.0
  ret half %add
}