// RUN: %clang_cc1 -triple thumbv8.1m.main-none-none-eabi \
// RUN:   -target-feature +cdecp0 -target-feature +mve.fp \
// RUN:   -mfloat-abi hard -O0 -disable-O0-optnone \
// RUN:   -S -emit-llvm -o - %s | opt -S -mem2reg | FileCheck %s --check-prefixes=CHECK,CHECK-LE
// RUN: %clang_cc1 -triple thumbebv8.1m.main-arm-none-eabi \
// RUN:   -target-feature +cdecp0 -target-feature +mve.fp \
// RUN:   -mfloat-abi hard -O0 -disable-O0-optnone \
// RUN:   -S -emit-llvm -o - %s | opt -S -mem2reg | FileCheck %s --check-prefixes=CHECK,CHECK-BE

#include <arm_cde.h>

| 12 | // CHECK-LABEL: @test_s8( |
| 13 | // CHECK-NEXT: entry: |
| 14 | // CHECK-NEXT: ret <16 x i8> [[X:%.*]] |
| 15 | // |
| 16 | int8x16_t test_s8(uint8x16_t x) { |
| 17 | return __arm_vreinterpretq_s8_u8(x); |
| 18 | } |
| 19 | |
| 20 | // CHECK-LABEL: @test_u16( |
| 21 | // CHECK-NEXT: entry: |
Mikhail Maltsev | bd722ef | 2020-03-27 16:05:18 +0000 | [diff] [blame] | 22 | // CHECK-LE-NEXT: [[TMP0:%.*]] = bitcast <16 x i8> [[X:%.*]] to <8 x i16> |
| 23 | // CHECK-BE-NEXT: [[TMP0:%.*]] = call <8 x i16> @llvm.arm.mve.vreinterpretq.v8i16.v16i8(<16 x i8> [[X:%.*]]) |
Mikhail Maltsev | 6ae3eff | 2020-03-20 14:01:57 +0000 | [diff] [blame] | 24 | // CHECK-NEXT: ret <8 x i16> [[TMP0]] |
| 25 | // |
| 26 | uint16x8_t test_u16(uint8x16_t x) { |
| 27 | return __arm_vreinterpretq_u16_u8(x); |
| 28 | } |
| 29 | |
| 30 | // CHECK-LABEL: @test_s32( |
| 31 | // CHECK-NEXT: entry: |
Mikhail Maltsev | bd722ef | 2020-03-27 16:05:18 +0000 | [diff] [blame] | 32 | // CHECK-LE-NEXT: [[TMP0:%.*]] = bitcast <16 x i8> [[X:%.*]] to <4 x i32> |
| 33 | // CHECK-BE-NEXT: [[TMP0:%.*]] = call <4 x i32> @llvm.arm.mve.vreinterpretq.v4i32.v16i8(<16 x i8> [[X:%.*]]) |
Mikhail Maltsev | 6ae3eff | 2020-03-20 14:01:57 +0000 | [diff] [blame] | 34 | // CHECK-NEXT: ret <4 x i32> [[TMP0]] |
| 35 | // |
| 36 | int32x4_t test_s32(uint8x16_t x) { |
| 37 | return __arm_vreinterpretq_s32_u8(x); |
| 38 | } |
| 39 | |
| 40 | // CHECK-LABEL: @test_u32( |
| 41 | // CHECK-NEXT: entry: |
Mikhail Maltsev | bd722ef | 2020-03-27 16:05:18 +0000 | [diff] [blame] | 42 | // CHECK-LE-NEXT: [[TMP0:%.*]] = bitcast <16 x i8> [[X:%.*]] to <4 x i32> |
| 43 | // CHECK-BE-NEXT: [[TMP0:%.*]] = call <4 x i32> @llvm.arm.mve.vreinterpretq.v4i32.v16i8(<16 x i8> [[X:%.*]]) |
Mikhail Maltsev | 6ae3eff | 2020-03-20 14:01:57 +0000 | [diff] [blame] | 44 | // CHECK-NEXT: ret <4 x i32> [[TMP0]] |
| 45 | // |
| 46 | uint32x4_t test_u32(uint8x16_t x) { |
| 47 | return __arm_vreinterpretq_u32_u8(x); |
| 48 | } |
| 49 | |
| 50 | // CHECK-LABEL: @test_s64( |
| 51 | // CHECK-NEXT: entry: |
Mikhail Maltsev | bd722ef | 2020-03-27 16:05:18 +0000 | [diff] [blame] | 52 | // CHECK-LE-NEXT: [[TMP0:%.*]] = bitcast <16 x i8> [[X:%.*]] to <2 x i64> |
| 53 | // CHECK-BE-NEXT: [[TMP0:%.*]] = call <2 x i64> @llvm.arm.mve.vreinterpretq.v2i64.v16i8(<16 x i8> [[X:%.*]]) |
Mikhail Maltsev | 6ae3eff | 2020-03-20 14:01:57 +0000 | [diff] [blame] | 54 | // CHECK-NEXT: ret <2 x i64> [[TMP0]] |
| 55 | // |
| 56 | int64x2_t test_s64(uint8x16_t x) { |
| 57 | return __arm_vreinterpretq_s64_u8(x); |
| 58 | } |
| 59 | |
| 60 | // CHECK-LABEL: @test_u64( |
| 61 | // CHECK-NEXT: entry: |
Mikhail Maltsev | bd722ef | 2020-03-27 16:05:18 +0000 | [diff] [blame] | 62 | // CHECK-LE-NEXT: [[TMP0:%.*]] = bitcast <16 x i8> [[X:%.*]] to <2 x i64> |
| 63 | // CHECK-BE-NEXT: [[TMP0:%.*]] = call <2 x i64> @llvm.arm.mve.vreinterpretq.v2i64.v16i8(<16 x i8> [[X:%.*]]) |
Mikhail Maltsev | 6ae3eff | 2020-03-20 14:01:57 +0000 | [diff] [blame] | 64 | // CHECK-NEXT: ret <2 x i64> [[TMP0]] |
| 65 | // |
| 66 | uint64x2_t test_u64(uint8x16_t x) { |
| 67 | return __arm_vreinterpretq_u64_u8(x); |
| 68 | } |
| 69 | |
| 70 | // CHECK-LABEL: @test_f16( |
| 71 | // CHECK-NEXT: entry: |
Mikhail Maltsev | bd722ef | 2020-03-27 16:05:18 +0000 | [diff] [blame] | 72 | // CHECK-LE-NEXT: [[TMP0:%.*]] = bitcast <16 x i8> [[X:%.*]] to <8 x half> |
| 73 | // CHECK-BE-NEXT: [[TMP0:%.*]] = call <8 x half> @llvm.arm.mve.vreinterpretq.v8f16.v16i8(<16 x i8> [[X:%.*]]) |
Mikhail Maltsev | 6ae3eff | 2020-03-20 14:01:57 +0000 | [diff] [blame] | 74 | // CHECK-NEXT: ret <8 x half> [[TMP0]] |
| 75 | // |
| 76 | float16x8_t test_f16(uint8x16_t x) { |
| 77 | return __arm_vreinterpretq_f16_u8(x); |
| 78 | } |
| 79 | |
| 80 | // CHECK-LABEL: @test_f32( |
| 81 | // CHECK-NEXT: entry: |
Mikhail Maltsev | bd722ef | 2020-03-27 16:05:18 +0000 | [diff] [blame] | 82 | // CHECK-LE-NEXT: [[TMP0:%.*]] = bitcast <16 x i8> [[X:%.*]] to <4 x float> |
| 83 | // CHECK-BE-NEXT: [[TMP0:%.*]] = call <4 x float> @llvm.arm.mve.vreinterpretq.v4f32.v16i8(<16 x i8> [[X:%.*]]) |
Mikhail Maltsev | 6ae3eff | 2020-03-20 14:01:57 +0000 | [diff] [blame] | 84 | // CHECK-NEXT: ret <4 x float> [[TMP0]] |
| 85 | // |
| 86 | float32x4_t test_f32(uint8x16_t x) { |
| 87 | return __arm_vreinterpretq_f32_u8(x); |
| 88 | } |