// RUN: %clang_cc1 %s -DTEST_XSAVE -O0 -triple=i686-unknown-unknown -target-feature +xsave -emit-llvm -o - -Wall -Werror | FileCheck %s --check-prefix=XSAVE
// RUN: %clang_cc1 %s -DTEST_XSAVE -O0 -triple=i686-unknown-unknown -target-feature +xsave -fno-signed-char -emit-llvm -o - -Wall -Werror | FileCheck %s --check-prefix=XSAVE

// RUN: %clang_cc1 %s -DTEST_XSAVEOPT -O0 -triple=i686-unknown-unknown -target-feature +xsave -target-feature +xsaveopt -emit-llvm -o - -Wall -Werror | FileCheck %s --check-prefix=XSAVEOPT
// RUN: %clang_cc1 %s -DTEST_XSAVEOPT -O0 -triple=i686-unknown-unknown -target-feature +xsave -target-feature +xsaveopt -fno-signed-char -emit-llvm -o - -Wall -Werror | FileCheck %s --check-prefix=XSAVEOPT

// RUN: %clang_cc1 %s -DTEST_XSAVEC -O0 -triple=i686-unknown-unknown -target-feature +xsave -target-feature +xsavec -emit-llvm -o - -Wall -Werror | FileCheck %s --check-prefix=XSAVEC
// RUN: %clang_cc1 %s -DTEST_XSAVEC -O0 -triple=i686-unknown-unknown -target-feature +xsave -target-feature +xsavec -fno-signed-char -emit-llvm -o - -Wall -Werror | FileCheck %s --check-prefix=XSAVEC

// RUN: %clang_cc1 %s -DTEST_XSAVES -O0 -triple=i686-unknown-unknown -target-feature +xsave -target-feature +xsaves -emit-llvm -o - -Wall -Werror | FileCheck %s --check-prefix=XSAVES
// RUN: %clang_cc1 %s -DTEST_XSAVES -O0 -triple=i686-unknown-unknown -target-feature +xsave -target-feature +xsaves -fno-signed-char -emit-llvm -o - -Wall -Werror | FileCheck %s --check-prefix=XSAVES
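// Each RUN line compiles this file with one TEST_* macro defined, enabling the
// matching -target-feature set, and FileCheck verifies the emitted IR under that
// feature's prefix; the second RUN line of each pair repeats the check with
// -fno-signed-char.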
void test() {
  unsigned long long tmp_ULLi = 0;
  void* tmp_vp = 0;
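  // Each builtin below takes a pointer to an XSAVE area plus a 64-bit feature
  // mask; the checks expect clang to lower the call to the corresponding
  // llvm.x86.* intrinsic with the mask split into its high and low 32-bit
  // halves (the EDX:EAX pair consumed by the underlying instructions).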
#ifdef TEST_XSAVE
  // XSAVE: [[tmp_vp_1:%[0-9a-zA-Z]+]] = load i8*, i8** %tmp_vp, align 4
  // XSAVE: [[tmp_ULLi_1:%[0-9a-zA-Z]+]] = load i64, i64* %tmp_ULLi, align 8
  // XSAVE: [[high64_1:%[0-9a-zA-Z]+]] = lshr i64 [[tmp_ULLi_1]], 32
  // XSAVE: [[high32_1:%[0-9a-zA-Z]+]] = trunc i64 [[high64_1]] to i32
  // XSAVE: [[low32_1:%[0-9a-zA-Z]+]] = trunc i64 [[tmp_ULLi_1]] to i32
  // XSAVE: call void @llvm.x86.xsave(i8* [[tmp_vp_1]], i32 [[high32_1]], i32 [[low32_1]])
  (void)__builtin_ia32_xsave(tmp_vp, tmp_ULLi);

  // XSAVE: [[tmp_vp_3:%[0-9a-zA-Z]+]] = load i8*, i8** %tmp_vp, align 4
  // XSAVE: [[tmp_ULLi_3:%[0-9a-zA-Z]+]] = load i64, i64* %tmp_ULLi, align 8
  // XSAVE: [[high64_3:%[0-9a-zA-Z]+]] = lshr i64 [[tmp_ULLi_3]], 32
  // XSAVE: [[high32_3:%[0-9a-zA-Z]+]] = trunc i64 [[high64_3]] to i32
  // XSAVE: [[low32_3:%[0-9a-zA-Z]+]] = trunc i64 [[tmp_ULLi_3]] to i32
  // XSAVE: call void @llvm.x86.xrstor(i8* [[tmp_vp_3]], i32 [[high32_3]], i32 [[low32_3]])
  (void)__builtin_ia32_xrstor(tmp_vp, tmp_ULLi);
#endif

#ifdef TEST_XSAVEOPT
  // XSAVEOPT: [[tmp_vp_1:%[0-9a-zA-Z]+]] = load i8*, i8** %tmp_vp, align 4
  // XSAVEOPT: [[tmp_ULLi_1:%[0-9a-zA-Z]+]] = load i64, i64* %tmp_ULLi, align 8
  // XSAVEOPT: [[high64_1:%[0-9a-zA-Z]+]] = lshr i64 [[tmp_ULLi_1]], 32
  // XSAVEOPT: [[high32_1:%[0-9a-zA-Z]+]] = trunc i64 [[high64_1]] to i32
  // XSAVEOPT: [[low32_1:%[0-9a-zA-Z]+]] = trunc i64 [[tmp_ULLi_1]] to i32
  // XSAVEOPT: call void @llvm.x86.xsaveopt(i8* [[tmp_vp_1]], i32 [[high32_1]], i32 [[low32_1]])
  (void)__builtin_ia32_xsaveopt(tmp_vp, tmp_ULLi);
#endif

#ifdef TEST_XSAVEC
  // XSAVEC: [[tmp_vp_1:%[0-9a-zA-Z]+]] = load i8*, i8** %tmp_vp, align 4
  // XSAVEC: [[tmp_ULLi_1:%[0-9a-zA-Z]+]] = load i64, i64* %tmp_ULLi, align 8
  // XSAVEC: [[high64_1:%[0-9a-zA-Z]+]] = lshr i64 [[tmp_ULLi_1]], 32
  // XSAVEC: [[high32_1:%[0-9a-zA-Z]+]] = trunc i64 [[high64_1]] to i32
  // XSAVEC: [[low32_1:%[0-9a-zA-Z]+]] = trunc i64 [[tmp_ULLi_1]] to i32
  // XSAVEC: call void @llvm.x86.xsavec(i8* [[tmp_vp_1]], i32 [[high32_1]], i32 [[low32_1]])
  (void)__builtin_ia32_xsavec(tmp_vp, tmp_ULLi);
#endif

#ifdef TEST_XSAVES
  // XSAVES: [[tmp_vp_1:%[0-9a-zA-Z]+]] = load i8*, i8** %tmp_vp, align 4
  // XSAVES: [[tmp_ULLi_1:%[0-9a-zA-Z]+]] = load i64, i64* %tmp_ULLi, align 8
  // XSAVES: [[high64_1:%[0-9a-zA-Z]+]] = lshr i64 [[tmp_ULLi_1]], 32
  // XSAVES: [[high32_1:%[0-9a-zA-Z]+]] = trunc i64 [[high64_1]] to i32
  // XSAVES: [[low32_1:%[0-9a-zA-Z]+]] = trunc i64 [[tmp_ULLi_1]] to i32
  // XSAVES: call void @llvm.x86.xsaves(i8* [[tmp_vp_1]], i32 [[high32_1]], i32 [[low32_1]])
  (void)__builtin_ia32_xsaves(tmp_vp, tmp_ULLi);

  // XSAVES: [[tmp_vp_3:%[0-9a-zA-Z]+]] = load i8*, i8** %tmp_vp, align 4
  // XSAVES: [[tmp_ULLi_3:%[0-9a-zA-Z]+]] = load i64, i64* %tmp_ULLi, align 8
  // XSAVES: [[high64_3:%[0-9a-zA-Z]+]] = lshr i64 [[tmp_ULLi_3]], 32
  // XSAVES: [[high32_3:%[0-9a-zA-Z]+]] = trunc i64 [[high64_3]] to i32
  // XSAVES: [[low32_3:%[0-9a-zA-Z]+]] = trunc i64 [[tmp_ULLi_3]] to i32
  // XSAVES: call void @llvm.x86.xrstors(i8* [[tmp_vp_3]], i32 [[high32_3]], i32 [[low32_3]])
  (void)__builtin_ia32_xrstors(tmp_vp, tmp_ULLi);
#endif
}