; NOTE: Assertions have been autogenerated by utils/update_test_checks.py
; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=-bmi | FileCheck %s --check-prefix=ALL --check-prefix=NO_BMI
; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=+bmi | FileCheck %s --check-prefix=ALL --check-prefix=BMI

; Clear high bits via shift, set them with xor (not), then mask them off.

define i32 @shrink_xor_constant1(i32 %x) {
; ALL-LABEL: shrink_xor_constant1:
; ALL:       # BB#0:
; ALL-NEXT:    shrl $31, %edi
; ALL-NEXT:    xorl $1, %edi
; ALL-NEXT:    movl %edi, %eax
; ALL-NEXT:    retq
;
  %sh = lshr i32 %x, 31
  %not = xor i32 %sh, -1
  %and = and i32 %not, 1
  ret i32 %and
}

; Clear low bits via shift, set them with xor (not), then mask them off.

define i8 @shrink_xor_constant2(i8 %x) {
; ALL-LABEL: shrink_xor_constant2:
; ALL:       # BB#0:
; ALL-NEXT:    shlb $5, %dil
; ALL-NEXT:    xorb $-32, %dil
; ALL-NEXT:    movl %edi, %eax
; ALL-NEXT:    retq
;
  %sh = shl i8 %x, 5
  %not = xor i8 %sh, -1
  %and = and i8 %not, 224 ; 0xE0
  ret i8 %and
}