// This file is dual licensed under the MIT and the University of Illinois Open
// Source Licenses. See LICENSE.TXT for details.

#include "../assembly.h"

// di_int __ashrdi3(di_int input, int count);

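// Reference semantics (a sketch, not part of the build): di_int is a signed
// 64-bit integer, and this routine performs an arithmetic right shift of it.
// Assuming >> on signed types shifts arithmetically (true for the compilers
// this runtime targets), the behavior is roughly:
//
//   di_int __ashrdi3(di_int input, int count) {   // count in [0, 63]
//       return input >> count;
//   }
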
#ifdef __i386__
#ifdef __SSE2__

.text
.align 4
DEFINE_COMPILERRT_FUNCTION(__ashrdi3)
	movd	  12(%esp),		%xmm2	// Load count
	movl	   8(%esp),		%eax	// Load high word (for the sign test below)
#ifndef TRUST_CALLERS_USE_64_BIT_STORES
	movd	   4(%esp),		%xmm0
	movd	   8(%esp),		%xmm1
	punpckldq	%xmm1,		%xmm0	// Load input
#else
	movq	   4(%esp),		%xmm0	// Load input
#endif
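	// Note on the load above: unless TRUST_CALLERS_USE_64_BIT_STORES is
	// defined, the input is assembled from two 32-bit movd loads, presumably
	// to match callers that store the argument as two 32-bit halves and so
	// avoid the store-forwarding stall a single 64-bit movq load could incur.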

	psrlq		%xmm2,		%xmm0	// unsigned (logical) shift of input by count

	testl		%eax,		%eax	// check the sign bit of the input
	jns			1f					// early out for non-negative inputs

	// If the input is negative, we need to construct the shifted sign bits
	// to OR into the result, as xmm has no arithmetic (signed) right shift.
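	// For example, with count = 10 and a negative input: the psrlq above is
	// zero-filled, and the mask built below is -1 << 54 = 0xFFC0000000000000,
	// so OR-ing it in sets the top 10 bits back to the sign.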
	pcmpeqb		%xmm1,		%xmm1	// xmm1 = -1 (all ones)
	psrlq		$58,		%xmm1	// xmm1 = 0x3f = 63
	pandn		%xmm1,		%xmm2	// xmm2 = ~count & 63 = 63 - count
	pcmpeqb		%xmm1,		%xmm1	// xmm1 = -1 again
	psubq		%xmm1,		%xmm2	// xmm2 = (63 - count) - (-1) = 64 - count
	psllq		%xmm2,		%xmm1	// xmm1 = -1 << (64 - count) = leading sign bits
	por			%xmm1,		%xmm0	// OR them into the shifted result

	// Move the result back to the general purpose registers (the 64-bit
	// return value lives in %edx:%eax) and return.
1:	movd		%xmm0,		%eax
	psrlq		$32,		%xmm0
	movd		%xmm0,		%edx
	ret

#else // Use GPRs instead of SSE2 instructions, if they aren't available.

.text
.align 4
DEFINE_COMPILERRT_FUNCTION(__ashrdi3)
	movl	  12(%esp),		%ecx	// Load count
	movl	   8(%esp),		%edx	// Load high
	movl	   4(%esp),		%eax	// Load low

	testl		$0x20,		%ecx	// If count >= 32
	jnz			1f					//    goto 1

	shrdl		%cl, %edx,	%eax	// right shift low by count, filling from high
	sarl		%cl,		%edx	// arithmetic right shift high by count
	ret

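	// Note: 32-bit x86 shifts use only the low five bits of %cl, so for
	// count in [32, 63] the sarl below shifts by count & 31 = count - 32,
	// which is exactly the remaining shift once high has been moved into low.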
1:	movl		%edx,		%eax	// Move high to low
	sarl		$31,		%edx	// high = sign extension (all 0s or all 1s)
	sarl		%cl,		%eax	// shift low by count - 32
	ret

#endif // __SSE2__
#endif // __i386__