/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_X86_XOR_64_H
#define _ASM_X86_XOR_64_H

static struct xor_block_template xor_block_sse = {
	.name = "generic_sse",
	.do_2 = xor_sse_2,
	.do_3 = xor_sse_3,
	.do_4 = xor_sse_4,
	.do_5 = xor_sse_5,
};
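
/*
 * Illustrative sketch, not compiled as part of this header: the core
 * xor code in crypto/xor.c dispatches through the selected template by
 * source count.  xor_sse_2() is defined in <asm/xor.h> as
 *
 *	void xor_sse_2(unsigned long bytes, unsigned long *p1,
 *		       unsigned long *p2);
 *
 * so a two-buffer xor through this template looks roughly like:
 *
 *	unsigned long a[1024], b[1024];
 *	xor_block_sse.do_2(sizeof(a), a, b);	// a[i] ^= b[i] over 8 KiB
 */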

/* Also try the AVX routines */
#include <asm/xor_avx.h>

/* We force the use of the SSE xor blocks because they can write around
   L2: their non-temporal stores bypass the caches instead of evicting
   useful lines to make room for destination data.  We may also be able
   to load into the L1 cache only, depending on how the CPU deals with a
   load to a line that is being prefetched. */
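
/*
 * For illustration only (userspace intrinsics, not kernel code): the
 * "write around" above refers to the non-temporal movntps stores that
 * the SSE routines in <asm/xor.h> issue via inline assembly.  Assuming
 * <xmmintrin.h>, the equivalent cache-bypassing store is roughly:
 *
 *	#include <xmmintrin.h>
 *
 *	static void stream_store(float *dst, __m128 v)
 *	{
 *		_mm_stream_ps(dst, v);	// movntps: bypasses the caches
 *		_mm_sfence();		// make the streamed store visible
 *	}
 */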
#undef XOR_TRY_TEMPLATES
#define XOR_TRY_TEMPLATES			\
do {						\
	AVX_XOR_SPEED;				\
	xor_speed(&xor_block_sse_pf64);		\
	xor_speed(&xor_block_sse);		\
} while (0)
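
/*
 * Context, simplified from crypto/xor.c: at boot, calibrate_xor_blocks()
 * expands XOR_TRY_TEMPLATES with xor_speed() defined to benchmark each
 * template against a pair of scratch buffers, then keeps the fastest:
 *
 *	#define xor_speed(templ)	do_xor_speed((templ), b1, b2)
 *	...
 *	XOR_TRY_TEMPLATES;
 *
 * AVX_XOR_SPEED, from <asm/xor_avx.h>, likewise benchmarks the AVX
 * template when the CPU supports AVX.
 */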

#endif /* _ASM_X86_XOR_64_H */