#include <linux/linkage.h>
#include <linux/errno.h>
#include <asm/asm.h>
#include <asm/msr.h>

#ifdef CONFIG_X86_64
/*
 * int {rdmsr,wrmsr}_safe_regs(u32 gprs[8]);
 *
 * reg layout: u32 gprs[eax, ecx, edx, ebx, esp, ebp, esi, edi]
 */
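/*
 * A minimal sketch of how a C caller might use these functions (the
 * variable names below are illustrative, not part of this file): fill
 * gprs[] with the input register values, with the MSR number in
 * gprs[1] (%ecx), and read the results back from the array on success.
 *
 *	u32 gprs[8] = { 0 };
 *	u64 val;
 *
 *	gprs[1] = msr;			// %ecx selects the MSR
 *	if (!rdmsr_safe_regs(gprs))	// 0 on success, -EIO on a fault
 *		val = gprs[0] | ((u64)gprs[2] << 32);	// %eax:%edx
 */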
.macro op_safe_regs op
ENTRY(\op\()_safe_regs)
	pushq %rbx
	pushq %rbp
	movq	%rdi, %r10	/* Save pointer */
	xorl	%r11d, %r11d	/* Return value */
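	/*
	 * Load the caller's register values from gprs[]; the %esp slot
	 * (offset 16) is deliberately left untouched.
	 */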
	movl    (%rdi), %eax
	movl    4(%rdi), %ecx
	movl    8(%rdi), %edx
	movl    12(%rdi), %ebx
	movl    20(%rdi), %ebp
	movl    24(%rdi), %esi
	movl    28(%rdi), %edi
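	/*
	 * 1: is the rdmsr/wrmsr instruction itself (it may fault; see the
	 * fixup at 3: below).  2: writes the resulting registers back into
	 * the gprs[] array.
	 */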
1:	\op
2:	movl    %eax, (%r10)
	movl	%r11d, %eax	/* Return value */
	movl    %ecx, 4(%r10)
	movl    %edx, 8(%r10)
	movl    %ebx, 12(%r10)
	movl    %ebp, 20(%r10)
	movl    %esi, 24(%r10)
	movl    %edi, 28(%r10)
	popq %rbp
	popq %rbx
	ret
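	/*
	 * Fault fixup: if the access at 1: trapped, the exception table
	 * entry below sends us here to return -EIO instead of crashing.
	 */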
3:
	movl    $-EIO, %r11d
	jmp     2b

	_ASM_EXTABLE(1b, 3b)
ENDPROC(\op\()_safe_regs)
.endm

#else /* X86_32 */

.macro op_safe_regs op
ENTRY(\op\()_safe_regs)
	pushl %ebx
	pushl %ebp
	pushl %esi
	pushl %edi
	pushl $0              /* Return value */
	pushl %eax
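	/*
	 * %eax holds the gprs[] pointer (also saved on the stack above).
	 * Load the inputs, reading gprs[0] last since it overwrites %eax.
	 */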
	movl    4(%eax), %ecx
	movl    8(%eax), %edx
	movl    12(%eax), %ebx
	movl    20(%eax), %ebp
	movl    24(%eax), %esi
	movl    28(%eax), %edi
	movl    (%eax), %eax
1:	\op
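	/*
	 * 2: recover the saved gprs[] pointer from the stack, store the
	 * result registers back into the array, then pop the return value
	 * (0, or -EIO written by the fixup at 3:) into %eax.
	 */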
2:	pushl %eax
	movl    4(%esp), %eax
	popl (%eax)
	addl    $4, %esp
	movl    %ecx, 4(%eax)
	movl    %edx, 8(%eax)
	movl    %ebx, 12(%eax)
	movl    %ebp, 20(%eax)
	movl    %esi, 24(%eax)
	movl    %edi, 28(%eax)
	popl %eax
	popl %edi
	popl %esi
	popl %ebp
	popl %ebx
	ret
3:
	movl    $-EIO, 4(%esp)
	jmp     2b

	_ASM_EXTABLE(1b, 3b)
ENDPROC(\op\()_safe_regs)
.endm

#endif

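/* Instantiate both variants from the macro above. */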
op_safe_regs rdmsr
op_safe_regs wrmsr