blob: de36884611450f1ae50d20861648dd4e46c2cb3a [file] [log] [blame]
/*
 * AMD Memory Encryption Support
 *
 * Copyright (C) 2016 Advanced Micro Devices, Inc.
 *
 * Author: Tom Lendacky <thomas.lendacky@amd.com>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 */
12
13#include <linux/linkage.h>
14#include <asm/pgtable.h>
15#include <asm/page.h>
16#include <asm/processor-flags.h>
17#include <asm/msr-index.h>
Tom Lendacky6ebcb062017-07-17 16:10:32 -050018
19 .text
20 .code64
ENTRY(sme_encrypt_execute)

	/*
	 * Encrypt the kernel "in place" by driving __enc_copy from a
	 * non-encrypted workarea.
	 *
	 * Entry parameters (SysV AMD64):
	 *   RDI - virtual address for the encrypted kernel mapping
	 *   RSI - virtual address for the decrypted kernel mapping
	 *   RDX - length of kernel
	 *   RCX - virtual address of the encryption workarea, including:
	 *         - stack page (PAGE_SIZE)
	 *         - encryption routine page (PAGE_SIZE)
	 *         - intermediate copy buffer (PMD_PAGE_SIZE)
	 *   R8  - physical address of the pagetables to use for encryption
	 *
	 * __enc_copy is copied into the workarea and called there, so it
	 * keeps executing from a non-encrypted mapping while the kernel
	 * text (which contains __enc_copy itself) is being encrypted.
	 */

	push	%rbp
	movq	%rsp, %rbp		/* RBP now has original stack pointer */

	/* Set up a one page stack in the non-encrypted memory area */
	movq	%rcx, %rax		/* Workarea stack page */
	leaq	PAGE_SIZE(%rax), %rsp	/* Set new stack pointer */
	addq	$PAGE_SIZE, %rax	/* Workarea encryption routine */

	/*
	 * Stash the args in scratch regs; R12 is callee-saved so it must
	 * be preserved (pushed onto the new workarea stack).
	 */
	push	%r12
	movq	%rdi, %r10		/* Encrypted kernel */
	movq	%rsi, %r11		/* Decrypted kernel */
	movq	%rdx, %r12		/* Kernel length */

	/*
	 * Copy encryption routine into the workarea.
	 * rep movsb consumes RDI/RSI/RCX, which is why the incoming args
	 * were moved to R10/R11/R12 above.
	 */
	movq	%rax, %rdi		/* Workarea encryption routine */
	leaq	__enc_copy(%rip), %rsi	/* Encryption routine */
	movq	$(.L__enc_copy_end - __enc_copy), %rcx	/* Encryption routine length */
	rep	movsb

	/* Setup registers for call, per __enc_copy's register contract */
	movq	%r10, %rdi		/* Encrypted kernel */
	movq	%r11, %rsi		/* Decrypted kernel */
	movq	%r8, %rdx		/* Pagetables used for encryption */
	movq	%r12, %rcx		/* Kernel length */
	movq	%rax, %r8		/* Workarea encryption routine */
	addq	$PAGE_SIZE, %r8		/* Workarea intermediate copy buffer */

	call	*%rax			/* Call the encryption routine */

	pop	%r12

	movq	%rbp, %rsp		/* Restore original stack pointer */
	pop	%rbp

	ret
ENDPROC(sme_encrypt_execute)
71
ENTRY(__enc_copy)
/*
 * Routine used to encrypt kernel.
 * This routine must be run outside of the kernel proper since
 * the kernel will be encrypted during the process. So this
 * routine is defined here and then copied to an area outside
 * of the kernel where it will remain and run decrypted
 * during execution.
 *
 * On entry the registers must be:
 * RDI - virtual address for the encrypted kernel mapping
 * RSI - virtual address for the decrypted kernel mapping
 * RDX - address of the pagetables to use for encryption
 * RCX - length of kernel
 * R8 - intermediate copy buffer
 *
 * RAX - points to this routine
 *
 * The kernel will be encrypted by copying from the non-encrypted
 * kernel space to an intermediate buffer and then copying from the
 * intermediate buffer back to the encrypted kernel space. The physical
 * addresses of the two kernel space mappings are the same which
 * results in the kernel being encrypted "in place".
 *
 * NOTE(review): the copy loop below assumes the kernel length in RCX
 * is a whole multiple of PMD_PAGE_SIZE (2MB) — the loop exits only on
 * an exact-zero remainder. Confirm against the caller's setup.
 */
	/* Enable the new page tables */
	mov	%rdx, %cr3

	/* Flush any global TLBs by toggling CR4.PGE off and back on */
	mov	%cr4, %rdx
	andq	$~X86_CR4_PGE, %rdx
	mov	%rdx, %cr4
	orq	$X86_CR4_PGE, %rdx
	mov	%rdx, %cr4

	/* R15 is callee-saved; it holds the original PAT value below */
	push	%r15

	movq	%rcx, %r9		/* Save kernel length */
	movq	%rdi, %r10		/* Save encrypted kernel address */
	movq	%rsi, %r11		/* Save decrypted kernel address */

	/*
	 * Set the PAT register PA5 entry to write-protect (encoding 0x05).
	 * PA4-PA7 live in EDX of the MSR pair, so only the high half is
	 * modified; EAX (PA0-PA3) is left untouched, which is why saving
	 * RDX alone in R15 is enough for the restore at the end.
	 * NOTE(review): presumably the mappings used for the copy select
	 * PA5 — confirm against the pagetable construction code.
	 */
	movl	$MSR_IA32_CR_PAT, %ecx
	rdmsr
	mov	%rdx, %r15		/* Save original PAT value */
	andl	$0xffff00ff, %edx	/* Clear PA5 */
	orl	$0x00000500, %edx	/* Set PA5 to WP */
	wrmsr

	wbinvd				/* Invalidate any cache entries */

	/* Copy/encrypt 2MB at a time */
1:
	/* Decrypted kernel mapping -> intermediate buffer */
	movq	%r11, %rsi		/* Source - decrypted kernel */
	movq	%r8, %rdi		/* Dest - intermediate copy buffer */
	movq	$PMD_PAGE_SIZE, %rcx	/* 2MB length */
	rep	movsb

	/* Intermediate buffer -> encrypted kernel mapping (same physical) */
	movq	%r8, %rsi		/* Source - intermediate copy buffer */
	movq	%r10, %rdi		/* Dest - encrypted kernel */
	movq	$PMD_PAGE_SIZE, %rcx	/* 2MB length */
	rep	movsb

	addq	$PMD_PAGE_SIZE, %r11
	addq	$PMD_PAGE_SIZE, %r10
	subq	$PMD_PAGE_SIZE, %r9	/* Kernel length decrement */
	jnz	1b			/* Kernel length not zero? */

	/* Restore PAT register (rdmsr refreshes EAX; EDX gets saved value) */
	movl	$MSR_IA32_CR_PAT, %ecx
	rdmsr
	mov	%r15, %rdx		/* Restore original PAT value */
	wrmsr

	pop	%r15

	ret
.L__enc_copy_end:
ENDPROC(__enc_copy)