/*
 * Based on arch/arm/include/asm/assembler.h, arch/arm/mm/proc-macros.S
 *
 * Copyright (C) 1996-2000 Russell King
 * Copyright (C) 2012 ARM Ltd.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program.  If not, see <http://www.gnu.org/licenses/>.
 */
#ifndef __ASSEMBLY__
#error "Only include this from assembly code"
#endif

#ifndef __ASM_ASSEMBLER_H
#define __ASM_ASSEMBLER_H

#include <asm/asm-offsets.h>
#include <asm/cpufeature.h>
#include <asm/cputype.h>
#include <asm/page.h>
#include <asm/pgtable-hwdef.h>
#include <asm/ptrace.h>
#include <asm/thread_info.h>

/*
 * Enable and disable interrupts.
 */
	.macro	disable_irq
	msr	daifset, #2
	.endm

	.macro	enable_irq
	msr	daifclr, #2
	.endm

	.macro	save_and_disable_irq, flags
	mrs	\flags, daif
	msr	daifset, #2
	.endm

	.macro	restore_irq, flags
	msr	daif, \flags
	.endm

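/*
 * Illustrative usage sketch: bracketing a short critical section with
 * the DAIF helpers above, assuming x3 is a free scratch register for
 * the saved flags:
 *
 *	save_and_disable_irq x3		// x3 := DAIF, then mask IRQs
 *	// ... IRQ-free critical section ...
 *	restore_irq x3			// restore the saved DAIF bits
 */
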
/*
 * Enable and disable debug exceptions.
 */
	.macro	disable_dbg
	msr	daifset, #8
	.endm

	.macro	enable_dbg
	msr	daifclr, #8
	.endm

	.macro	disable_step_tsk, flgs, tmp
	tbz	\flgs, #TIF_SINGLESTEP, 9990f
	mrs	\tmp, mdscr_el1
	bic	\tmp, \tmp, #1
	msr	mdscr_el1, \tmp
	isb	// Synchronise with enable_dbg
9990:
	.endm

	.macro	enable_step_tsk, flgs, tmp
	tbz	\flgs, #TIF_SINGLESTEP, 9990f
	disable_dbg
	mrs	\tmp, mdscr_el1
	orr	\tmp, \tmp, #1
	msr	mdscr_el1, \tmp
9990:
	.endm

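/*
 * Illustrative usage of the single-step helpers, assuming x1 already
 * holds the task's TIF flags and x2 is a scratch register:
 *
 *	disable_step_tsk x1, x2		// clear MDSCR_EL1.SS if TIF_SINGLESTEP
 *	// ... code that must not be single-stepped ...
 *	enable_step_tsk x1, x2		// re-arm MDSCR_EL1.SS
 */
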
/*
 * Enable both debug exceptions and interrupts. This is likely to be
 * faster than two daifclr operations, since writes to this register
 * are self-synchronising.
 */
	.macro	enable_dbg_and_irq
	msr	daifclr, #(8 | 2)
	.endm

/*
 * SMP data memory barrier
 */
	.macro	smp_dmb, opt
	dmb	\opt
	.endm

/*
 * Value prediction barrier
 */
	.macro	csdb
	hint	#20
	.endm

/*
 * Sanitise a 64-bit bounded index wrt speculation, returning zero if out
 * of bounds.
 */
	.macro	mask_nospec64, idx, limit, tmp
	sub	\tmp, \idx, \limit
	bic	\tmp, \tmp, \idx
	and	\idx, \idx, \tmp, asr #63
	csdb
	.endm
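
/*
 * Illustrative usage: clamping an untrusted, already bounds-checked
 * index before it is used to address an array, assuming x0 holds the
 * index, x1 the number of entries and x9 is a scratch register:
 *
 *	mask_nospec64 x0, x1, x9	// x0 := 0 if x0 >= x1, even under speculation
 */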

/*
 * NOP sequence
 */
	.macro	nops, num
	.rept	\num
	nop
	.endr
	.endm

/*
 * Emit an entry into the exception table
 */
	.macro		_asm_extable, from, to
	.pushsection	__ex_table, "a"
	.align		3
	.long		(\from - .), (\to - .)
	.popsection
	.endm

#define USER(l, x...)				\
9999:	x;					\
	_asm_extable	9999b, l

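/*
 * Illustrative USER() usage sketch, assuming a hypothetical local fixup
 * label 8888 that handles the faulting access:
 *
 *	USER(8888f, ldr x2, [x1])	// a fault here branches to 8888
 *	...
 * 8888:
 *	// fixup path, e.g. return -EFAULT to the caller
 */
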
/*
 * Register aliases.
 */
lr	.req	x30		// link register

/*
 * Vector entry
 */
	.macro	ventry	label
	.align	7
	b	\label
	.endm
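
/*
 * Illustrative use: ventry builds one slot of an exception vector
 * table, where each entry is 128 bytes apart (hence the .align 7) and
 * the table itself is 2 KB aligned. With hypothetical handler labels:
 *
 *	.align	11
 * vectors:
 *	ventry	sync_handler		// Synchronous
 *	ventry	irq_handler		// IRQ
 *	...
 */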

/*
 * Select code when configured for BE.
 */
#ifdef CONFIG_CPU_BIG_ENDIAN
#define CPU_BE(code...) code
#else
#define CPU_BE(code...)
#endif

/*
 * Select code when configured for LE.
 */
#ifdef CONFIG_CPU_BIG_ENDIAN
#define CPU_LE(code...)
#else
#define CPU_LE(code...) code
#endif

/*
 * Define a macro that constructs a 64-bit value by concatenating two
 * 32-bit registers. Note that on big endian systems the order of the
 * registers is swapped.
 */
#ifndef CONFIG_CPU_BIG_ENDIAN
	.macro	regs_to_64, rd, lbits, hbits
#else
	.macro	regs_to_64, rd, hbits, lbits
#endif
	orr	\rd, \lbits, \hbits, lsl #32
	.endm
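
/*
 * Illustrative usage: combining a 64-bit value delivered in a pair of
 * 32-bit registers (e.g. by a 32-bit caller), assuming the pair is
 * w0/w1 and x2 receives the result; the macro picks the low/high order
 * that matches the configured endianness:
 *
 *	regs_to_64 x2, x0, x1
 */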

/*
 * Pseudo-ops for PC-relative adr/ldr/str <reg>, <symbol> where
 * <symbol> is within the range +/- 4 GB of the PC when running
 * in core kernel context. In module context, a movz/movk sequence
 * is used, since modules may be loaded far away from the kernel
 * when KASLR is in effect.
 */
/*
 * @dst: destination register (64 bit wide)
 * @sym: name of the symbol
 */
	.macro	adr_l, dst, sym
#ifndef MODULE
	adrp	\dst, \sym
	add	\dst, \dst, :lo12:\sym
#else
	movz	\dst, #:abs_g3:\sym
	movk	\dst, #:abs_g2_nc:\sym
	movk	\dst, #:abs_g1_nc:\sym
	movk	\dst, #:abs_g0_nc:\sym
#endif
	.endm

/*
 * @dst: destination register (32 or 64 bit wide)
 * @sym: name of the symbol
 * @tmp: optional 64-bit scratch register to be used if <dst> is a
 *       32-bit wide register, in which case it cannot be used to hold
 *       the address
 */
	.macro	ldr_l, dst, sym, tmp=
#ifndef MODULE
	.ifb	\tmp
	adrp	\dst, \sym
	ldr	\dst, [\dst, :lo12:\sym]
	.else
	adrp	\tmp, \sym
	ldr	\dst, [\tmp, :lo12:\sym]
	.endif
#else
	.ifb	\tmp
	adr_l	\dst, \sym
	ldr	\dst, [\dst]
	.else
	adr_l	\tmp, \sym
	ldr	\dst, [\tmp]
	.endif
#endif
	.endm

/*
 * @src: source register (32 or 64 bit wide)
 * @sym: name of the symbol
 * @tmp: mandatory 64-bit scratch register to calculate the address
 *       while <src> needs to be preserved.
 */
	.macro	str_l, src, sym, tmp
#ifndef MODULE
	adrp	\tmp, \sym
	str	\src, [\tmp, :lo12:\sym]
#else
	adr_l	\tmp, \sym
	str	\src, [\tmp]
#endif
	.endm
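
/*
 * Illustrative usage of the pseudo-ops above, for a hypothetical kernel
 * symbol some_variable, with x0/x1 as destination/scratch registers:
 *
 *	adr_l	x0, some_variable	// x0 := address of some_variable
 *	ldr_l	x1, some_variable	// x1 := current value of some_variable
 *	str_l	x1, some_variable, x0	// store x1 back (x0 is clobbered)
 */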

/*
 * @dst: Result of per_cpu(sym, smp_processor_id())
 * @sym: The name of the per-cpu variable
 * @tmp: scratch register
 */
	.macro adr_this_cpu, dst, sym, tmp
	adr_l	\dst, \sym
	mrs	\tmp, tpidr_el1
	add	\dst, \dst, \tmp
	.endm

/*
 * @dst: Result of READ_ONCE(per_cpu(sym, smp_processor_id()))
 * @sym: The name of the per-cpu variable
 * @tmp: scratch register
 */
	.macro ldr_this_cpu dst, sym, tmp
	adr_l	\dst, \sym
	mrs	\tmp, tpidr_el1
	ldr	\dst, [\dst, \tmp]
	.endm
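
/*
 * Illustrative usage of the per-CPU helpers, for a hypothetical per-CPU
 * variable some_pcpu_var, with x0 as destination and x1 as scratch:
 *
 *	adr_this_cpu x0, some_pcpu_var, x1	// x0 := address of this CPU's copy
 *	ldr_this_cpu x0, some_pcpu_var, x1	// x0 := its current value
 */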

/*
 * vma_vm_mm - get mm pointer from vma pointer (vma->vm_mm)
 */
	.macro	vma_vm_mm, rd, rn
	ldr	\rd, [\rn, #VMA_VM_MM]
	.endm

/*
 * mmid - get context id from mm pointer (mm->context.id)
 */
	.macro	mmid, rd, rn
	ldr	\rd, [\rn, #MM_CONTEXT_ID]
	.endm

/*
 * read_ctr - read CTR_EL0. If the system has mismatched
 * cache line sizes, provide the system wide safe value
 * from arm64_ftr_reg_ctrel0.sys_val
 */
	.macro	read_ctr, reg
alternative_if_not ARM64_MISMATCHED_CACHE_LINE_SIZE
	mrs	\reg, ctr_el0			// read CTR
	nop
alternative_else
	ldr_l	\reg, arm64_ftr_reg_ctrel0 + ARM64_FTR_SYSVAL
alternative_endif
	.endm

/*
 * raw_dcache_line_size - get the minimum D-cache line size on this CPU
 * from the CTR register.
 */
	.macro	raw_dcache_line_size, reg, tmp
	mrs	\tmp, ctr_el0			// read CTR
	ubfm	\tmp, \tmp, #16, #19		// cache line size encoding
	mov	\reg, #4			// bytes per word
	lsl	\reg, \reg, \tmp		// actual cache line size
	.endm

/*
 * dcache_line_size - get the safe D-cache line size across all CPUs
 */
	.macro	dcache_line_size, reg, tmp
	read_ctr	\tmp
	ubfm		\tmp, \tmp, #16, #19	// cache line size encoding
	mov		\reg, #4		// bytes per word
	lsl		\reg, \reg, \tmp	// actual cache line size
	.endm

/*
 * raw_icache_line_size - get the minimum I-cache line size on this CPU
 * from the CTR register.
 */
	.macro	raw_icache_line_size, reg, tmp
	mrs	\tmp, ctr_el0			// read CTR
	and	\tmp, \tmp, #0xf		// cache line size encoding
	mov	\reg, #4			// bytes per word
	lsl	\reg, \reg, \tmp		// actual cache line size
	.endm

/*
 * icache_line_size - get the safe I-cache line size across all CPUs
 */
	.macro	icache_line_size, reg, tmp
	read_ctr	\tmp
	and		\tmp, \tmp, #0xf	// cache line size encoding
	mov		\reg, #4		// bytes per word
	lsl		\reg, \reg, \tmp	// actual cache line size
	.endm

/*
 * tcr_set_idmap_t0sz - update TCR.T0SZ so that we can load the ID map
 */
	.macro	tcr_set_idmap_t0sz, valreg, tmpreg
#ifndef CONFIG_ARM64_VA_BITS_48
	ldr_l	\tmpreg, idmap_t0sz
	bfi	\valreg, \tmpreg, #TCR_T0SZ_OFFSET, #TCR_TxSZ_WIDTH
#endif
	.endm

/*
 * Macro to perform data cache maintenance for the interval
 * [kaddr, kaddr + size)
 *
 * 	op:		operation passed to the dc instruction
 * 	domain:		domain used in the dsb instruction
 * 	kaddr:		starting virtual address of the region
 * 	size:		size of the region
 * 	Corrupts:	kaddr, size, tmp1, tmp2
 */
	.macro dcache_by_line_op op, domain, kaddr, size, tmp1, tmp2
	dcache_line_size \tmp1, \tmp2
	add	\size, \kaddr, \size
	sub	\tmp2, \tmp1, #1
	bic	\kaddr, \kaddr, \tmp2
9998:
	.if	(\op == cvau || \op == cvac)
alternative_if_not ARM64_WORKAROUND_CLEAN_CACHE
	dc	\op, \kaddr
alternative_else
	dc	civac, \kaddr
alternative_endif
	.else
	dc	\op, \kaddr
	.endif
	add	\kaddr, \kaddr, \tmp1
	cmp	\kaddr, \size
	b.lo	9998b
	dsb	\domain
	.endm
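
/*
 * Illustrative usage: cleaning a buffer to the point of unification,
 * assuming x0 holds its start address, x1 its size in bytes, and x2/x3
 * are scratch registers (all four are corrupted):
 *
 *	dcache_by_line_op cvau, ish, x0, x1, x2, x3
 */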

/*
 * reset_pmuserenr_el0 - reset PMUSERENR_EL0 if PMUv3 present
 */
	.macro	reset_pmuserenr_el0, tmpreg
	mrs	\tmpreg, id_aa64dfr0_el1	// Check ID_AA64DFR0_EL1 PMUVer
	sbfx	\tmpreg, \tmpreg, #8, #4
	cmp	\tmpreg, #1			// Skip if no PMU present
	b.lt	9000f
	msr	pmuserenr_el0, xzr		// Disable PMU access from EL0
9000:
	.endm

/*
 * copy_page - copy src to dest using temp registers t1-t8
 */
	.macro copy_page dest:req src:req t1:req t2:req t3:req t4:req t5:req t6:req t7:req t8:req
9998:	ldp	\t1, \t2, [\src]
	ldp	\t3, \t4, [\src, #16]
	ldp	\t5, \t6, [\src, #32]
	ldp	\t7, \t8, [\src, #48]
	add	\src, \src, #64
	stnp	\t1, \t2, [\dest]
	stnp	\t3, \t4, [\dest, #16]
	stnp	\t5, \t6, [\dest, #32]
	stnp	\t7, \t8, [\dest, #48]
	add	\dest, \dest, #64
	tst	\src, #(PAGE_SIZE - 1)
	b.ne	9998b
	.endm
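
/*
 * Illustrative usage, assuming x0 points to the (page-aligned)
 * destination page, x1 to the (page-aligned) source page, and x2-x9
 * are free temporaries; all ten registers are corrupted:
 *
 *	copy_page x0, x1, x2, x3, x4, x5, x6, x7, x8, x9
 */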

/*
 * Annotate a function as position independent, i.e., safe to be called before
 * the kernel virtual mapping is activated.
 */
#define ENDPIPROC(x)			\
	.globl	__pi_##x;		\
	.type	__pi_##x, %function;	\
	.set	__pi_##x, x;		\
	.size	__pi_##x, . - x;	\
	ENDPROC(x)
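
/*
 * Illustrative usage, for a hypothetical assembly routine foo that must
 * also be reachable through its position-independent __pi_foo alias:
 *
 * ENTRY(foo)
 *	...
 *	ret
 * ENDPIPROC(foo)
 */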

/*
 * Emit a 64-bit absolute little endian symbol reference in a way that
 * ensures that it will be resolved at build time, even when building a
 * PIE binary. This requires cooperation from the linker script, which
 * must emit the lo32/hi32 halves individually.
 */
	.macro	le64sym, sym
	.long	\sym\()_lo32
	.long	\sym\()_hi32
	.endm

/*
 * mov_q - move an immediate constant into a 64-bit register using
 *         between 2 and 4 movz/movk instructions (depending on the
 *         magnitude and sign of the operand)
 */
	.macro	mov_q, reg, val
	.if (((\val) >> 31) == 0 || ((\val) >> 31) == 0x1ffffffff)
	movz	\reg, :abs_g1_s:\val
	.else
	.if (((\val) >> 47) == 0 || ((\val) >> 47) == 0x1ffff)
	movz	\reg, :abs_g2_s:\val
	.else
	movz	\reg, :abs_g3:\val
	movk	\reg, :abs_g2_nc:\val
	.endif
	movk	\reg, :abs_g1_nc:\val
	.endif
	movk	\reg, :abs_g0_nc:\val
	.endm
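
/*
 * Illustrative usage: loading a constant that is too wide for a single
 * mov, e.g.
 *
 *	mov_q	x0, 0x00123456789abcde	// expands to movz plus up to 3 movk
 */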

/*
 * Return the current thread_info.
 */
	.macro	get_thread_info, rd
	mrs	\rd, sp_el0
	.endm

	.macro	pte_to_phys, phys, pte
	and	\phys, \pte, #(((1 << (48 - PAGE_SHIFT)) - 1) << PAGE_SHIFT)
	.endm

/*
 * Check the MIDR_EL1 of the current CPU for a given model and a range of
 * variant/revision. See asm/cputype.h for the macros used below.
 *
 *	model:		MIDR_CPU_MODEL of CPU
 *	rv_min:		Minimum of MIDR_CPU_VAR_REV()
 *	rv_max:		Maximum of MIDR_CPU_VAR_REV()
 *	res:		Result register.
 *	tmp1, tmp2, tmp3: Temporary registers
 *
 * Corrupts: res, tmp1, tmp2, tmp3
 * Returns:  0, if the CPU id doesn't match. Non-zero otherwise
 */
	.macro	cpu_midr_match model, rv_min, rv_max, res, tmp1, tmp2, tmp3
	mrs		\res, midr_el1
	mov_q		\tmp1, (MIDR_REVISION_MASK | MIDR_VARIANT_MASK)
	mov_q		\tmp2, MIDR_CPU_MODEL_MASK
	and		\tmp3, \res, \tmp2	// Extract model
	and		\tmp1, \res, \tmp1	// rev & variant
	mov_q		\tmp2, \model
	cmp		\tmp3, \tmp2
	cset		\res, eq
	cbz		\res, .Ldone\@		// Model matches?

	.if (\rv_min != 0)			// Skip min check if rv_min == 0
	mov_q		\tmp3, \rv_min
	cmp		\tmp1, \tmp3
	cset		\res, ge
	.endif					// \rv_min != 0
	/* Skip rv_max check if rv_min == rv_max && rv_min != 0 */
	.if ((\rv_min != \rv_max) || \rv_min == 0)
	mov_q		\tmp2, \rv_max
	cmp		\tmp1, \tmp2
	cset		\tmp2, le
	and		\res, \res, \tmp2
	.endif
.Ldone\@:
	.endm
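
/*
 * Illustrative usage sketch: testing whether the current CPU is, say, a
 * Cortex-A72 between r0p0 and r1p2, using the MIDR_* helpers from
 * asm/cputype.h (the exact constants shown here are examples), with
 * x0-x3 as result/scratch registers and a hypothetical branch target:
 *
 *	cpu_midr_match MIDR_CORTEX_A72, MIDR_CPU_VAR_REV(0, 0), \
 *		       MIDR_CPU_VAR_REV(1, 2), x0, x1, x2, x3
 *	cbnz	x0, handle_affected_cpu	// non-zero result means a match
 */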

#endif	/* __ASM_ASSEMBLER_H */