Prevent speculative execution past ERET

Even though ERET always causes a jump to another address, AArch64 CPUs
speculatively execute the instructions that follow it, as if the ERET
were not a jump instruction at all. The speculative execution does not
cross privilege levels (it does not follow the jump to the less
privileged target, as one might expect), but continues at the current
privilege level as if the ERET instruction did not change the control
flow - thus executing whatever happens to be placed after the ERET
instruction. The results of this speculative execution are always
architecturally discarded, but they can leak data through
microarchitectural side channels. This speculative execution is very
reliable (it appears to be unconditional) and it manages to complete
even relatively heavyweight operations (e.g. multiple dependent
fetches from uncached memory).
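
The danger is that any code which happens to follow an ERET can run
speculatively with the full privileges of the exception handler. A
hypothetical gadget (the registers and values here are illustrative,
not taken from this codebase) could look like:

	eret			/* never falls through architecturally */
	/* ...yet these may still execute speculatively at EL3: */
	ldrb	w1, [x0]	/* load a secret byte */
	lsl	x1, x1, #6	/* scale to one cache line per value */
	ldr	x2, [x3, x1]	/* leave a cache footprint indexed by it */

An attacker can then recover the secret byte through a cache timing
side channel (e.g. FLUSH+RELOAD) on the probe array addressed by x3.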

This was fixed in Linux, FreeBSD, OpenBSD and OP-TEE OS:
https://github.com/torvalds/linux/commit/679db70801da9fda91d26caf13bf5b5ccc74e8e8
https://github.com/freebsd/freebsd/commit/29fb48ace4186a41c409fde52bcf4216e9e50b61
https://github.com/openbsd/src/commit/3a08873ece1cb28ace89fd65e8f3c1375cc98de2
https://github.com/OP-TEE/optee_os/commit/abfd092aa19f9c0251e3d5551e2d68a9ebcfec8a
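
This patch takes the same approach: the ERETs in TF-A are replaced
with an exception_return macro (added to
include/arch/aarch64/asm_macros.S below), which places a barrier
sequence right behind the ERET:

	.macro exception_return
	eret
	dsb	nsh
	isb
	.endm

The DSB NSH / ISB pair is never reached architecturally, but it acts
as a speculation barrier: the CPU cannot speculatively execute past
it, so instructions placed after the ERET can no longer run and leave
a microarchitectural footprint.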

The leak is demonstrated by the SafeSide examples:
https://github.com/google/safeside/blob/master/demos/eret_hvc_smc_wrapper.cc
https://github.com/google/safeside/blob/master/kernel_modules/kmod_eret_hvc_smc/eret_hvc_smc_module.c

Signed-off-by: Anthony Steinhauser <asteinhauser@google.com>
Change-Id: Iead39b0b9fb4b8d8b5609daaa8be81497ba63a0f
diff --git a/bl1/aarch64/bl1_exceptions.S b/bl1/aarch64/bl1_exceptions.S
index 9bba6ee..9dc9e6c 100644
--- a/bl1/aarch64/bl1_exceptions.S
+++ b/bl1/aarch64/bl1_exceptions.S
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2013-2019, ARM Limited and Contributors. All rights reserved.
+ * Copyright (c) 2013-2020, ARM Limited and Contributors. All rights reserved.
  *
  * SPDX-License-Identifier: BSD-3-Clause
  */
@@ -202,7 +202,7 @@
 	ldp	x4, x5, [x20, #(ENTRY_POINT_INFO_ARGS_OFFSET + 0x20)]
 	ldp	x2, x3, [x20, #(ENTRY_POINT_INFO_ARGS_OFFSET + 0x10)]
 	ldp	x0, x1, [x20, #(ENTRY_POINT_INFO_ARGS_OFFSET + 0x0)]
-	eret
+	exception_return
 endfunc smc_handler64
 
 unexpected_sync_exception:
diff --git a/bl2/aarch64/bl2_el3_entrypoint.S b/bl2/aarch64/bl2_el3_entrypoint.S
index f97121e..2ca6acf 100644
--- a/bl2/aarch64/bl2_el3_entrypoint.S
+++ b/bl2/aarch64/bl2_el3_entrypoint.S
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2017-2019, ARM Limited and Contributors. All rights reserved.
+ * Copyright (c) 2017-2020, ARM Limited and Contributors. All rights reserved.
  *
  * SPDX-License-Identifier: BSD-3-Clause
  */
@@ -95,5 +95,5 @@
 	ldp	x4, x5, [x20, #(ENTRY_POINT_INFO_ARGS_OFFSET + 0x20)]
 	ldp	x2, x3, [x20, #(ENTRY_POINT_INFO_ARGS_OFFSET + 0x10)]
 	ldp	x0, x1, [x20, #(ENTRY_POINT_INFO_ARGS_OFFSET + 0x0)]
-	eret
+	exception_return
 endfunc bl2_run_next_image
diff --git a/bl31/aarch64/runtime_exceptions.S b/bl31/aarch64/runtime_exceptions.S
index 51f5b7b..7f739a9 100644
--- a/bl31/aarch64/runtime_exceptions.S
+++ b/bl31/aarch64/runtime_exceptions.S
@@ -456,7 +456,7 @@
 smc_prohibited:
 	ldr	x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]
 	mov	x0, #SMC_UNK
-	eret
+	exception_return
 
 #if DEBUG
 rt_svc_fw_critical_error:
diff --git a/bl32/tsp/aarch64/tsp_exceptions.S b/bl32/tsp/aarch64/tsp_exceptions.S
index ad4b648..4c6a56a 100644
--- a/bl32/tsp/aarch64/tsp_exceptions.S
+++ b/bl32/tsp/aarch64/tsp_exceptions.S
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2013-2016, ARM Limited and Contributors. All rights reserved.
+ * Copyright (c) 2013-2020, ARM Limited and Contributors. All rights reserved.
  *
  * SPDX-License-Identifier: BSD-3-Clause
  */
@@ -64,7 +64,7 @@
 	smc	#0
 interrupt_exit_\label:
 	restore_caller_regs_and_lr
-	eret
+	exception_return
 	.endm
 
 	.globl	tsp_exceptions
diff --git a/include/arch/aarch64/asm_macros.S b/include/arch/aarch64/asm_macros.S
index 79e0ad7..a7d5a3d 100644
--- a/include/arch/aarch64/asm_macros.S
+++ b/include/arch/aarch64/asm_macros.S
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2013-2019, ARM Limited and Contributors. All rights reserved.
+ * Copyright (c) 2013-2020, ARM Limited and Contributors. All rights reserved.
  *
  * SPDX-License-Identifier: BSD-3-Clause
  */
@@ -218,4 +218,13 @@
 	ret
 	.endm
 
+	/*
+	 * Macro for mitigating against speculative execution beyond ERET.
+	 */
+	.macro exception_return
+	eret
+	dsb nsh
+	isb
+	.endm
+
 #endif /* ASM_MACROS_S */
diff --git a/lib/cpus/aarch64/cortex_a76.S b/lib/cpus/aarch64/cortex_a76.S
index 868667e..baefa46 100644
--- a/lib/cpus/aarch64/cortex_a76.S
+++ b/lib/cpus/aarch64/cortex_a76.S
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2017-2019, ARM Limited and Contributors. All rights reserved.
+ * Copyright (c) 2017-2020, ARM Limited and Contributors. All rights reserved.
  *
  * SPDX-License-Identifier: BSD-3-Clause
  */
@@ -85,7 +85,7 @@
 		bic	x3, x2, #CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
 		csel	x3, x3, x1, eq
 		msr	CORTEX_A76_CPUACTLR2_EL1, x3
-		eret	/* ERET implies ISB */
+		exception_return /* exception_return contains ISB */
 	.endif
 1:
 	/*
diff --git a/lib/cpus/aarch64/neoverse_n1.S b/lib/cpus/aarch64/neoverse_n1.S
index faf53a8..d058d98 100644
--- a/lib/cpus/aarch64/neoverse_n1.S
+++ b/lib/cpus/aarch64/neoverse_n1.S
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2017-2019, ARM Limited and Contributors. All rights reserved.
+ * Copyright (c) 2017-2020, ARM Limited and Contributors. All rights reserved.
  *
  * SPDX-License-Identifier: BSD-3-Clause
  */
@@ -553,7 +553,7 @@
 	 */
 	esb
 #endif
-	eret
+	exception_return
 1:
 	ret
 endfunc neoverse_n1_errata_ic_trap_handler
diff --git a/lib/cpus/aarch64/wa_cve_2017_5715_mmu.S b/lib/cpus/aarch64/wa_cve_2017_5715_mmu.S
index 9277cc6..5134ee3 100644
--- a/lib/cpus/aarch64/wa_cve_2017_5715_mmu.S
+++ b/lib/cpus/aarch64/wa_cve_2017_5715_mmu.S
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2017-2018, ARM Limited and Contributors. All rights reserved.
+ * Copyright (c) 2017-2020, ARM Limited and Contributors. All rights reserved.
  *
  * SPDX-License-Identifier: BSD-3-Clause
  */
@@ -48,7 +48,7 @@
 		ccmp	w0, w1, #0, eq
 		/* Static predictor will predict a fall through */
 		bne	1f
-		eret
+		exception_return
 1:
 	.endif
 
diff --git a/lib/el3_runtime/aarch64/context.S b/lib/el3_runtime/aarch64/context.S
index 1bbd610..9bd25ba 100644
--- a/lib/el3_runtime/aarch64/context.S
+++ b/lib/el3_runtime/aarch64/context.S
@@ -534,6 +534,6 @@
 	 */
 	esb
 #endif
-	eret
+	exception_return
 
 endfunc el3_exit
diff --git a/plat/renesas/rcar/aarch64/plat_helpers.S b/plat/renesas/rcar/aarch64/plat_helpers.S
index 61dd622..138d988 100644
--- a/plat/renesas/rcar/aarch64/plat_helpers.S
+++ b/plat/renesas/rcar/aarch64/plat_helpers.S
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2013-2014, ARM Limited and Contributors. All rights reserved.
+ * Copyright (c) 2013-2020, ARM Limited and Contributors. All rights reserved.
  * Copyright (c) 2015-2019, Renesas Electronics Corporation. All rights reserved.
  *
  * SPDX-License-Identifier: BSD-3-Clause
@@ -189,7 +189,7 @@
 	ldp	x0, x1, [x20, #ENTRY_POINT_INFO_PC_OFFSET]
 	msr	elr_el3, x0
 	msr	spsr_el3, x1
-	eret
+	exception_return
 endfunc bl2_enter_bl31
 
 	/* -----------------------------------------------------
diff --git a/services/std_svc/spm_mm/aarch64/spm_mm_shim_exceptions.S b/services/std_svc/spm_mm/aarch64/spm_mm_shim_exceptions.S
index dab6150..be4084c 100644
--- a/services/std_svc/spm_mm/aarch64/spm_mm_shim_exceptions.S
+++ b/services/std_svc/spm_mm/aarch64/spm_mm_shim_exceptions.S
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2017, ARM Limited and Contributors. All rights reserved.
+ * Copyright (c) 2017-2020, ARM Limited and Contributors. All rights reserved.
  *
  * SPDX-License-Identifier: BSD-3-Clause
  */
@@ -87,7 +87,7 @@
 do_smc:
 	mrs	x30, tpidr_el1
 	smc	#0
-	eret
+	exception_return
 
 	/* AArch64 system instructions trap are handled as a panic for now */
 handle_sys_trap: