David Howells | a0616cd | 2012-03-28 18:30:02 +0100 | [diff] [blame] | 1 | /* |
| 2 | * Copyright IBM Corp. 1999, 2009 |
| 3 | * |
| 4 | * Author(s): Martin Schwidefsky <schwidefsky@de.ibm.com> |
| 5 | */ |
| 6 | |
| 7 | #ifndef __ASM_SWITCH_TO_H |
| 8 | #define __ASM_SWITCH_TO_H |
| 9 | |
| 10 | #include <linux/thread_info.h> |
Hendrik Brueckner | b075390 | 2015-10-06 12:25:59 +0200 | [diff] [blame] | 11 | #include <asm/fpu/api.h> |
Heiko Carstens | bee5c28 | 2013-08-20 10:24:12 +0200 | [diff] [blame] | 12 | #include <asm/ptrace.h> |
Martin Schwidefsky | 916cda1 | 2016-01-26 14:10:34 +0100 | [diff] [blame] | 13 | #include <asm/guarded_storage.h> |
David Howells | a0616cd | 2012-03-28 18:30:02 +0100 | [diff] [blame] | 14 | |
/*
 * Low-level context switch: saves the CPU state of the first task and
 * resumes the second, returning the task that was previously running
 * (the "last" task of the generic switch_to() contract).
 * NOTE(review): presumably implemented in assembly (entry code) — the
 * definition is not visible in this header; confirm in arch sources.
 */
extern struct task_struct *__switch_to(void *, void *);
/* Recompute the per-task control register contents for @task. */
extern void update_cr_regs(struct task_struct *task);
/*
 * Store all 16 access registers (a0-a15) into the buffer at @acrs.
 *
 * The caller must provide room for NUM_ACRS 32-bit values.  The local
 * wrapper struct exists only so the "Q" memory-operand constraint
 * describes the *entire* destination area: without it the compiler
 * would assume STAM writes just *acrs and could mis-schedule or
 * dead-store-eliminate surrounding accesses.
 */
static inline void save_access_regs(unsigned int *acrs)
{
	typedef struct { int _[NUM_ACRS]; } acrstype;

	/* STAM 0,15: store access registers 0 through 15. */
	asm volatile("stam 0,15,%0" : "=Q" (*(acrstype *)acrs));
}
| 24 | |
/*
 * Load all 16 access registers (a0-a15) from the buffer at @acrs.
 *
 * Mirror of save_access_regs(): the wrapper struct makes the "Q"
 * input constraint cover the full NUM_ACRS-word source area, so the
 * compiler knows every word is read by the LAM instruction.
 */
static inline void restore_access_regs(unsigned int *acrs)
{
	typedef struct { int _[NUM_ACRS]; } acrstype;

	/* LAM 0,15: load access registers 0 through 15. */
	asm volatile("lam 0,15,%0" : : "Q" (*(acrstype *)acrs));
}
| 31 | |
/*
 * Arch-specific context-switch macro used by the scheduler core.
 *
 * For the outgoing task (if it owns an mm, i.e. is a user task) the
 * lazily managed register state is written back to its thread struct:
 * FPU/vector state, access registers, and the runtime-instrumentation
 * and guarded-storage control blocks.  For the incoming user task the
 * control registers are updated, CIF_FPU is set so the FPU/vector
 * registers are reloaded lazily on return to user space, and the
 * access-register / RI / GS state is restored.  Finally __switch_to()
 * performs the actual switch and yields the previously running task,
 * which is assigned back to @prev.
 *
 * NOTE(review): the @last parameter is unused here — presumably the
 * callers pass the same lvalue as @prev, which receives the "last"
 * task from __switch_to(); confirm against the scheduler call site.
 * NOTE(review): the ->mm checks skip save/restore entirely for kernel
 * threads; this assumes kernel threads never modify access registers
 * or the RI/GS control blocks — verify that assumption still holds.
 */
#define switch_to(prev,next,last) do { \
	if (prev->mm) { \
		save_fpu_regs(); \
		save_access_regs(&prev->thread.acrs[0]); \
		save_ri_cb(prev->thread.ri_cb); \
		save_gs_cb(prev->thread.gs_cb); \
	} \
	if (next->mm) { \
		update_cr_regs(next); \
		set_cpu_flag(CIF_FPU); \
		restore_access_regs(&next->thread.acrs[0]); \
		restore_ri_cb(next->thread.ri_cb, prev->thread.ri_cb); \
		restore_gs_cb(next->thread.gs_cb); \
	} \
	prev = __switch_to(prev,next); \
} while (0)
| 48 | |
David Howells | a0616cd | 2012-03-28 18:30:02 +0100 | [diff] [blame] | 49 | #endif /* __ASM_SWITCH_TO_H */ |