David Howells | a0616cd | 2012-03-28 18:30:02 +0100 | [diff] [blame] | 1 | /* |
| 2 | * Copyright IBM Corp. 1999, 2009 |
| 3 | * |
| 4 | * Author(s): Martin Schwidefsky <schwidefsky@de.ibm.com> |
| 5 | */ |
| 6 | |
| 7 | #ifndef __ASM_SWITCH_TO_H |
| 8 | #define __ASM_SWITCH_TO_H |
| 9 | |
| 10 | #include <linux/thread_info.h> |
Hendrik Brueckner | 904818e | 2015-06-11 15:33:54 +0200 | [diff] [blame] | 11 | #include <asm/fpu-internal.h> |
Heiko Carstens | bee5c28 | 2013-08-20 10:24:12 +0200 | [diff] [blame] | 12 | #include <asm/ptrace.h> |
David Howells | a0616cd | 2012-03-28 18:30:02 +0100 | [diff] [blame] | 13 | |
| 14 | extern struct task_struct *__switch_to(void *, void *); |
Michael Mueller | 64597f9 | 2013-07-02 22:58:26 +0200 | [diff] [blame] | 15 | extern void update_cr_regs(struct task_struct *task); |
David Howells | a0616cd | 2012-03-28 18:30:02 +0100 | [diff] [blame] | 16 | |
/*
 * save_access_regs - store all 16 access registers into @acrs.
 *
 * @acrs: buffer of at least NUM_ACRS unsigned ints to receive a0-a15.
 *
 * The local struct typedef wraps the whole NUM_ACRS-word array so the
 * "=Q" output constraint tells the compiler that STAM writes the entire
 * region, not just the first word (prevents the compiler from assuming
 * the rest of the buffer is unmodified).
 */
static inline void save_access_regs(unsigned int *acrs)
{
	typedef struct { int _[NUM_ACRS]; } acrstype;

	asm volatile("stam 0,15,%0" : "=Q" (*(acrstype *)acrs));
}
| 23 | |
/*
 * restore_access_regs - load all 16 access registers from @acrs.
 *
 * @acrs: buffer of at least NUM_ACRS unsigned ints holding a0-a15.
 *
 * Mirror of save_access_regs(): the struct typedef and "Q" input
 * constraint mark the whole NUM_ACRS-word region as read by LAM, so
 * the compiler keeps the entire buffer alive across the asm.
 */
static inline void restore_access_regs(unsigned int *acrs)
{
	typedef struct { int _[NUM_ACRS]; } acrstype;

	asm volatile("lam 0,15,%0" : : "Q" (*(acrstype *)acrs));
}
| 30 | |
/*
 * switch_to - perform the context switch from task @prev to task @next;
 * @last receives the task that was previously running (here @prev is
 * reused for that purpose via the final assignment).
 *
 * FPU state, access registers and the runtime instrumentation control
 * block are per-user-task context, so they are saved/restored only when
 * the task has an mm (->mm != NULL); kernel threads skip this.
 *
 * set_cpu_flag(CIF_FPU) marks the FPU/vector registers as not yet
 * loaded for @next — presumably they are restored lazily later (e.g. on
 * return to user space); confirm against the entry/FPU code.
 *
 * Ordering matters: @prev's state must be saved before @next's control
 * registers and state are installed, and __switch_to() (the actual
 * kernel-stack/register switch) must come last.
 */
#define switch_to(prev,next,last) do {					\
	if (prev->mm) {							\
		save_fpu_regs();					\
		save_access_regs(&prev->thread.acrs[0]);		\
		save_ri_cb(prev->thread.ri_cb);				\
	}								\
	if (next->mm) {							\
		update_cr_regs(next);					\
		set_cpu_flag(CIF_FPU);					\
		restore_access_regs(&next->thread.acrs[0]);		\
		restore_ri_cb(next->thread.ri_cb, prev->thread.ri_cb);	\
	}								\
	prev = __switch_to(prev,next);					\
} while (0)
| 45 | |
David Howells | a0616cd | 2012-03-28 18:30:02 +0100 | [diff] [blame] | 46 | #endif /* __ASM_SWITCH_TO_H */ |