/*
 * Copyright (C) 1999 Cort Dougan <cort@cs.nmt.edu>
 */
#ifndef _ASM_POWERPC_SWITCH_TO_H
#define _ASM_POWERPC_SWITCH_TO_H

#include <asm/reg.h>

struct thread_struct;
struct task_struct;
struct pt_regs;

extern struct task_struct *__switch_to(struct task_struct *,
	struct task_struct *);
#define switch_to(prev, next, last)	((last) = __switch_to((prev), (next)))
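
/*
 * A minimal sketch (not the scheduler's actual code) of how the generic
 * context-switch path uses this macro: "prev" is the outgoing task,
 * "next" the incoming one, and "last" receives whichever task the CPU
 * was running before control returned to us:
 *
 *	struct task_struct *last;
 *
 *	switch_to(prev, next, last);
 *	... now running as "next"; "last" is the task we switched from ...
 */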

extern struct task_struct *_switch(struct thread_struct *prev,
				   struct thread_struct *next);

extern void switch_booke_debug_regs(struct debug_reg *new_debug);

extern int emulate_altivec(struct pt_regs *);

extern void flush_all_to_thread(struct task_struct *);
extern void giveup_all(struct task_struct *);

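/*
 * flush_*_to_thread() forces a task's live register state for the given
 * facility out to its thread_struct so it can be inspected or modified
 * in memory; giveup_*() makes the task relinquish the facility.  A
 * hedged sketch of a typical caller (ptrace-style code reading FP
 * state; the field name is illustrative):
 *
 *	flush_fp_to_thread(target);
 *	memcpy(buf, &target->thread.fp_state, sizeof(buf));
 */
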
#ifdef CONFIG_PPC_FPU
extern void enable_kernel_fp(void);
extern void flush_fp_to_thread(struct task_struct *);
extern void giveup_fpu(struct task_struct *);
extern void __giveup_fpu(struct task_struct *);
static inline void disable_kernel_fp(void)
{
	msr_check_and_clear(MSR_FP);
}
#else
static inline void flush_fp_to_thread(struct task_struct *t) { }
#endif
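
/*
 * A sketch of the usual pattern for using a facility's enable/disable
 * pair to run its instructions in kernel context; the caller must keep
 * the section non-preemptible, and the altivec/vsx/spe variants below
 * follow the same shape:
 *
 *	preempt_disable();
 *	enable_kernel_fp();
 *	... FP instructions ...
 *	disable_kernel_fp();
 *	preempt_enable();
 */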

#ifdef CONFIG_ALTIVEC
extern void enable_kernel_altivec(void);
extern void flush_altivec_to_thread(struct task_struct *);
extern void giveup_altivec(struct task_struct *);
extern void __giveup_altivec(struct task_struct *);
static inline void disable_kernel_altivec(void)
{
	msr_check_and_clear(MSR_VEC);
}
#endif

#ifdef CONFIG_VSX
extern void enable_kernel_vsx(void);
extern void flush_vsx_to_thread(struct task_struct *);
extern void giveup_vsx(struct task_struct *);
extern void __giveup_vsx(struct task_struct *);
static inline void disable_kernel_vsx(void)
{
	/* VSX overlays the FP and VMX register files, so drop all three. */
	msr_check_and_clear(MSR_FP|MSR_VEC|MSR_VSX);
}
#endif

#ifdef CONFIG_SPE
extern void enable_kernel_spe(void);
extern void flush_spe_to_thread(struct task_struct *);
extern void giveup_spe(struct task_struct *);
extern void __giveup_spe(struct task_struct *);
static inline void disable_kernel_spe(void)
{
	msr_check_and_clear(MSR_SPE);
}
#endif

static inline void clear_task_ebb(struct task_struct *t)
{
#ifdef CONFIG_PPC_BOOK3S_64
	/* EBB perf events are not inherited, so clear all EBB state. */
	t->thread.ebbrr = 0;
	t->thread.ebbhr = 0;
	t->thread.bescr = 0;
	t->thread.mmcr2 = 0;
	t->thread.mmcr0 = 0;
	t->thread.siar = 0;
	t->thread.sdar = 0;
	t->thread.sier = 0;
	t->thread.used_ebb = 0;
#endif
}
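
/*
 * A hedged sketch of where clear_task_ebb() fits: the arch fork path
 * calls it on the newly copied child task so Event-Based Branch state
 * never leaks across fork:
 *
 *	int copy_thread(...)
 *	{
 *		...
 *		clear_task_ebb(p);	// p is the new child task
 *		...
 *	}
 */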

#endif /* _ASM_POWERPC_SWITCH_TO_H */