/*
 * Copyright IBM Corp. 1999, 2009
 *
 * Author(s): Martin Schwidefsky <schwidefsky@de.ibm.com>
 */

#ifndef __ASM_SWITCH_TO_H
#define __ASM_SWITCH_TO_H

#include <linux/thread_info.h>
#include <asm/ptrace.h>

extern struct task_struct *__switch_to(void *, void *);
extern void update_cr_regs(struct task_struct *task);

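/*
 * Check whether fpc is a valid floating-point-control word: load it into the
 * FPC register and put the previous value back. Returns 0 if the value is
 * accepted, -EINVAL if loading it raises an exception (used, for example, to
 * validate user-supplied FPC values before they are restored).
 */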
static inline int test_fp_ctl(u32 fpc)
{
	u32 orig_fpc;
	int rc;

	asm volatile(
		"	efpc	%1\n"
		"	sfpc	%2\n"
		"0:	sfpc	%1\n"
		"	la	%0,0\n"
		"1:\n"
		EX_TABLE(0b,1b)
		: "=d" (rc), "=d" (orig_fpc)
		: "d" (fpc), "0" (-EINVAL));
	return rc;
}

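/* Store the current floating-point-control (FPC) register into *fpc. */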
static inline void save_fp_ctl(u32 *fpc)
{
	asm volatile(
		"	stfpc	%0\n"
		: "+Q" (*fpc));
}

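/*
 * Load the floating-point-control register from *fpc. Returns 0 on success,
 * -EINVAL if the value is rejected; the exception table entry turns the
 * resulting program check into the error return.
 */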
40static inline int restore_fp_ctl(u32 *fpc)
41{
42 int rc;
43
Martin Schwidefsky4725c862013-10-15 16:08:34 +020044 asm volatile(
Martin Schwidefsky9f867452014-07-15 10:41:37 +020045 " lfpc %1\n"
46 "0: la %0,0\n"
Martin Schwidefsky4725c862013-10-15 16:08:34 +020047 "1:\n"
48 EX_TABLE(0b,1b)
49 : "=d" (rc) : "Q" (*fpc), "0" (-EINVAL));
50 return rc;
51}
52
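/* Store floating-point registers f0-f15 into the freg_t save area. */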
static inline void save_fp_regs(freg_t *fprs)
{
	asm volatile("std 0,%0" : "=Q" (fprs[0]));
	asm volatile("std 2,%0" : "=Q" (fprs[2]));
	asm volatile("std 4,%0" : "=Q" (fprs[4]));
	asm volatile("std 6,%0" : "=Q" (fprs[6]));
	asm volatile("std 1,%0" : "=Q" (fprs[1]));
	asm volatile("std 3,%0" : "=Q" (fprs[3]));
	asm volatile("std 5,%0" : "=Q" (fprs[5]));
	asm volatile("std 7,%0" : "=Q" (fprs[7]));
	asm volatile("std 8,%0" : "=Q" (fprs[8]));
	asm volatile("std 9,%0" : "=Q" (fprs[9]));
	asm volatile("std 10,%0" : "=Q" (fprs[10]));
	asm volatile("std 11,%0" : "=Q" (fprs[11]));
	asm volatile("std 12,%0" : "=Q" (fprs[12]));
	asm volatile("std 13,%0" : "=Q" (fprs[13]));
	asm volatile("std 14,%0" : "=Q" (fprs[14]));
	asm volatile("std 15,%0" : "=Q" (fprs[15]));
}

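/* Load floating-point registers f0-f15 from the freg_t save area. */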
static inline void restore_fp_regs(freg_t *fprs)
{
	asm volatile("ld 0,%0" : : "Q" (fprs[0]));
	asm volatile("ld 2,%0" : : "Q" (fprs[2]));
	asm volatile("ld 4,%0" : : "Q" (fprs[4]));
	asm volatile("ld 6,%0" : : "Q" (fprs[6]));
	asm volatile("ld 1,%0" : : "Q" (fprs[1]));
	asm volatile("ld 3,%0" : : "Q" (fprs[3]));
	asm volatile("ld 5,%0" : : "Q" (fprs[5]));
	asm volatile("ld 7,%0" : : "Q" (fprs[7]));
	asm volatile("ld 8,%0" : : "Q" (fprs[8]));
	asm volatile("ld 9,%0" : : "Q" (fprs[9]));
	asm volatile("ld 10,%0" : : "Q" (fprs[10]));
	asm volatile("ld 11,%0" : : "Q" (fprs[11]));
	asm volatile("ld 12,%0" : : "Q" (fprs[12]));
	asm volatile("ld 13,%0" : : "Q" (fprs[13]));
	asm volatile("ld 14,%0" : : "Q" (fprs[14]));
	asm volatile("ld 15,%0" : : "Q" (fprs[15]));
}

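/*
 * Store vector registers v0-v31 into the __NUM_VXRS entry save area. The
 * VSTM instructions are hand-encoded as .word sequences, presumably so the
 * file still assembles with toolchains that lack the vector mnemonics.
 */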
static inline void save_vx_regs(__vector128 *vxrs)
{
	typedef struct { __vector128 _[__NUM_VXRS]; } addrtype;

	asm volatile(
		"	la	1,%0\n"
		"	.word	0xe70f,0x1000,0x003e\n"	/* vstm 0,15,0(1) */
		"	.word	0xe70f,0x1100,0x0c3e\n"	/* vstm 16,31,256(1) */
		: "=Q" (*(addrtype *) vxrs) : : "1");
}

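/*
 * Like save_vx_regs(), but usable when the vector facility may not be
 * enabled for the current context: with interrupts disabled it temporarily
 * sets the AFP-register and vector enablement controls in control register 0,
 * saves the vector registers and then restores CR0 and the interrupt state.
 */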
static inline void save_vx_regs_safe(__vector128 *vxrs)
{
	unsigned long cr0, flags;

	flags = arch_local_irq_save();
	__ctl_store(cr0, 0, 0);
	__ctl_set_bit(0, 17);
	__ctl_set_bit(0, 18);
	save_vx_regs(vxrs);
	__ctl_load(cr0, 0, 0);
	arch_local_irq_restore(flags);
}

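/* Load vector registers v0-v31 from the save area (hand-encoded VLM). */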
static inline void restore_vx_regs(__vector128 *vxrs)
{
	typedef struct { __vector128 _[__NUM_VXRS]; } addrtype;

	asm volatile(
		"	la	1,%0\n"
		"	.word	0xe70f,0x1000,0x0036\n"	/* vlm 0,15,0(1) */
		"	.word	0xe70f,0x1100,0x0c36\n"	/* vlm 16,31,256(1) */
		: : "Q" (*(addrtype *) vxrs) : "1");
}

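/*
 * Save the FP/vector state of a task: tasks with a vector save area get the
 * full set of 32 vector registers (f0-f15 live in the high halves of v0-v15),
 * all others only the 16 floating-point registers.
 */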
static inline void save_fp_vx_regs(struct task_struct *task)
{
	if (task->thread.vxrs)
		save_vx_regs(task->thread.vxrs);
	else
		save_fp_regs(task->thread.fp_regs.fprs);
}

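/* Counterpart of save_fp_vx_regs(): reload either the vector or FP registers. */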
static inline void restore_fp_vx_regs(struct task_struct *task)
{
	if (task->thread.vxrs)
		restore_vx_regs(task->thread.vxrs);
	else
		restore_fp_regs(task->thread.fp_regs.fprs);
}

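/* Store access registers a0-a15 with a single stam. */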
static inline void save_access_regs(unsigned int *acrs)
{
	typedef struct { int _[NUM_ACRS]; } acrstype;

	asm volatile("stam 0,15,%0" : "=Q" (*(acrstype *)acrs));
}

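/* Load access registers a0-a15 with a single lam. */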
static inline void restore_access_regs(unsigned int *acrs)
{
	typedef struct { int _[NUM_ACRS]; } acrstype;

	asm volatile("lam 0,15,%0" : : "Q" (*(acrstype *)acrs));
}

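/*
 * Context switch: save the FP control, FP/vector, access register and
 * runtime-instrumentation state of prev and restore it for next, then let
 * __switch_to() switch over to the new task. The register state is only
 * handled for tasks that own an mm; kernel threads (mm == NULL) skip it.
 */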
#define switch_to(prev,next,last) do {					\
	if (prev->mm) {							\
		save_fp_ctl(&prev->thread.fp_regs.fpc);			\
		save_fp_vx_regs(prev);					\
		save_access_regs(&prev->thread.acrs[0]);		\
		save_ri_cb(prev->thread.ri_cb);				\
	}								\
	if (next->mm) {							\
		update_cr_regs(next);					\
		restore_fp_ctl(&next->thread.fp_regs.fpc);		\
		restore_fp_vx_regs(next);				\
		restore_access_regs(&next->thread.acrs[0]);		\
		restore_ri_cb(next->thread.ri_cb, prev->thread.ri_cb);	\
	}								\
	prev = __switch_to(prev,next);					\
} while (0)

#endif /* __ASM_SWITCH_TO_H */