/*
 * Copyright 2003-2011 NetLogic Microsystems, Inc. (NetLogic). All rights
 * reserved.
 *
 * This software is available to you under a choice of one of two
 * licenses. You may choose to be licensed under the terms of the GNU
 * General Public License (GPL) Version 2, available from the file
 * COPYING in the main directory of this source tree, or the NetLogic
 * license below:
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in
 *    the documentation and/or other materials provided with the
 *    distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY NETLOGIC ``AS IS'' AND ANY EXPRESS OR
 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL NETLOGIC OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
 * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
 * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
 * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#ifndef _ASM_NLM_MIPS_EXTS_H
#define _ASM_NLM_MIPS_EXTS_H

/*
 * XLR and XLP interrupt request (EIRR) and interrupt mask (EIMR)
 * registers
 */
#define read_c0_eirr()		__read_64bit_c0_register($9, 6)
#define read_c0_eimr()		__read_64bit_c0_register($9, 7)
#define write_c0_eirr(val)	__write_64bit_c0_register($9, 6, val)

/*
 * Writing EIMR in 32-bit mode is a special case: the lower 8 bits of
 * EIMR are shadowed in the IM bits of the status register, so we
 * cannot simply save and restore the status register around the split
 * write.  The saved flags are patched with the new EIMR[7:0] before
 * they are restored.
 */
#define write_c0_eimr(val)	\
do {	\
	if (sizeof(unsigned long) == 4) {	\
		unsigned long __flags;	\
	\
		local_irq_save(__flags);	\
		__asm__ __volatile__(	\
			".set\tmips64\n\t"	\
			"dsll\t%L0, %L0, 32\n\t"	\
			"dsrl\t%L0, %L0, 32\n\t"	\
			"dsll\t%M0, %M0, 32\n\t"	\
			"or\t%L0, %L0, %M0\n\t"	\
			"dmtc0\t%L0, $9, 7\n\t"	\
			".set\tmips0"	\
			: : "r" (val));	\
		__flags = (__flags & 0xffff00ff) | (((val) & 0xff) << 8);\
		local_irq_restore(__flags);	\
	} else	\
		__write_64bit_c0_register($9, 7, (val));	\
} while (0)

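/*
 * Illustrative sketch (added for this rewrite, not part of the
 * upstream file): callers can treat write_c0_eimr() as a plain 64-bit
 * read-modify-write; on a 32-bit kernel the macro above also patches
 * the saved status flags so the IM bits that shadow EIMR[7:0] are not
 * clobbered on restore.  The 'hwirq' name is hypothetical.
 *
 *	uint64_t mask;
 *
 *	mask = read_c0_eimr();
 *	write_c0_eimr(mask | (1ull << hwirq));
 */
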
/*
 * Handling the 64-bit EIMR and EIRR registers in 32-bit mode with the
 * standard split functions would be inefficient, so the helpers below
 * are optimized for the common operations on these registers.
 *
 * Call with interrupts disabled.
 */
/* acknowledge interrupt 'irq' by writing its bit to EIRR */
static inline void ack_c0_eirr(int irq)
{
	__asm__ __volatile__(
		".set push\n\t"
		".set mips64\n\t"
		".set noat\n\t"
		"li $1, 1\n\t"
		"dsllv $1, $1, %0\n\t"
		"dmtc0 $1, $9, 6\n\t"
		".set pop"
		: : "r" (irq));
}

/* enable (unmask) interrupt 'irq' by setting its bit in EIMR */
static inline void set_c0_eimr(int irq)
{
	__asm__ __volatile__(
		".set push\n\t"
		".set mips64\n\t"
		".set noat\n\t"
		"li $1, 1\n\t"
		"dsllv %0, $1, %0\n\t"
		"dmfc0 $1, $9, 7\n\t"
		"or $1, %0\n\t"
		"dmtc0 $1, $9, 7\n\t"
		".set pop"
		: "+r" (irq));
}

/* disable (mask) interrupt 'irq' by clearing its bit in EIMR */
static inline void clear_c0_eimr(int irq)
{
	__asm__ __volatile__(
		".set push\n\t"
		".set mips64\n\t"
		".set noat\n\t"
		"li $1, 1\n\t"
		"dsllv %0, $1, %0\n\t"
		"dmfc0 $1, $9, 7\n\t"
		"or $1, %0\n\t"
		"xor $1, %0\n\t"
		"dmtc0 $1, $9, 7\n\t"
		".set pop"
		: "+r" (irq));
}

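/*
 * Illustrative sketch (added for this rewrite, not part of the
 * upstream file): the three helpers above map naturally onto irq_chip
 * callbacks.  struct irq_chip and irqd_to_hwirq() are the generic
 * interfaces from <linux/irq.h>; the "example_pic" names are
 * hypothetical.  These callbacks are normally invoked with interrupts
 * disabled, as the helpers above require.
 *
 *	static void example_pic_ack(struct irq_data *d)
 *	{
 *		ack_c0_eirr(irqd_to_hwirq(d));
 *	}
 *
 *	static void example_pic_unmask(struct irq_data *d)
 *	{
 *		set_c0_eimr(irqd_to_hwirq(d));
 *	}
 *
 *	static void example_pic_mask(struct irq_data *d)
 *	{
 *		clear_c0_eimr(irqd_to_hwirq(d));
 *	}
 *
 *	static struct irq_chip example_pic = {
 *		.name		= "example-pic",
 *		.irq_ack	= example_pic_ack,
 *		.irq_mask	= example_pic_mask,
 *		.irq_unmask	= example_pic_unmask,
 *	};
 */
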
/*
 * Read c0 EIMR and c0 EIRR and AND the two values; the result is the
 * set of interrupts which are raised and not masked.
 */
static inline uint64_t read_c0_eirr_and_eimr(void)
{
	uint64_t val;

#ifdef CONFIG_64BIT
	val = read_c0_eimr() & read_c0_eirr();
#else
	__asm__ __volatile__(
		".set push\n\t"
		".set mips64\n\t"
		".set noat\n\t"
		"dmfc0 %M0, $9, 6\n\t"
		"dmfc0 %L0, $9, 7\n\t"
		"and %M0, %L0\n\t"
		"dsll %L0, %M0, 32\n\t"
		"dsra %M0, %M0, 32\n\t"
		"dsra %L0, %L0, 32\n\t"
		".set pop"
		: "=r" (val));
#endif

	return val;
}

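/*
 * Illustrative sketch (added for this rewrite, not part of the
 * upstream file): a dispatch routine can combine
 * read_c0_eirr_and_eimr() with ack_c0_eirr() to drain the pending,
 * unmasked interrupts.  __ffs64() is the generic helper from
 * <linux/bitops.h>; nlm_handle_hwirq() is a hypothetical per-interrupt
 * handler, not a real kernel symbol.
 *
 *	uint64_t pending;
 *	int hwirq;
 *
 *	pending = read_c0_eirr_and_eimr();
 *	while (pending) {
 *		hwirq = __ffs64(pending);
 *		ack_c0_eirr(hwirq);
 *		nlm_handle_hwirq(hwirq);
 *		pending &= pending - 1;
 *	}
 *
 * ack_c0_eirr() acknowledges the interrupt before the handler runs,
 * and "pending &= pending - 1" drops the lowest set bit so each
 * raised, unmasked interrupt is handled exactly once.
 */
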
/* physical CPU number, from the low bits of the EBASE register */
static inline int hard_smp_processor_id(void)
{
	return __read_32bit_c0_register($15, 1) & 0x3ff;
}

/* node number of the current CPU (EBASE bits [6:5]) */
static inline int nlm_nodeid(void)
{
	return (__read_32bit_c0_register($15, 1) >> 5) & 0x3;
}

/* core number of the current CPU within its node (EBASE bits [4:2]) */
static inline unsigned int nlm_core_id(void)
{
	return (read_c0_ebase() & 0x1c) >> 2;
}

/* hardware thread number of the current CPU within its core (EBASE bits [1:0]) */
static inline unsigned int nlm_thread_id(void)
{
	return read_c0_ebase() & 0x3;
}

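/*
 * Note (added for this rewrite): per the masks above, EBASE bits
 * [1:0] give the thread, [4:2] the core and [6:5] the node, so the
 * low bits of the physical CPU id decompose as
 *
 *	cpu = (nlm_nodeid() << 5) | (nlm_core_id() << 2) | nlm_thread_id();
 */
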
#define __read_64bit_c2_split(source, sel)	\
({	\
	unsigned long long __val;	\
	unsigned long __flags;	\
	\
	local_irq_save(__flags);	\
	if (sel == 0)	\
		__asm__ __volatile__(	\
			".set\tmips64\n\t"	\
			"dmfc2\t%M0, " #source "\n\t"	\
			"dsll\t%L0, %M0, 32\n\t"	\
			"dsra\t%M0, %M0, 32\n\t"	\
			"dsra\t%L0, %L0, 32\n\t"	\
			".set\tmips0\n\t"	\
			: "=r" (__val));	\
	else	\
		__asm__ __volatile__(	\
			".set\tmips64\n\t"	\
			"dmfc2\t%M0, " #source ", " #sel "\n\t"	\
			"dsll\t%L0, %M0, 32\n\t"	\
			"dsra\t%M0, %M0, 32\n\t"	\
			"dsra\t%L0, %L0, 32\n\t"	\
			".set\tmips0\n\t"	\
			: "=r" (__val));	\
	local_irq_restore(__flags);	\
	\
	__val;	\
})

#define __write_64bit_c2_split(source, sel, val)	\
do {	\
	unsigned long __flags;	\
	\
	local_irq_save(__flags);	\
	if (sel == 0)	\
		__asm__ __volatile__(	\
			".set\tmips64\n\t"	\
			"dsll\t%L0, %L0, 32\n\t"	\
			"dsrl\t%L0, %L0, 32\n\t"	\
			"dsll\t%M0, %M0, 32\n\t"	\
			"or\t%L0, %L0, %M0\n\t"	\
			"dmtc2\t%L0, " #source "\n\t"	\
			".set\tmips0\n\t"	\
			: : "r" (val));	\
	else	\
		__asm__ __volatile__(	\
			".set\tmips64\n\t"	\
			"dsll\t%L0, %L0, 32\n\t"	\
			"dsrl\t%L0, %L0, 32\n\t"	\
			"dsll\t%M0, %M0, 32\n\t"	\
			"or\t%L0, %L0, %M0\n\t"	\
			"dmtc2\t%L0, " #source ", " #sel "\n\t"	\
			".set\tmips0\n\t"	\
			: : "r" (val));	\
	local_irq_restore(__flags);	\
} while (0)

#define __read_32bit_c2_register(source, sel)	\
({	uint32_t __res;	\
	if (sel == 0)	\
		__asm__ __volatile__(	\
			".set\tmips32\n\t"	\
			"mfc2\t%0, " #source "\n\t"	\
			".set\tmips0\n\t"	\
			: "=r" (__res));	\
	else	\
		__asm__ __volatile__(	\
			".set\tmips32\n\t"	\
			"mfc2\t%0, " #source ", " #sel "\n\t"	\
			".set\tmips0\n\t"	\
			: "=r" (__res));	\
	__res;	\
})

#define __read_64bit_c2_register(source, sel)	\
({	unsigned long long __res;	\
	if (sizeof(unsigned long) == 4)	\
		__res = __read_64bit_c2_split(source, sel);	\
	else if (sel == 0)	\
		__asm__ __volatile__(	\
			".set\tmips64\n\t"	\
			"dmfc2\t%0, " #source "\n\t"	\
			".set\tmips0\n\t"	\
			: "=r" (__res));	\
	else	\
		__asm__ __volatile__(	\
			".set\tmips64\n\t"	\
			"dmfc2\t%0, " #source ", " #sel "\n\t"	\
			".set\tmips0\n\t"	\
			: "=r" (__res));	\
	__res;	\
})

#define __write_64bit_c2_register(register, sel, value)	\
do {	\
	if (sizeof(unsigned long) == 4)	\
		__write_64bit_c2_split(register, sel, value);	\
	else if (sel == 0)	\
		__asm__ __volatile__(	\
			".set\tmips64\n\t"	\
			"dmtc2\t%z0, " #register "\n\t"	\
			".set\tmips0\n\t"	\
			: : "Jr" (value));	\
	else	\
		__asm__ __volatile__(	\
			".set\tmips64\n\t"	\
			"dmtc2\t%z0, " #register ", " #sel "\n\t"	\
			".set\tmips0\n\t"	\
			: : "Jr" (value));	\
} while (0)

#define __write_32bit_c2_register(reg, sel, value)	\
({	\
	if (sel == 0)	\
		__asm__ __volatile__(	\
			".set\tmips32\n\t"	\
			"mtc2\t%z0, " #reg "\n\t"	\
			".set\tmips0\n\t"	\
			: : "Jr" (value));	\
	else	\
		__asm__ __volatile__(	\
			".set\tmips32\n\t"	\
			"mtc2\t%z0, " #reg ", " #sel "\n\t"	\
			".set\tmips0\n\t"	\
			: : "Jr" (value));	\
})

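/*
 * Illustrative sketch (added for this rewrite, not part of the
 * upstream file): the COP2 accessors above are used like the standard
 * c0 register macros.  The register number and select below are
 * placeholders, not a real COP2 register assignment:
 *
 *	uint64_t status;
 *
 *	status = __read_64bit_c2_register($8, 0);
 *	__write_64bit_c2_register($8, 0, status | 0x1);
 *
 * On a 32-bit kernel the *_split variants are used automatically,
 * with interrupts disabled around the two-part access.
 */
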
#endif /* _ASM_NLM_MIPS_EXTS_H */