/*
 * Copyright 2003-2011 NetLogic Microsystems, Inc. (NetLogic). All rights
 * reserved.
 *
 * This software is available to you under a choice of one of two
 * licenses. You may choose to be licensed under the terms of the GNU
 * General Public License (GPL) Version 2, available from the file
 * COPYING in the main directory of this source tree, or the NetLogic
 * license below:
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in
 *    the documentation and/or other materials provided with the
 *    distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY NETLOGIC ``AS IS'' AND ANY EXPRESS OR
 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL NETLOGIC OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
 * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
 * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
 * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
34
#ifndef _ASM_NLM_MIPS_EXTS_H
#define _ASM_NLM_MIPS_EXTS_H

/*
 * XLR and XLP interrupt request and interrupt mask registers.
 *
 * EIRR (interrupt request) is CP0 register $9, select 6 and
 * EIMR (interrupt mask) is CP0 register $9, select 7; both are
 * accessed as 64-bit registers.
 */
#define read_c0_eirr()		__read_64bit_c0_register($9, 6)
#define read_c0_eimr()		__read_64bit_c0_register($9, 7)
#define write_c0_eirr(val)	__write_64bit_c0_register($9, 6, val)
44
/*
 * write_c0_eimr(val) - write the 64-bit extended interrupt mask
 * register EIMR (CP0 $9, select 7).
 *
 * On a 32-bit kernel (sizeof(unsigned long) == 4) the 64-bit value is
 * a register pair (%L0 = low word, %M0 = high word); the two halves
 * are merged into a single GPR with doubleword shifts and written with
 * one dmtc0, with the assembler temporarily switched to mips64 so the
 * 64-bit opcodes are accepted.
 *
 * NOTE: Do not save/restore flags around write_c0_eimr().
 * On non-R2 platforms the flags has part of EIMR that is shadowed in STATUS
 * register. Restoring flags will overwrite the lower 8 bits of EIMR.
 *
 * Call with interrupts disabled.
 */
#define write_c0_eimr(val)						\
do {									\
	if (sizeof(unsigned long) == 4) {				\
		__asm__ __volatile__(					\
			".set\tmips64\n\t"				\
			"dsll\t%L0, %L0, 32\n\t"			\
			"dsrl\t%L0, %L0, 32\n\t"			\
			"dsll\t%M0, %M0, 32\n\t"			\
			"or\t%L0, %L0, %M0\n\t"				\
			"dmtc0\t%L0, $9, 7\n\t"				\
			".set\tmips0"					\
			: : "r" (val));					\
	} else								\
		__write_64bit_c0_register($9, 7, (val));		\
} while (0)
67
/*
 * Handling the 64 bit EIMR and EIRR registers in 32-bit mode with
 * standard functions will be very inefficient. This provides
 * optimized functions for the normal operations on the registers.
 *
 * Call with interrupts disabled.
 */

/*
 * ack_c0_eirr() - acknowledge interrupt @irq by writing (1 << irq)
 * to EIRR (CP0 $9, select 6).
 *
 * The bit mask is built in $1 (the assembler temporary, hence
 * ".set noat") so no C-level 64-bit temporary is needed on a
 * 32-bit kernel.
 */
static inline void ack_c0_eirr(int irq)
{
	__asm__ __volatile__(
		".set push\n\t"
		".set mips64\n\t"
		".set noat\n\t"
		"li $1, 1\n\t"
		"dsllv $1, $1, %0\n\t"
		"dmtc0 $1, $9, 6\n\t"
		".set pop"
		: : "r" (irq));
}
87
/*
 * set_c0_eimr() - set bit @irq in EIMR (CP0 $9, select 7) with a
 * read-modify-write (OR) sequence.
 *
 * @irq is a "+r" operand because the dsllv overwrites it with the
 * shifted bit mask; $1 is used as scratch for the old EIMR value,
 * hence ".set noat".
 *
 * Call with interrupts disabled (see comment above ack_c0_eirr()).
 */
static inline void set_c0_eimr(int irq)
{
	__asm__ __volatile__(
		".set push\n\t"
		".set mips64\n\t"
		".set noat\n\t"
		"li $1, 1\n\t"
		"dsllv %0, $1, %0\n\t"
		"dmfc0 $1, $9, 7\n\t"
		"or $1, %0\n\t"
		"dmtc0 $1, $9, 7\n\t"
		".set pop"
		: "+r" (irq));
}
102
/*
 * clear_c0_eimr() - clear bit @irq in EIMR (CP0 $9, select 7).
 *
 * The "or" followed by "xor" with the same mask clears the bit
 * regardless of its previous value.  As in set_c0_eimr(), @irq is
 * "+r" because it is overwritten with the shifted mask, and $1 is
 * scratch (".set noat").
 *
 * Call with interrupts disabled (see comment above ack_c0_eirr()).
 */
static inline void clear_c0_eimr(int irq)
{
	__asm__ __volatile__(
		".set push\n\t"
		".set mips64\n\t"
		".set noat\n\t"
		"li $1, 1\n\t"
		"dsllv %0, $1, %0\n\t"
		"dmfc0 $1, $9, 7\n\t"
		"or $1, %0\n\t"
		"xor $1, %0\n\t"
		"dmtc0 $1, $9, 7\n\t"
		".set pop"
		: "+r" (irq));
}
118
/*
 * Read c0 eimr and c0 eirr, do AND of the two values, the result is
 * the interrupts which are raised and are not masked.
 */
static inline uint64_t read_c0_eirr_and_eimr(void)
{
	uint64_t val;

#ifdef CONFIG_64BIT
	val = read_c0_eimr() & read_c0_eirr();
#else
	/*
	 * 32-bit kernel: read EIRR ($9, 6) and EIMR ($9, 7) into the
	 * two halves of the register pair, AND them in %M0, then use
	 * doubleword shifts to split the 64-bit result back into
	 * %M0 = high word, %L0 = low word of the uint64_t.
	 */
	__asm__ __volatile__(
		".set push\n\t"
		".set mips64\n\t"
		".set noat\n\t"
		"dmfc0 %M0, $9, 6\n\t"
		"dmfc0 %L0, $9, 7\n\t"
		"and %M0, %L0\n\t"
		"dsll %L0, %M0, 32\n\t"
		"dsra %M0, %M0, 32\n\t"
		"dsra %L0, %L0, 32\n\t"
		".set pop"
		: "=r" (val));
#endif

	return val;
}
146
Jayachandran C5c64250672011-05-07 01:36:40 +0530147static inline int hard_smp_processor_id(void)
148{
149 return __read_32bit_c0_register($15, 1) & 0x3ff;
150}
151
Jayachandran C77ae7982012-10-31 12:01:39 +0000152static inline int nlm_nodeid(void)
153{
154 return (__read_32bit_c0_register($15, 1) >> 5) & 0x3;
155}
156
/* Core number within the node: bits 4:2 of the EBASE register. */
static inline unsigned int nlm_core_id(void)
{
	unsigned int ebase = read_c0_ebase();

	return (ebase & 0x1c) >> 2;
}
161
/* Hardware thread within the core: bits 1:0 of the EBASE register. */
static inline unsigned int nlm_thread_id(void)
{
	unsigned int ebase = read_c0_ebase();

	return ebase & 0x3;
}
166
/*
 * __read_64bit_c2_split() - read a 64-bit coprocessor 2 register on a
 * 32-bit kernel.
 *
 * The register is fetched into one GPR with dmfc2 and then split into
 * the register pair of the unsigned long long result (%M0 = high word,
 * %L0 = low word) with doubleword shifts.  Interrupts are disabled
 * around the sequence — presumably so the full 64-bit GPR contents
 * survive until the split completes (TODO confirm against the 32-bit
 * context-save path).
 *
 * The sel == 0 branch omits the selector from the dmfc2 operand string;
 * any other selector is appended explicitly.
 */
#define __read_64bit_c2_split(source, sel)				\
({									\
	unsigned long long __val;					\
	unsigned long __flags;						\
									\
	local_irq_save(__flags);					\
	if (sel == 0)							\
		__asm__ __volatile__(					\
			".set\tmips64\n\t"				\
			"dmfc2\t%M0, " #source "\n\t"			\
			"dsll\t%L0, %M0, 32\n\t"			\
			"dsra\t%M0, %M0, 32\n\t"			\
			"dsra\t%L0, %L0, 32\n\t"			\
			".set\tmips0\n\t"				\
			: "=r" (__val));				\
	else								\
		__asm__ __volatile__(					\
			".set\tmips64\n\t"				\
			"dmfc2\t%M0, " #source ", " #sel "\n\t"		\
			"dsll\t%L0, %M0, 32\n\t"			\
			"dsra\t%M0, %M0, 32\n\t"			\
			"dsra\t%L0, %L0, 32\n\t"			\
			".set\tmips0\n\t"				\
			: "=r" (__val));				\
	local_irq_restore(__flags);					\
									\
	__val;								\
})
195
/*
 * __write_64bit_c2_split() - write a 64-bit coprocessor 2 register on
 * a 32-bit kernel.
 *
 * Mirror image of __read_64bit_c2_split(): the two halves of the
 * 64-bit value (%L0 = low word, %M0 = high word) are merged into a
 * single GPR with doubleword shifts and written with one dmtc2, under
 * local_irq_save/restore.  The sel == 0 branch omits the selector
 * from the dmtc2 operand string.
 */
#define __write_64bit_c2_split(source, sel, val)			\
do {									\
	unsigned long __flags;						\
									\
	local_irq_save(__flags);					\
	if (sel == 0)							\
		__asm__ __volatile__(					\
			".set\tmips64\n\t"				\
			"dsll\t%L0, %L0, 32\n\t"			\
			"dsrl\t%L0, %L0, 32\n\t"			\
			"dsll\t%M0, %M0, 32\n\t"			\
			"or\t%L0, %L0, %M0\n\t"				\
			"dmtc2\t%L0, " #source "\n\t"			\
			".set\tmips0\n\t"				\
			: : "r" (val));					\
	else								\
		__asm__ __volatile__(					\
			".set\tmips64\n\t"				\
			"dsll\t%L0, %L0, 32\n\t"			\
			"dsrl\t%L0, %L0, 32\n\t"			\
			"dsll\t%M0, %M0, 32\n\t"			\
			"or\t%L0, %L0, %M0\n\t"				\
			"dmtc2\t%L0, " #source ", " #sel "\n\t"		\
			".set\tmips0\n\t"				\
			: : "r" (val));					\
	local_irq_restore(__flags);					\
} while (0)
223
/*
 * __read_32bit_c2_register() - read a 32-bit coprocessor 2 register
 * with mfc2, returning the value as uint32_t.
 *
 * The sel == 0 branch omits the selector from the mfc2 operand string;
 * any other selector is appended explicitly.
 */
#define __read_32bit_c2_register(source, sel)				\
({ uint32_t __res;							\
	if (sel == 0)							\
		__asm__ __volatile__(					\
			".set\tmips32\n\t"				\
			"mfc2\t%0, " #source "\n\t"			\
			".set\tmips0\n\t"				\
			: "=r" (__res));				\
	else								\
		__asm__ __volatile__(					\
			".set\tmips32\n\t"				\
			"mfc2\t%0, " #source ", " #sel "\n\t"		\
			".set\tmips0\n\t"				\
			: "=r" (__res));				\
	__res;								\
})
240
/*
 * __read_64bit_c2_register() - read a 64-bit coprocessor 2 register.
 *
 * On a 32-bit kernel (sizeof(unsigned long) == 4) this delegates to
 * __read_64bit_c2_split(); on 64-bit the register is read directly
 * with dmfc2.  The sel == 0 branch omits the selector from the
 * operand string.
 */
#define __read_64bit_c2_register(source, sel)				\
({ unsigned long long __res;						\
	if (sizeof(unsigned long) == 4)					\
		__res = __read_64bit_c2_split(source, sel);		\
	else if (sel == 0)						\
		__asm__ __volatile__(					\
			".set\tmips64\n\t"				\
			"dmfc2\t%0, " #source "\n\t"			\
			".set\tmips0\n\t"				\
			: "=r" (__res));				\
	else								\
		__asm__ __volatile__(					\
			".set\tmips64\n\t"				\
			"dmfc2\t%0, " #source ", " #sel "\n\t"		\
			".set\tmips0\n\t"				\
			: "=r" (__res));				\
	__res;								\
})
259
/*
 * __write_64bit_c2_register() - write a 64-bit coprocessor 2 register.
 *
 * On a 32-bit kernel this delegates to __write_64bit_c2_split(); on
 * 64-bit the value is written directly with dmtc2.  The "Jr"
 * constraint with the %z operand modifier lets the compiler use $0
 * when the value is the constant zero.
 */
#define __write_64bit_c2_register(register, sel, value)			\
do {									\
	if (sizeof(unsigned long) == 4)					\
		__write_64bit_c2_split(register, sel, value);		\
	else if (sel == 0)						\
		__asm__ __volatile__(					\
			".set\tmips64\n\t"				\
			"dmtc2\t%z0, " #register "\n\t"			\
			".set\tmips0\n\t"				\
			: : "Jr" (value));				\
	else								\
		__asm__ __volatile__(					\
			".set\tmips64\n\t"				\
			"dmtc2\t%z0, " #register ", " #sel "\n\t"	\
			".set\tmips0\n\t"				\
			: : "Jr" (value));				\
} while (0)
277
/*
 * __write_32bit_c2_register() - write a 32-bit coprocessor 2 register
 * with mtc2.
 *
 * The sel == 0 branch omits the selector from the mtc2 operand string;
 * any other selector is appended explicitly.  The "Jr" constraint with
 * the %z operand modifier lets the compiler use $0 when the value is
 * the constant zero.
 *
 * Converted from a GNU statement expression (which yielded no value
 * anyway, since it ended in an if statement) to do { } while (0), for
 * statement-safe expansion consistent with __write_64bit_c2_register()
 * above.
 */
#define __write_32bit_c2_register(reg, sel, value)			\
do {									\
	if (sel == 0)							\
		__asm__ __volatile__(					\
			".set\tmips32\n\t"				\
			"mtc2\t%z0, " #reg "\n\t"			\
			".set\tmips0\n\t"				\
			: : "Jr" (value));				\
	else								\
		__asm__ __volatile__(					\
			".set\tmips32\n\t"				\
			"mtc2\t%z0, " #reg ", " #sel "\n\t"		\
			".set\tmips0\n\t"				\
			: : "Jr" (value));				\
} while (0)
293
Jayachandran C5c64250672011-05-07 01:36:40 +0530294#endif /*_ASM_NLM_MIPS_EXTS_H */