/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 2003, 2004 Ralf Baechle <ralf@linux-mips.org>
 * Copyright (C) MIPS Technologies, Inc.
 *   written by Ralf Baechle <ralf@linux-mips.org>
 */
#ifndef _ASM_HAZARDS_H
#define _ASM_HAZARDS_H


/*
 * ASMMACRO(name, code...) defines each hazard barrier exactly once and makes
 * it usable from both worlds:
 *
 *  - in assembly sources (__ASSEMBLY__) it becomes a plain gas .macro;
 *  - in C sources it emits the same .macro into the assembler stream AND
 *    wraps it in a static inline function of the same name, so C code can
 *    simply call e.g. irq_enable_hazard().
 */
#ifdef __ASSEMBLY__
#define ASMMACRO(name, code...) .macro name; code; .endm
#else

#define ASMMACRO(name, code...)						\
__asm__(".macro " #name "; " #code "; .endm");				\
									\
static inline void name(void)						\
{									\
	__asm__ __volatile__ (#name);					\
}

#endif

/*
 * _ssnop and _ehb are encoded as special forms of sll with $0 as the
 * destination, so they are architectural no-ops on CPUs that do not
 * implement them.
 */
ASMMACRO(_ssnop,
	 sll	$0, $0, 1
	)

ASMMACRO(_ehb,
	 sll	$0, $0, 3
	)

/*
 * TLB hazards
 */
#if defined(CONFIG_CPU_MIPSR2)

/*
 * MIPSR2 defines ehb for hazard avoidance
 */

ASMMACRO(mtc0_tlbw_hazard,
	 _ehb
	)
ASMMACRO(tlbw_use_hazard,
	 _ehb
	)
ASMMACRO(tlb_probe_hazard,
	 _ehb
	)
ASMMACRO(irq_enable_hazard,
	 _ehb
	)
ASMMACRO(irq_disable_hazard,
	 _ehb
	)
ASMMACRO(back_to_back_c0_hazard,
	 _ehb
	)
/*
 * gcc has a tradition of misscompiling the previous construct using the
 * address of a label as argument to inline assembler.  Gas otoh has the
 * annoying difference between la and dla which are only usable for 32-bit
 * rsp. 64-bit code, so can't be used without conditional compilation.
 * The alterantive is switching the assembler to 64-bit code which happens
 * to work right even for 32-bit code ...
 */
#define instruction_hazard()						\
do {									\
	unsigned long tmp;						\
									\
	__asm__ __volatile__(						\
	"	.set	mips64r2				\n"	\
	"	dla	%0, 1f					\n"	\
	"	jr.hb	%0					\n"	\
	"	.set	mips0					\n"	\
	"1:							\n"	\
	: "=r" (tmp));							\
} while (0)

#elif defined(CONFIG_CPU_R10000)

/*
 * R10000 rocks - all hazards handled in hardware, so this becomes a nobrainer.
 */

ASMMACRO(mtc0_tlbw_hazard,
	)
ASMMACRO(tlbw_use_hazard,
	)
ASMMACRO(tlb_probe_hazard,
	)
ASMMACRO(irq_enable_hazard,
	)
ASMMACRO(irq_disable_hazard,
	)
ASMMACRO(back_to_back_c0_hazard,
	)
#define instruction_hazard() do { } while (0)

#elif defined(CONFIG_CPU_RM9000)

/*
 * RM9000 hazards.  When the JTLB is updated by tlbwi or tlbwr, a subsequent
 * use of the JTLB for instructions should not occur for 4 cpu cycles and use
 * for data translations should not occur for 3 cpu cycles.
 */

ASMMACRO(mtc0_tlbw_hazard,
	 _ssnop; _ssnop; _ssnop; _ssnop
	)
ASMMACRO(tlbw_use_hazard,
	 _ssnop; _ssnop; _ssnop; _ssnop
	)
ASMMACRO(tlb_probe_hazard,
	 _ssnop; _ssnop; _ssnop; _ssnop
	)
ASMMACRO(irq_enable_hazard,
	)
ASMMACRO(irq_disable_hazard,
	)
ASMMACRO(back_to_back_c0_hazard,
	)
#define instruction_hazard() do { } while (0)

#elif defined(CONFIG_CPU_SB1)

/*
 * Mostly like R4000 for historic reasons
 */
ASMMACRO(mtc0_tlbw_hazard,
	)
ASMMACRO(tlbw_use_hazard,
	)
ASMMACRO(tlb_probe_hazard,
	)
ASMMACRO(irq_enable_hazard,
	)
ASMMACRO(irq_disable_hazard,
	 _ssnop; _ssnop; _ssnop
	)
ASMMACRO(back_to_back_c0_hazard,
	)
#define instruction_hazard() do { } while (0)

#else

/*
 * Finally the catchall case for all other processors including R4000, R4400,
 * R4600, R4700, R5000, RM7000, NEC VR41xx etc.
 *
 * The taken branch will result in a two cycle penalty for the two killed
 * instructions on R4000 / R4400.  Other processors only have a single cycle
 * hazard so this is nice trick to have an optimal code for a range of
 * processors.
 */
ASMMACRO(mtc0_tlbw_hazard,
	 nop; nop
	)
ASMMACRO(tlbw_use_hazard,
	 nop; nop; nop
	)
ASMMACRO(tlb_probe_hazard,
	 nop; nop; nop
	)
ASMMACRO(irq_enable_hazard,
	)
ASMMACRO(irq_disable_hazard,
	 nop; nop; nop
	)
ASMMACRO(back_to_back_c0_hazard,
	 _ssnop; _ssnop; _ssnop;
	)
#define instruction_hazard() do { } while (0)

#endif


#endif /* _ASM_HAZARDS_H */