/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 2003, 2004 Ralf Baechle <ralf@linux-mips.org>
 * Copyright (C) MIPS Technologies, Inc.
 *   written by Ralf Baechle <ralf@linux-mips.org>
 */
#ifndef _ASM_HAZARDS_H
#define _ASM_HAZARDS_H

#ifdef __ASSEMBLY__
#define ASMMACRO(name, code...) .macro name; code; .endm
#else

#define ASMMACRO(name, code...)					\
__asm__(".macro " #name "; " #code "; .endm");			\
								\
static inline void name(void)					\
{								\
	__asm__ __volatile__ (#name);				\
}

#endif

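/*
 * ASMMACRO() defines each hazard barrier from a single source in both
 * worlds: assembler files get a plain GAS macro, while C files get the
 * same assembler macro plus a same-named static inline function that
 * emits it.  A C caller can therefore simply write e.g.
 * back_to_back_c0_hazard(); .S code uses the bare macro name.
 */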
ASMMACRO(_ssnop,
	 sll	$0, $0, 1
	)

ASMMACRO(_ehb,
	 sll	$0, $0, 3
	)

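/*
 * Both barriers are encoded as shifts of $0, so they degrade to ordinary
 * nops on cores that predate them: sll $0, $0, 1 is the SSNOP encoding
 * (a no-op that issues alone on superscalar cores) and sll $0, $0, 3 is
 * the EHB execution hazard barrier introduced with the R2 architecture.
 */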
/*
 * TLB hazards
 */
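/*
 * Rough usage sketch (illustrative, not an exhaustive contract):
 * mtc0_tlbw_hazard() sits between loading EntryHi/EntryLo/Index and the
 * tlbwi/tlbwr, tlbw_use_hazard() before the first access that may hit the
 * freshly written entry, and tlb_probe_hazard() between tlbp and reading
 * the Index register.
 */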
#if defined(CONFIG_CPU_MIPSR2)

/*
 * MIPSR2 defines ehb for hazard avoidance
 */

ASMMACRO(mtc0_tlbw_hazard,
	 _ehb
	)
ASMMACRO(tlbw_use_hazard,
	 _ehb
	)
ASMMACRO(tlb_probe_hazard,
	 _ehb
	)
ASMMACRO(irq_enable_hazard,
	)
ASMMACRO(irq_disable_hazard,
	 _ehb
	)
ASMMACRO(back_to_back_c0_hazard,
	 _ehb
	)
/*
 * gcc has a tradition of miscompiling the previous construct using the
 * address of a label as argument to inline assembler.  Gas, on the other
 * hand, has the annoying difference between la and dla, which are only
 * usable for 32-bit resp. 64-bit code, so they can't be used without
 * conditional compilation.  The alternative is switching the assembler to
 * 64-bit code, which happens to work right even for 32-bit code ...
 */
#define instruction_hazard()					\
do {								\
	unsigned long tmp;					\
								\
	__asm__ __volatile__(					\
	"	.set	mips64r2			\n"	\
	"	dla	%0, 1f				\n"	\
	"	jr.hb	%0				\n"	\
	"	.set	mips0				\n"	\
	"1:						\n"	\
	: "=r" (tmp));						\
} while (0)
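/*
 * The sequence above takes the address of the local label with dla and
 * jumps to it with jr.hb, the R2 jump-register-with-hazard-barrier, which
 * clears outstanding instruction hazards.  Forcing the assembler into
 * mips64r2 mode is the trick described in the comment above: dla then
 * assembles for both 32-bit and 64-bit kernels without conditionals.
 */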

#elif defined(CONFIG_CPU_R10000)

/*
 * R10000 rocks - all hazards handled in hardware, so this becomes a no-brainer.
 */

ASMMACRO(mtc0_tlbw_hazard,
	)
ASMMACRO(tlbw_use_hazard,
	)
ASMMACRO(tlb_probe_hazard,
	)
ASMMACRO(irq_enable_hazard,
	)
ASMMACRO(irq_disable_hazard,
	)
ASMMACRO(back_to_back_c0_hazard,
	)
#define instruction_hazard() do { } while (0)

#elif defined(CONFIG_CPU_RM9000)

/*
 * RM9000 hazards.  When the JTLB is updated by tlbwi or tlbwr, a subsequent
 * use of the JTLB for instructions should not occur for 4 cpu cycles and use
 * for data translations should not occur for 3 cpu cycles.
 */
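/*
 * Hence the four _ssnop fills below: assuming at least one cycle per
 * _ssnop, four of them pad the longer 4-cycle instruction-side window and
 * thereby also the 3-cycle data-side one.
 */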

ASMMACRO(mtc0_tlbw_hazard,
	 _ssnop; _ssnop; _ssnop; _ssnop
	)
ASMMACRO(tlbw_use_hazard,
	 _ssnop; _ssnop; _ssnop; _ssnop
	)
ASMMACRO(tlb_probe_hazard,
	 _ssnop; _ssnop; _ssnop; _ssnop
	)
ASMMACRO(irq_enable_hazard,
	)
ASMMACRO(irq_disable_hazard,
	)
ASMMACRO(back_to_back_c0_hazard,
	)
#define instruction_hazard() do { } while (0)

#elif defined(CONFIG_CPU_SB1)

/*
 * Mostly like R4000 for historic reasons
 */
ASMMACRO(mtc0_tlbw_hazard,
	)
ASMMACRO(tlbw_use_hazard,
	)
ASMMACRO(tlb_probe_hazard,
	)
ASMMACRO(irq_enable_hazard,
	)
ASMMACRO(irq_disable_hazard,
	 _ssnop; _ssnop; _ssnop
	)
ASMMACRO(back_to_back_c0_hazard,
	)
#define instruction_hazard() do { } while (0)

#else

/*
 * Finally the catchall case for all other processors including R4000, R4400,
 * R4600, R4700, R5000, RM7000, NEC VR41xx etc.
 *
 * The taken branch will result in a two cycle penalty for the two killed
 * instructions on R4000 / R4400.  Other processors only have a single cycle
 * hazard, so this is a nice trick to have optimal code for a range of
 * processors.
 */
ASMMACRO(mtc0_tlbw_hazard,
	 nop; nop
	)
ASMMACRO(tlbw_use_hazard,
	 nop; nop; nop
	)
ASMMACRO(tlb_probe_hazard,
	 nop; nop; nop
	)
ASMMACRO(irq_enable_hazard,
	)
ASMMACRO(irq_disable_hazard,
	 nop; nop; nop
	)
ASMMACRO(back_to_back_c0_hazard,
	 _ssnop; _ssnop; _ssnop;
	)
#define instruction_hazard() do { } while (0)

#endif

#endif /* _ASM_HAZARDS_H */