Linux Kernel  3.7.1
hazards.h
/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License. See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 2003, 04, 07 Ralf Baechle <[email protected]>
 * Copyright (C) MIPS Technologies, Inc.
 *   written by Ralf Baechle <[email protected]>
 */
#ifndef _ASM_HAZARDS_H
#define _ASM_HAZARDS_H

#ifdef __ASSEMBLY__
#define ASMMACRO(name, code...) .macro name; code; .endm
#else

#include <asm/cpu-features.h>

#define ASMMACRO(name, code...)						\
__asm__(".macro " #name "; " #code "; .endm");				\
									\
static inline void name(void)						\
{									\
	__asm__ __volatile__ (#name);					\
}
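
/*
 * Illustrative expansion (added note, not part of the original header):
 * for a barrier defined as ASMMACRO(_ehb, sll $0, $0, 3), the C-side
 * definition above expands to roughly
 *
 *	__asm__(".macro _ehb; sll $0, $0, 3; .endm");
 *
 *	static inline void _ehb(void)
 *	{
 *		__asm__ __volatile__("_ehb");
 *	}
 *
 * i.e. one name usable both as an assembler macro and as a C function.
 */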

/*
 * MIPS R2 instruction hazard barrier. Needs to be called as a subroutine.
 */
extern void mips_ihb(void);
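/*
 * Usage sketch (added note, not from the original source): from C this is
 * an ordinary call, mips_ihb(); assembler callers would reach it with a
 * jump-and-link such as "jal mips_ihb", since the barrier is executed in
 * the callee and control then returns.
 */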

#endif

ASMMACRO(_ssnop,
	 sll	$0, $0, 1
	)

ASMMACRO(_ehb,
	 sll	$0, $0, 3
	)

/*
 * TLB hazards
 */
#if defined(CONFIG_CPU_MIPSR2) && !defined(CONFIG_CPU_CAVIUM_OCTEON)

/*
 * MIPSR2 defines ehb for hazard avoidance
 */

ASMMACRO(mtc0_tlbw_hazard,
	 _ehb
	)
ASMMACRO(tlbw_use_hazard,
	 _ehb
	)
ASMMACRO(tlb_probe_hazard,
	 _ehb
	)
ASMMACRO(irq_enable_hazard,
	 _ehb
	)
ASMMACRO(irq_disable_hazard,
	 _ehb
	)
ASMMACRO(back_to_back_c0_hazard,
	 _ehb
	)
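
/*
 * Usage sketch (an illustration, not part of this header): TLB maintenance
 * code typically brackets CP0 and TLB instructions with these barriers,
 * along the lines of
 *
 *	write_c0_entryhi(entryhi);
 *	mtc0_tlbw_hazard();        the mtc0 must settle before tlbwi
 *	tlb_write_indexed();
 *	tlbw_use_hazard();         the TLB write must settle before use
 */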
/*
 * gcc has a tradition of miscompiling the previous construct using the
 * address of a label as argument to inline assembler. Gas, on the other
 * hand, has the annoying difference between la and dla, which are only
 * usable for 32-bit resp. 64-bit code, so they can't be used without
 * conditional compilation. The alternative is switching the assembler to
 * 64-bit code, which happens to work right even for 32-bit code ...
 */
#define instruction_hazard()						\
do {									\
	unsigned long tmp;						\
									\
	__asm__ __volatile__(						\
	"	.set	mips64r2				\n"	\
	"	dla	%0, 1f					\n"	\
	"	jr.hb	%0					\n"	\
	"	.set	mips0					\n"	\
	"1:							\n"	\
	: "=r" (tmp));							\
} while (0)
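
/*
 * Note (added, not from the original source): jr.hb is a jump register
 * with an instruction hazard barrier, so instruction_hazard() is the
 * barrier to run after the kernel modifies instruction memory and before
 * the modified instructions may be fetched and executed.
 */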

#elif (defined(CONFIG_CPU_MIPSR1) && !defined(CONFIG_MIPS_ALCHEMY)) || \
	defined(CONFIG_CPU_BMIPS)

/*
 * These are slightly complicated by the fact that we guarantee that R1
 * kernels run fine on R2 processors.
 */
ASMMACRO(mtc0_tlbw_hazard,
	 _ssnop; _ssnop; _ehb
	)
ASMMACRO(tlbw_use_hazard,
	 _ssnop; _ssnop; _ssnop; _ehb
	)
ASMMACRO(tlb_probe_hazard,
	 _ssnop; _ssnop; _ssnop; _ehb
	)
ASMMACRO(irq_enable_hazard,
	 _ssnop; _ssnop; _ssnop; _ehb
	)
ASMMACRO(irq_disable_hazard,
	 _ssnop; _ssnop; _ssnop; _ehb
	)
ASMMACRO(back_to_back_c0_hazard,
	 _ssnop; _ssnop; _ssnop; _ehb
	)
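
/*
 * Note (added, not from the original source): each sequence above ends in
 * _ehb, encoded earlier in this file as "sll $0, $0, 3". An R1 core
 * executes that encoding as a plain no-op (the destination is $0), while
 * an R2 core treats it as an explicit hazard barrier, so one binary covers
 * both; the leading _ssnop's pad out the R1 pipelines.
 */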
/*
 * gcc has a tradition of miscompiling the previous construct using the
 * address of a label as argument to inline assembler. Gas, on the other
 * hand, has the annoying difference between la and dla, which are only
 * usable for 32-bit resp. 64-bit code, so they can't be used without
 * conditional compilation. The alternative is switching the assembler to
 * 64-bit code, which happens to work right even for 32-bit code ...
 */
#define __instruction_hazard()						\
do {									\
	unsigned long tmp;						\
									\
	__asm__ __volatile__(						\
	"	.set	mips64r2				\n"	\
	"	dla	%0, 1f					\n"	\
	"	jr.hb	%0					\n"	\
	"	.set	mips0					\n"	\
	"1:							\n"	\
	: "=r" (tmp));							\
} while (0)

#define instruction_hazard()						\
do {									\
	if (cpu_has_mips_r2)						\
		__instruction_hazard();					\
} while (0)
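
/*
 * Note (added, not from the original source): on a genuine R1 core
 * cpu_has_mips_r2 is false and instruction_hazard() does nothing, while
 * the jr.hb based __instruction_hazard() only runs when the R1 kernel
 * finds itself on an R2 core, matching the guarantee above.
 */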

#elif defined(CONFIG_MIPS_ALCHEMY) || defined(CONFIG_CPU_CAVIUM_OCTEON) || \
	defined(CONFIG_CPU_LOONGSON2) || defined(CONFIG_CPU_R10000) || \
	defined(CONFIG_CPU_R5500)

/*
 * R10000 rocks - all hazards handled in hardware, so this becomes a
 * no-brainer.
 */

ASMMACRO(mtc0_tlbw_hazard,
	)
ASMMACRO(tlbw_use_hazard,
	)
ASMMACRO(tlb_probe_hazard,
	)
ASMMACRO(irq_enable_hazard,
	)
ASMMACRO(irq_disable_hazard,
	)
ASMMACRO(back_to_back_c0_hazard,
	)
#define instruction_hazard() do { } while (0)

#elif defined(CONFIG_CPU_RM9000)

/*
 * RM9000 hazards. When the JTLB is updated by tlbwi or tlbwr, a subsequent
 * use of the JTLB for instructions should not occur for 4 cpu cycles and use
 * for data translations should not occur for 3 cpu cycles.
 */

ASMMACRO(mtc0_tlbw_hazard,
	 _ssnop; _ssnop; _ssnop; _ssnop
	)
ASMMACRO(tlbw_use_hazard,
	 _ssnop; _ssnop; _ssnop; _ssnop
	)
ASMMACRO(tlb_probe_hazard,
	 _ssnop; _ssnop; _ssnop; _ssnop
	)
ASMMACRO(irq_enable_hazard,
	)
ASMMACRO(irq_disable_hazard,
	)
ASMMACRO(back_to_back_c0_hazard,
	)
#define instruction_hazard() do { } while (0)
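
/*
 * Note (added, not from the original source): four _ssnop's cover the
 * worst case quoted above, the 4-cycle instruction-side JTLB hazard, and
 * with it the shorter 3-cycle data-side hazard.
 */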

#elif defined(CONFIG_CPU_SB1)

/*
 * Mostly like R4000 for historic reasons
 */
ASMMACRO(mtc0_tlbw_hazard,
	)
ASMMACRO(tlbw_use_hazard,
	)
ASMMACRO(tlb_probe_hazard,
	)
ASMMACRO(irq_enable_hazard,
	)
ASMMACRO(irq_disable_hazard,
	 _ssnop; _ssnop; _ssnop
	)
ASMMACRO(back_to_back_c0_hazard,
	)
#define instruction_hazard() do { } while (0)

#else

/*
 * Finally the catchall case for all other processors including R4000, R4400,
 * R4600, R4700, R5000, RM7000, NEC VR41xx etc.
 *
 * The taken branch will result in a two cycle penalty for the two killed
 * instructions on R4000 / R4400. Other processors only have a single cycle
 * hazard so this is a nice trick to have optimal code for a range of
 * processors.
 */
ASMMACRO(mtc0_tlbw_hazard,
	 nop; nop
	)
ASMMACRO(tlbw_use_hazard,
	 nop; nop; nop
	)
ASMMACRO(tlb_probe_hazard,
	 nop; nop; nop
	)
ASMMACRO(irq_enable_hazard,
	 _ssnop; _ssnop; _ssnop;
	)
ASMMACRO(irq_disable_hazard,
	 nop; nop; nop
	)
ASMMACRO(back_to_back_c0_hazard,
	 _ssnop; _ssnop; _ssnop;
	)
#define instruction_hazard() do { } while (0)

#endif


/* FPU hazards */

#if defined(CONFIG_CPU_SB1)
ASMMACRO(enable_fpu_hazard,
	 .set	push;
	 .set	mips64;
	 .set	noreorder;
	 _ssnop;
	 bnezl	$0, .+4;
	 _ssnop;
	 .set	pop
)
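/*
 * Note (added, an interpretation rather than original text): bnezl $0, .+4
 * can never be taken since $0 is hardwired to zero, and the _ssnop in the
 * delay slot of a not-taken branch-likely is annulled; the point of the
 * never-taken likely branch is, presumably, that it forces the SB1
 * pipeline to drain after the FPU is enabled.
 */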
ASMMACRO(disable_fpu_hazard,
)

#elif defined(CONFIG_CPU_MIPSR2)
ASMMACRO(enable_fpu_hazard,
	 _ehb
)
ASMMACRO(disable_fpu_hazard,
	 _ehb
)
#else
ASMMACRO(enable_fpu_hazard,
	 nop; nop; nop; nop
)
ASMMACRO(disable_fpu_hazard,
	 _ehb
)
#endif

#endif /* _ASM_HAZARDS_H */