/*
 * Interrupt-flag management for the TILE architecture (tilepro/tilegx).
 * NOTE(review): this listing is fragmented -- several original lines,
 * including the #else/#endif pairs for the conditionals below, are not
 * visible here.
 */
15 #ifndef _ASM_TILE_IRQFLAGS_H
16 #define _ASM_TILE_IRQFLAGS_H
/*
 * Assembly code on 32-bit (non-tilegx) chips sees the interrupt mask as
 * two 32-bit SPR words, so only the high word of the maskable set is
 * defined in that case; the full 64-bit constant is defined further down.
 */
21 #if !defined(__tilegx__) && defined(__ASSEMBLY__)
/* High word of the interrupts Linux may mask: all bits except the
 * perf-count interrupt(s), which are excluded so that disabling
 * interrupts never blocks them. */
29 #if CHIP_HAS_AUX_PERF_COUNTERS()
30 #define LINUX_MASKABLE_INTERRUPTS_HI \
31 (~(INT_MASK_HI(INT_PERF_COUNT) | INT_MASK_HI(INT_AUX_PERF_COUNT)))
/* (else branch: chip has no auxiliary perf counter) */
33 #define LINUX_MASKABLE_INTERRUPTS_HI \
34 (~(INT_MASK_HI(INT_PERF_COUNT)))
/* Full 64-bit maskable set, again excluding the perf-count interrupt(s). */
39 #if CHIP_HAS_AUX_PERF_COUNTERS()
40 #define LINUX_MASKABLE_INTERRUPTS \
41 (~(INT_MASK(INT_PERF_COUNT) | INT_MASK(INT_AUX_PERF_COUNT)))
/* (else branch: chip has no auxiliary perf counter) */
43 #define LINUX_MASKABLE_INTERRUPTS \
44 (~(INT_MASK(INT_PERF_COUNT)))
52 #include <asm/percpu.h>
53 #include <arch/spr_def.h>
/*
 * C-level primitives for the kernel interrupt mask.  On chips with a
 * split mask the 64-bit set lives in two 32-bit SPRs: _K_0 holds the
 * low word and _K_1 the high word.
 */
56 #if CHIP_HAS_SPLIT_INTR_MASK()
/* Compile-time check of the word-placement assumptions used below:
 * the perf-count interrupts must be in the high word and MEM_ERROR in
 * the low word. */
57 #if INT_PERF_COUNT < 32 || INT_AUX_PERF_COUNT < 32 || INT_MEM_ERROR >= 32
58 # error Fix assumptions about which word various interrupts are in
/* Mask (block delivery of) interrupt number n. */
60 #define interrupt_mask_set(n) do { \
/* NOTE(review): the lines defining __n and selecting word 0 vs 1 \
   are not visible in this listing. */ \
62 int __mask = 1 << (__n & 0x1f); \
64 __insn_mtspr(SPR_INTERRUPT_MASK_SET_K_0, __mask); \
66 __insn_mtspr(SPR_INTERRUPT_MASK_SET_K_1, __mask); \
/* Unmask (allow delivery of) interrupt number n. */ \
68 #define interrupt_mask_reset(n) do { \
70 int __mask = 1 << (__n & 0x1f); \
72 __insn_mtspr(SPR_INTERRUPT_MASK_RESET_K_0, __mask); \
74 __insn_mtspr(SPR_INTERRUPT_MASK_RESET_K_1, __mask); \
/* Nonzero iff interrupt n is currently masked. */ \
76 #define interrupt_mask_check(n) ({ \
79 __insn_mfspr(SPR_INTERRUPT_MASK_K_0) : \
80 __insn_mfspr(SPR_INTERRUPT_MASK_K_1)) \
81 >> (__n & 0x1f)) & 1; \
/* Mask every interrupt whose bit is set in the 64-bit argument. */ \
83 #define interrupt_mask_set_mask(mask) do { \
84 unsigned long long __m = (mask); \
85 __insn_mtspr(SPR_INTERRUPT_MASK_SET_K_0, (unsigned long)(__m)); \
86 __insn_mtspr(SPR_INTERRUPT_MASK_SET_K_1, (unsigned long)(__m>>32)); \
/* Unmask every interrupt whose bit is set in the 64-bit argument. */ \
88 #define interrupt_mask_reset_mask(mask) do { \
89 unsigned long long __m = (mask); \
90 __insn_mtspr(SPR_INTERRUPT_MASK_RESET_K_0, (unsigned long)(__m)); \
91 __insn_mtspr(SPR_INTERRUPT_MASK_RESET_K_1, (unsigned long)(__m>>32)); \
/* Read back the current 64-bit mask; presumably reading the SET SPR \
   returns the current mask value -- confirm against the SPR manual. */ \
93 #define interrupt_mask_save_mask() \
94 (__insn_mfspr(SPR_INTERRUPT_MASK_SET_K_0) | \
95 (((unsigned long long)__insn_mfspr(SPR_INTERRUPT_MASK_SET_K_1))<<32))
/* Overwrite the entire 64-bit mask with the argument. */
96 #define interrupt_mask_restore_mask(mask) do { \
97 unsigned long long __m = (mask); \
98 __insn_mtspr(SPR_INTERRUPT_MASK_K_0, (unsigned long)(__m)); \
99 __insn_mtspr(SPR_INTERRUPT_MASK_K_1, (unsigned long)(__m>>32)); \
/* Single 64-bit mask word (no split): same API as the split variants. \
   Mask (block delivery of) interrupt number n. */ \
102 #define interrupt_mask_set(n) \
103 __insn_mtspr(SPR_INTERRUPT_MASK_SET_K, (1UL << (n)))
/* Unmask (allow delivery of) interrupt number n. */
104 #define interrupt_mask_reset(n) \
105 __insn_mtspr(SPR_INTERRUPT_MASK_RESET_K, (1UL << (n)))
/* Nonzero iff interrupt n is currently masked. */
106 #define interrupt_mask_check(n) \
107 ((__insn_mfspr(SPR_INTERRUPT_MASK_K) >> (n)) & 1)
/* Mask every interrupt whose bit is set in the argument. */
108 #define interrupt_mask_set_mask(mask) \
109 __insn_mtspr(SPR_INTERRUPT_MASK_SET_K, (mask))
/* Unmask every interrupt whose bit is set in the argument. */
110 #define interrupt_mask_reset_mask(mask) \
111 __insn_mtspr(SPR_INTERRUPT_MASK_RESET_K, (mask))
/* Read back the current mask; presumably reading the SET SPR returns
   the current mask value -- confirm against the SPR manual. */
112 #define interrupt_mask_save_mask() \
113 __insn_mfspr(SPR_INTERRUPT_MASK_K)
/* Overwrite the entire mask with the argument. */
114 #define interrupt_mask_restore_mask(mask) \
115 __insn_mtspr(SPR_INTERRUPT_MASK_K, (mask))
/*
 * Generic irqflags API.  The set of interrupts the kernel wants enabled
 * is tracked in the per-cpu variable interrupts_enabled_mask; the
 * MEM_ERROR interrupt is part of that set from the start, and its mask
 * bit doubles as the "are irqs disabled" indicator (see
 * arch_irqs_disabled() below).
 */
129 #define INITIAL_INTERRUPTS_ENABLED INT_MASK(INT_MEM_ERROR)
/* Disable interrupts by masking every Linux-maskable interrupt. */
132 #define arch_local_irq_disable() \
133 interrupt_mask_set_mask(LINUX_MASKABLE_INTERRUPTS)
/* Mask absolutely everything: all 64 mask bits, maskable or not. */
136 #define arch_local_irq_disable_all() \
137 interrupt_mask_set_mask(-1ULL)
/* Enable interrupts by unmasking exactly the per-cpu enabled set. */
140 #define arch_local_irq_enable() \
141 interrupt_mask_reset_mask(__get_cpu_var(interrupts_enabled_mask))
/* Restore from a saved "disabled" boolean flag; the if/else lines
 * choosing between the two calls are not visible in this listing. */
144 #define arch_local_irq_restore(disabled) do { \
146 arch_local_irq_disable(); \
148 arch_local_irq_enable(); \
/* The flags value is simply a boolean: nonzero means "disabled". */ \
152 #define arch_irqs_disabled_flags(flags) ((flags) != 0)
/* Irqs are considered disabled iff MEM_ERROR -- always in the enabled
 * set -- is currently masked. */
155 #define arch_irqs_disabled() interrupt_mask_check(INT_MEM_ERROR)
/* Saving flags just records the current disabled/enabled boolean. */
158 #define arch_local_save_flags() arch_irqs_disabled()
/* Save the disabled flag, then disable; the line yielding __flags as
 * the statement-expression result is not visible in this listing. */
161 #define arch_local_irq_save() ({ \
162 unsigned long __flags = arch_local_save_flags(); \
163 arch_local_irq_disable(); \
/* Remove one interrupt from the per-cpu enabled set (it will stay \
   masked across a subsequent arch_local_irq_enable()). */ \
167 #define arch_local_irq_mask(interrupt) \
168 (__get_cpu_var(interrupts_enabled_mask) &= ~INT_MASK(interrupt))
/* As above, and also mask it in hardware immediately. */
171 #define arch_local_irq_mask_now(interrupt) do { \
172 arch_local_irq_mask(interrupt); \
173 interrupt_mask_set(interrupt); \
/* Add one interrupt back to the per-cpu enabled set. */ \
177 #define arch_local_irq_unmask(interrupt) \
178 (__get_cpu_var(interrupts_enabled_mask) |= INT_MASK(interrupt))
/* As above, and unmask it in hardware now -- but only if interrupts
 * are currently enabled, to preserve the disabled state. */
181 #define arch_local_irq_unmask_now(interrupt) do { \
182 arch_local_irq_unmask(interrupt); \
183 if (!irqs_disabled()) \
184 interrupt_mask_reset(interrupt); \
/* Assembly-language helpers: tilegx variant (single 64-bit mask SPR). \
   IRQS_DISABLED below relies on MEM_ERROR being bit 0 of the mask. */ \
193 #if INT_MEM_ERROR != 0
194 # error Fix IRQS_DISABLED() macro
/* Leave "irqs disabled?" in tmp; the instruction isolating bit 0
 * (INT_MEM_ERROR, checked above) is not visible in this listing. */
198 #define IRQS_DISABLED(tmp) \
199 mfspr tmp, SPR_INTERRUPT_MASK_K; \
/* Build the 64-bit address of interrupts_enabled_mask from three \
   16-bit chunks; the per-cpu adjustment line is not visible here. */ \
203 #define GET_INTERRUPTS_ENABLED_MASK_PTR(reg) \
204 moveli reg, hw2_last(interrupts_enabled_mask); \
205 shl16insli reg, reg, hw1(interrupts_enabled_mask); \
206 shl16insli reg, reg, hw0(interrupts_enabled_mask); \
/* Disable: materialize LINUX_MASKABLE_INTERRUPTS in tmp0 and set \
   those bits in the kernel interrupt mask. */ \
210 #define IRQ_DISABLE(tmp0, tmp1) \
211 moveli tmp0, hw2_last(LINUX_MASKABLE_INTERRUPTS); \
212 shl16insli tmp0, tmp0, hw1(LINUX_MASKABLE_INTERRUPTS); \
213 shl16insli tmp0, tmp0, hw0(LINUX_MASKABLE_INTERRUPTS); \
214 mtspr SPR_INTERRUPT_MASK_SET_K, tmp0
/* Mask everything; the instruction loading tmp (presumably -1) is not
 * visible in this listing. */
217 #define IRQ_DISABLE_ALL(tmp) \
219 mtspr SPR_INTERRUPT_MASK_SET_K, tmp
/* Enable is split into LOAD/APPLY so callers can overlap the load; the
 * load of the mask value into tmp0 is not visible in this listing. */
222 #define IRQ_ENABLE_LOAD(tmp0, tmp1) \
223 GET_INTERRUPTS_ENABLED_MASK_PTR(tmp0); \
/* Apply: unmask the per-cpu enabled set loaded by IRQ_ENABLE_LOAD. */ \
225 #define IRQ_ENABLE_APPLY(tmp0, tmp1) \
226 mtspr SPR_INTERRUPT_MASK_RESET_K, tmp0
/* Assembly-language helpers: tilepro variant (split 32-bit mask SPRs).
 * Leave "irqs disabled?" in tmp by shifting the MEM_ERROR bit down; the
 * instruction isolating bit 0 is not visible in this listing. */
239 #define IRQS_DISABLED(tmp) \
240 mfspr tmp, SPR_INTERRUPT_MASK_K_0; \
241 shri tmp, tmp, INT_MEM_ERROR; \
/* Build the 32-bit address of interrupts_enabled_mask (lo16 + ha16, \
   where ha16 compensates for lo16 sign-extension); the per-cpu \
   adjustment line is not visible here. */ \
245 #define GET_INTERRUPTS_ENABLED_MASK_PTR(reg) \
246 moveli reg, lo16(interrupts_enabled_mask); \
247 auli reg, reg, ha16(interrupts_enabled_mask); \
/* Disable: set the maskable bits in both mask words.  The setup of \
   tmp0 for the low word, and the instruction-bundle braces, are not \
   visible in this listing. */ \
251 #define IRQ_DISABLE(tmp0, tmp1) \
254 moveli tmp1, lo16(LINUX_MASKABLE_INTERRUPTS_HI) \
257 mtspr SPR_INTERRUPT_MASK_SET_K_0, tmp0; \
258 auli tmp1, tmp1, ha16(LINUX_MASKABLE_INTERRUPTS_HI) \
260 mtspr SPR_INTERRUPT_MASK_SET_K_1, tmp1
/* Mask everything in both words; the instruction loading tmp
 * (presumably -1) is not visible in this listing. */
263 #define IRQ_DISABLE_ALL(tmp) \
265 mtspr SPR_INTERRUPT_MASK_SET_K_0, tmp; \
266 mtspr SPR_INTERRUPT_MASK_SET_K_1, tmp
/* Enable is split into LOAD/APPLY; the loads of the two 32-bit halves
 * into tmp0/tmp1 are not visible in this listing. */
269 #define IRQ_ENABLE_LOAD(tmp0, tmp1) \
270 GET_INTERRUPTS_ENABLED_MASK_PTR(tmp0); \
/* Apply: unmask both words of the per-cpu enabled set. */ \
276 #define IRQ_ENABLE_APPLY(tmp0, tmp1) \
277 mtspr SPR_INTERRUPT_MASK_RESET_K_0, tmp0; \
278 mtspr SPR_INTERRUPT_MASK_RESET_K_1, tmp1
/* Convenience form when load and apply need not be separated. */
281 #define IRQ_ENABLE(tmp0, tmp1) \
282 IRQ_ENABLE_LOAD(tmp0, tmp1); \
283 IRQ_ENABLE_APPLY(tmp0, tmp1)
/*
 * irq-flags tracing hooks for assembly code: call trace_hardirqs_on/off
 * when CONFIG_TRACE_IRQFLAGS is set, otherwise expand to nothing.
 * (The #else/#endif lines are not visible in this listing.)
 */
290 #ifdef CONFIG_TRACE_IRQFLAGS
291 # define TRACE_IRQS_ON jal trace_hardirqs_on
292 # define TRACE_IRQS_OFF jal trace_hardirqs_off
/* (else branch: tracing compiled out) */
294 # define TRACE_IRQS_ON
295 # define TRACE_IRQS_OFF