#ifndef __ALPHA_MMU_CONTEXT_H
#define __ALPHA_MMU_CONTEXT_H

#include <asm/machvec.h>
#include <asm/compiler.h>
#ifndef __EXTERN_INLINE
#include <asm/io.h>
#endif
/* Hand an updated PCB to PALcode; swpctx returns the old PCB in v0.  */
static inline unsigned long
__reload_thread(struct pcb_struct *pcb)
{
	register unsigned long a0 __asm__("$16");
	register unsigned long v0 __asm__("$0");

	a0 = virt_to_phys(pcb);
	__asm__ __volatile__(
		"call_pal %2 #__reload_thread"
		: "=r"(v0), "=r"(a0)
		: "i"(PAL_swpctx), "r"(a0)
		: "$1", "$22", "$23", "$24", "$25");
	return v0;
}
#define EV4_MAX_ASN 63
#define EV5_MAX_ASN 127
#define EV6_MAX_ASN 255
#ifdef CONFIG_ALPHA_GENERIC
# define MAX_ASN	(alpha_mv.max_asn)
#else
# ifdef CONFIG_ALPHA_EV4
#  define MAX_ASN	EV4_MAX_ASN
# elif defined(CONFIG_ALPHA_EV5)
#  define MAX_ASN	EV5_MAX_ASN
# else
#  define MAX_ASN	EV6_MAX_ASN
# endif
#endif
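
/*
 * Rough background: an ASN (address space number) tags translation
 * buffer and icache entries with the address space they belong to, so
 * switching contexts does not have to flush everything as long as a
 * fresh ASN is available.  The EV*_MAX_ASN limits above are per CPU
 * family; MAX_ASN is fixed at build time except on GENERIC kernels,
 * where it comes from the machine vector at run time.  (On EV4 the
 * PALcode does not use ASNs to validate TB entries, so they only help
 * the icache -- hence the tbiap() in ev4_switch_mm.)
 */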
#ifdef CONFIG_SMP
#define cpu_last_asn(cpuid)	(cpu_data[cpuid].last_asn)
#else
extern unsigned long last_asn;
#define cpu_last_asn(cpuid)	last_asn
#endif

#define WIDTH_HARDWARE_ASN	8
#define ASN_FIRST_VERSION	(1UL << WIDTH_HARDWARE_ASN)
#define HARDWARE_ASN_MASK	((1UL << WIDTH_HARDWARE_ASN) - 1)
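
/*
 * Encoding sketch (derived from the macros above): the low
 * WIDTH_HARDWARE_ASN bits of cpu_last_asn() and of mm->context[cpu]
 * hold the hardware ASN that actually goes into the PCB, and the bits
 * above them act as a software version number that advances in steps
 * of ASN_FIRST_VERSION.  For example, the value 0x305 means version
 * 0x300 with hardware ASN 5.  An mm whose version no longer matches
 * the CPU's current version must be given a new ASN before it runs.
 *
 * The #ifndef block below follows the usual arch/alpha convention: a
 * single .c file may define __EXTERN_INLINE before including this
 * header to turn the inline helpers into out-of-line definitions;
 * __MMU_EXTERN_INLINE records that the default was set here so it can
 * be undone at the bottom of the file.
 */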
#ifndef __EXTERN_INLINE
#define __EXTERN_INLINE extern inline
#define __MMU_EXTERN_INLINE
#endif

extern inline unsigned long
__get_new_mm_context(struct mm_struct *mm, long cpu)
{
	unsigned long asn = cpu_last_asn(cpu);
	unsigned long next = asn + 1;
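
	/*
	 * Rollover sketch: once the low WIDTH_HARDWARE_ASN bits reach
	 * MAX_ASN, every hardware ASN of the current version is in use.
	 * The allocator then flushes the user TLB (tbiap) and the
	 * instruction stream (imb) and restarts at the next version,
	 * roughly:
	 *
	 *	next = (asn & ~HARDWARE_ASN_MASK) + ASN_FIRST_VERSION;
	 *
	 * Bumping the version makes every other mm->context[cpu] on
	 * this CPU stale, which forces them to allocate fresh ASNs the
	 * next time they are switched in.  cpu_last_asn(cpu) is updated
	 * to "next" and that value is returned.
	 */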
	if (prev_mm != next_mm)
		tbiap();	/* EV4 TB entries are not ASN-tagged, so flush */
#ifdef CONFIG_SMP
#define check_mmu_context()					\
do {								\
	int cpu = smp_processor_id();				\
	cpu_data[cpu].asn_lock = 0;				\
	barrier();						\
	if (cpu_data[cpu].need_new_asn) {			\
		struct mm_struct * mm = current->active_mm;	\
		cpu_data[cpu].need_new_asn = 0;			\
		if (!mm->context[cpu])				\
			__load_new_mm_context(mm);		\
	}							\
} while(0)
#else
#define check_mmu_context()  do { } while(0)
#endif
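
/*
 * Informal note on the SMP variant: ev5_switch_mm() holds
 * cpu_data[cpu].asn_lock around its ASN bookkeeping and sets
 * need_new_asn when it decided not to allocate a new context.
 * check_mmu_context() drops the lock and then re-checks: if a
 * cross-CPU TLB flush invalidated mm->context[cpu] in the meantime,
 * __load_new_mm_context() installs a fresh ASN before user code can
 * run with a stale one.
 */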
#define deactivate_mm(tsk,mm)	do { } while (0)
#ifdef CONFIG_ALPHA_GENERIC
# define switch_mm(a,b,c)	alpha_mv.mv_switch_mm((a),(b),(c))
# define activate_mm(x,y)	alpha_mv.mv_activate_mm((x),(y))
#else
# ifdef CONFIG_ALPHA_EV4
#  define switch_mm(a,b,c)	ev4_switch_mm((a),(b),(c))
#  define activate_mm(x,y)	ev4_activate_mm((x),(y))
# else
#  define switch_mm(a,b,c)	ev5_switch_mm((a),(b),(c))
#  define activate_mm(x,y)	ev5_activate_mm((x),(y))
# endif
#endif
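
/*
 * Usage sketch: on a GENERIC kernel switch_mm(prev, next, tsk) expands
 * to alpha_mv.mv_switch_mm(prev, next, tsk), i.e. an indirect call
 * through the machine vector chosen at boot; a kernel built for one
 * CPU family binds directly to ev4_switch_mm()/ev4_activate_mm() or,
 * for every non-EV4 family (EV5, EV6, ...), to the ev5_* entry points.
 */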
#ifdef __MMU_EXTERN_INLINE
#undef __EXTERN_INLINE
#undef __MMU_EXTERN_INLINE
#endif

#endif /* __ALPHA_MMU_CONTEXT_H */