27 #ifndef __KVM_VCPU_H__
28 #define __KVM_VCPU_H__
30 #include <asm/types.h>
32 #include <asm/processor.h>
53 struct {
unsigned long qp:6,
r1:7, imm7b:7,
r3:2, imm5c:5,
59 struct {
unsigned long qp:6, btype:3, un3:3,
p:1, b2:3, un11:11, x6:6,
60 wh:2,
d:1, un1:1,
major:4; };
65 struct {
unsigned long qp:6, un21:21, x6:6, un4:4,
major:4; };
70 struct {
unsigned long qp:6, imm20:20, :1, x6:6, :3,
i:1,
major:4; };
75 struct {
unsigned long qp:6, imm20:20, :1, x6:6, x3:3,
i:1,
major:4; };
80 struct {
unsigned long qp:6, :7,
r2:7, ar3:7, x6:6, x3:3, :1,
major:4; };
85 struct {
unsigned long qp:6, :7, imm:7, ar3:7, x6:6, x3:3,
s:1,
major:4; };
90 struct {
unsigned long qp:6,
r1:7, :7, ar3:7, x6:6, x3:3, :1,
major:4; };
95 struct {
unsigned long qp:6, :14,
r3:7, x6:6, x3:3, :1,
major:4; };
100 struct {
unsigned long qp:6, :7,
r2:7, ar3:7, x6:6, x3:3, :1,
major:4; };
105 struct {
unsigned long qp:6, :7, imm:7, ar3:7, x4:4, x2:2,
111 struct {
unsigned long qp:6,
r1:7, :7, ar3:7, x6:6, x3:3, :1,
major:4; };
116 struct {
unsigned long qp:6, :7,
r2:7,
cr3:7, x6:6, x3:3, :1,
major:4; };
121 struct {
unsigned long qp:6,
r1:7, :7,
cr3:7, x6:6, x3:3, :1,
major:4; };
126 struct {
unsigned long qp:6, :7,
r2:7, :7, x6:6, x3:3, :1,
major:4; };
132 struct {
unsigned long qp:6,
r1:7, :14, x6:6, x3:3, :1,
major:4; };
137 struct {
unsigned long qp:6, imm20a:20, :1, x4:4, x2:2, x3:3,
143 struct {
unsigned long qp:6, :7,
r2:7, :7, x6:6, x3:3, :1,
major:4; };
148 struct {
unsigned long qp:6, :7,
r2:7,
r3:7, x6:6, x3:3, :1,
major:4; };
153 struct {
unsigned long qp:6,
r1:7, :7,
r3:7, x6:6, x3:3, :1,
major:4; };
158 struct {
unsigned long qp:6, imm:21, x4:4, i2:2, x3:3,
i:1,
major:4; };
163 struct {
unsigned long qp:6, :7,
r2:7,
r3:7, x6:6, x3:3, :1,
major:4; };
168 struct {
unsigned long qp:6,
r1:7, un7:7,
r3:7, x6:6,
169 x3:3, un1:1,
major:4; };
174 struct {
unsigned long qp:6, un14:14,
r3:7, x6:6, x3:3, un1:1,
major:4; };
179 struct {
unsigned long qp:6,
r1:7, un7:7,
r3:7,
x:1, hint:2,
185 struct {
unsigned long qp:6,
r1:7,
r2:7,
r3:7,
x:1, hint:2,
191 struct {
unsigned long qp:6,
r1:7, imm7:7,
r3:7,
i:1, hint:2,
197 struct {
unsigned long qp:6, un7:7,
r2:7,
r3:7,
x:1, hint:2,
203 struct {
unsigned long qp:6, imm7:7,
r2:7,
r3:7,
i:1, hint:2,
209 struct {
unsigned long qp:6,
f1:7, un7:7,
r3:7,
x:1, hint:2,
215 struct {
unsigned long qp:6, :7,
f2:7,
r3:7,
x:1, hint:2,
221 struct {
unsigned long qp:6, imm7:7,
f2:7,
r3:7,
i:1, hint:2,
227 struct {
unsigned long qp:6,
f1:7,
f2:7,
r3:7,
x:1, hint:2,
233 struct {
unsigned long qp:6, :7, imm7:7,
r3:7,
i:1, hint:2,
239 struct {
unsigned long :37,
major:4; }
generic;
/* Mask covering bits 0..40 (a 41-bit field). */
#define MASK_41 ((unsigned long)0x1ffffffffff)

/* Virtual-address memory-attribute encodings (TLB "ma" field). */
#define VA_MATTR_WB		0x0
#define VA_MATTR_UC		0x4
#define VA_MATTR_UCE		0x5
#define VA_MATTR_WC		0x6
#define VA_MATTR_NATPAGE	0x7

/* Page-size helpers; "size"/"ps" below is log2 of the page size in PSIZE/
 * PAGEALIGN, and a byte count in PMASK. */
#define PMASK(size)		(~((size) - 1))
#define PSIZE(size)		(1UL << (size))
/* Clear the low "nbits" bits of ppn. */
#define CLEARLSB(ppn, nbits)	(((ppn) >> (nbits)) << (nbits))
#define PAGEALIGN(va, ps)	CLEARLSB(va, ps)

/* PTE bits that are reserved and must be zero. */
#define PAGE_FLAGS_RV_MASK	(0x2|(0x3UL<<50)|(((1UL<<11)-1)<<53))
#define _PAGE_MA_ST		(0x1 << 2)

#define ARCH_PAGE_SHIFT		12

#define INVALID_TI_TAG		(1UL << 63)

/* Software-defined bits in a virtual-TLB PTE. */
#define VTLB_PTE_P_BIT		0
#define VTLB_PTE_IO_BIT		60
#define VTLB_PTE_IO		(1UL << VTLB_PTE_IO_BIT)
#define VTLB_PTE_P		(1UL << VTLB_PTE_P_BIT)
/*
 * Test whether the virtual region number (va bits 63:61) of @_ifa has
 * its bit set in the 8-bit region bitmap @_tr_regions.
 *
 * Fix: fully parenthesize both macro arguments (the cast previously
 * bound only the first operand of an expression argument) and shift
 * 1UL rather than int 1.
 */
#define vcpu_quick_region_check(_tr_regions, _ifa)			\
	((_tr_regions) & (1UL << (((unsigned long)(_ifa)) >> 61)))
/*
 * Mark the virtual region number (va bits 63:61) of @_ifa present in
 * the 8-bit region bitmap @_tr_regions.
 *
 * Fix: fully parenthesize both macro arguments and shift 1UL rather
 * than int 1, so the expansion is safe for expression arguments.
 */
#define vcpu_quick_region_set(_tr_regions, _ifa)			\
	do {								\
		(_tr_regions) |= (1UL << (((unsigned long)(_ifa)) >> 61)); \
	} while (0)
319 static inline u64 __gpfn_is_io(
u64 gpfn)
#define IA64_NO_FAULT	0

/* Register backing store offset inside the VMM task, 16-byte aligned. */
#define VMM_RBS_OFFSET	((VMM_TASK_SIZE + 15) & ~15)

/* arch.mode_flags bits. */
#define GUEST_IN_PHY	0x1
#define GUEST_PHY_EMUL	0x2

/* The current vcpu pointer lives in the thread-pointer register. */
#define current_vcpu	((struct kvm_vcpu *)ia64_getreg(_IA64_REG_TP))

/* Virtual region number field: va bits 63:61. */
#define VRN_MASK	0xe000000000000000

/* Interrupt-masking classification results. */
#define IRQ_NO_MASKED		0
#define IRQ_MASKED_BY_VTPR	1
#define IRQ_MASKED_BY_INSVC	2

#define PTA_BASE_SHIFT	15

#define IA64_PSR_VM_BIT	46
#define IA64_PSR_VM	(__IA64_UL(1) << IA64_PSR_VM_BIT)

#define IA64_IFS_V_BIT	63
#define IA64_IFS_V	(__IA64_UL(1) << IA64_IFS_V_BIT)

/* PTE flag sets used for physical-mode mappings. */
#define PHY_PAGE_UC	(_PAGE_A|_PAGE_D|_PAGE_P|_PAGE_MA_UC|_PAGE_AR_RWX)
#define PHY_PAGE_WB	(_PAGE_A|_PAGE_D|_PAGE_P|_PAGE_MA_WB|_PAGE_AR_RWX)
376 #include <asm/gcc_intrin.h>
378 #define is_physical_mode(v) \
379 ((v->arch.mode_flags) & GUEST_IN_PHY)
381 #define is_virtual_mode(v) \
382 (!is_physical_mode(v))
384 #define MODE_IND(psr) \
385 (((psr).it << 2) + ((psr).dt << 1) + (psr).rt)
388 #define _vmm_raw_spin_lock(x) do {}while(0)
389 #define _vmm_raw_spin_unlock(x) do {}while(0)
392 volatile unsigned int lock;
394 #define _vmm_raw_spin_lock(x) \
396 __u32 *ia64_spinlock_ptr = (__u32 *) (x); \
397 __u64 ia64_spinlock_val; \
398 ia64_spinlock_val = ia64_cmpxchg4_acq(ia64_spinlock_ptr, 1, 0);\
399 if (unlikely(ia64_spinlock_val)) { \
401 while (*ia64_spinlock_ptr) \
403 ia64_spinlock_val = \
404 ia64_cmpxchg4_acq(ia64_spinlock_ptr, 1, 0);\
405 } while (ia64_spinlock_val); \
409 #define _vmm_raw_spin_unlock(x) \
411 ((vmm_spinlock_t *)x)->lock = 0; } \
424 unsigned long off : 60;
431 #define __kvm_pa(x) ({union kvm_va _v; _v.l = (long) (x); \
432 _v.f.reg = 0; _v.l; })
433 #define __kvm_va(x) ({union kvm_va _v; _v.l = (long) (x); \
434 _v.f.reg = -1; _v.p; })
436 #define _REGION_ID(x) ({union ia64_rr _v; _v.val = (long)(x); \
438 #define _REGION_PAGE_SIZE(x) ({union ia64_rr _v; _v.val = (long)(x); \
440 #define _REGION_HW_WALKER(x) ({union ia64_rr _v; _v.val = (long)(x); \
/* Access a field of the vcpu's VPD (presumably the virtual processor
 * descriptor — confirm against the arch definition). */
#define VCPU(_v, _x)	((_v)->arch.vpd->_x)
/* Access a field of the vcpu's arch-private state. */
#define VMX(_v, _x)	((_v)->arch._x)

#define VLSAPIC_INSVC(vcpu, i)	((vcpu)->arch.insvc[i])
#define VLSAPIC_XTP(_v)		VMX(_v, xtp)
/* Extract the 6-bit page-size field (bits 7:2) from an ITIR value. */
static inline unsigned long itir_ps(unsigned long itir)
{
	unsigned long ps = (itir >> 2) & 0x3f;

	return ps;
}
462 static inline u64 vcpu_get_itir(
struct kvm_vcpu *vcpu)
464 return ((
u64)
VCPU(vcpu, itir));
472 static inline u64 vcpu_get_ifa(
struct kvm_vcpu *vcpu)
482 static inline u64 vcpu_get_iva(
struct kvm_vcpu *vcpu)
487 static inline u64 vcpu_get_pta(
struct kvm_vcpu *vcpu)
492 static inline u64 vcpu_get_lid(
struct kvm_vcpu *vcpu)
497 static inline u64 vcpu_get_tpr(
struct kvm_vcpu *vcpu)
502 static inline u64 vcpu_get_eoi(
struct kvm_vcpu *vcpu)
507 static inline u64 vcpu_get_irr0(
struct kvm_vcpu *vcpu)
512 static inline u64 vcpu_get_irr1(
struct kvm_vcpu *vcpu)
517 static inline u64 vcpu_get_irr2(
struct kvm_vcpu *vcpu)
522 static inline u64 vcpu_get_irr3(
struct kvm_vcpu *vcpu)
570 return vcpu->
arch.vrr[reg>>61];
624 static inline unsigned long vrrtomrr(
unsigned long val)
628 rr.rid = (rr.rid << 4) | 0
xe;
636 static inline int highest_bits(
int *
dat)
642 for (i = 7; i >= 0 ; i--) {
646 return i * 32 + bitnum - 1;
656 static inline int is_higher_irq(
int pending,
int inservice)
658 return ((pending > inservice)
/*
 * An interrupt's priority class is its vector's high nibble.
 * Return nonzero when @pending's class is strictly above @mic.
 */
static inline int is_higher_class(int pending, int mic)
{
	int pending_class = pending >> 4;

	return pending_class > mic;
}
672 static inline int highest_pending_irq(
struct kvm_vcpu *vcpu)
679 return highest_bits((
int *)&
VCPU(vcpu,
irr[0]));
682 static inline int highest_inservice_irq(
struct kvm_vcpu *vcpu)
689 return highest_bits((
int *)&(
VMX(vcpu, insvc[0])));
714 u64 va,
int is_data);
716 u64 ps,
int is_data);