#include <linux/hrtimer.h>
#include <linux/types.h>
#include <linux/string.h>

#include <asm/byteorder.h>
#define OP_31_XOP_TRAP      4
#define OP_31_XOP_LWZX      23
#define OP_31_XOP_TRAP_64   68
#define OP_31_XOP_LBZX      87
#define OP_31_XOP_STWX      151
#define OP_31_XOP_STBX      215
#define OP_31_XOP_LBZUX     119
#define OP_31_XOP_STBUX     247
#define OP_31_XOP_LHZX      279
#define OP_31_XOP_LHZUX     311
#define OP_31_XOP_MFSPR     339
#define OP_31_XOP_LHAX      343
#define OP_31_XOP_STHX      407
#define OP_31_XOP_STHUX     439
#define OP_31_XOP_MTSPR     467
#define OP_31_XOP_DCBI      470
#define OP_31_XOP_LWBRX     534
#define OP_31_XOP_TLBSYNC   566
#define OP_31_XOP_STWBRX    662
#define OP_31_XOP_LHBRX     790
#define OP_31_XOP_STHBRX    918
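/*
 * Illustrative sketch (not part of the original file): the emulation switch
 * below keys on the primary opcode and, for primary opcode 31, on the 10-bit
 * X-form extended opcode that the values above name.  With the standard
 * PowerPC encoding those fields can be extracted as shown here; the real
 * code relies on get_op()/get_xop() for this.
 */
#if 0
static inline int example_get_op(u32 inst)
{
	return inst >> 26;		/* primary opcode, bits 0..5 */
}

static inline int example_get_xop(u32 inst)
{
	return (inst >> 1) & 0x3ff;	/* extended opcode, bits 21..30 */
}

/* e.g. lwzx r3,r4,r5 encodes as 0x7c64282e: op 31, xop 23 (OP_31_XOP_LWZX). */
#endif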
	unsigned long dec_nsec;
	unsigned long long dec_time;

#ifdef CONFIG_PPC_BOOK3S
	/* POWER4+ raises a decrementer interrupt whenever DEC is negative */
	if (vcpu->arch.dec & 0x80000000) {
		kvmppc_core_queue_dec(vcpu);
		return;
	}
#endif

	/* On BookE, DEC == 0 is as good as the decrementer being disabled */
	if (vcpu->arch.dec == 0)
		return;

	/* Convert the guest DEC value (in timebase ticks) into an hrtimer
	 * delay, and remember the timebase at which DEC was written so the
	 * remaining value can be reconstructed later. */
	dec_time = vcpu->arch.dec;

	vcpu->arch.dec_jiffies = get_tb();
}

u32 kvmppc_get_dec(struct kvm_vcpu *vcpu, u64 tb)
{
	/* Remaining DEC: the value the guest wrote, minus the timebase ticks
	 * that have elapsed since the write. */
	u64 jd = tb - vcpu->arch.dec_jiffies;

	if (vcpu->arch.dec < jd)
		return 0;

	return vcpu->arch.dec - jd;
}
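/*
 * Worked example (illustration only, with assumed numbers): the decrementer
 * counts down at the timebase frequency, so a guest DEC value is turned into
 * wall-clock time by dividing by the timebase rate.
 */
#if 0
	/* Assuming tb_ticks_per_usec == 512 (a 512 MHz timebase), a guest
	 * write of 0x10000000 ticks corresponds to roughly half a second: */
	u64 ns = (u64)0x10000000 * 1000 / 512;	/* 524288000 ns ~= 0.52 s */
#endif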
	/* Decode the register and SPR fields of the last guest instruction */
	u32 inst = kvmppc_get_last_inst(vcpu);
	int ra = get_ra(inst);
	int rs = get_rs(inst);
	int rt = get_rt(inst);
	int sprn = get_sprn(inst);
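/*
 * Aside (illustration only, not used by this file): in mfspr/mtspr encodings
 * the 10-bit SPR number is stored with its two 5-bit halves swapped, so the
 * decode has to reassemble it rather than read a contiguous field.  A minimal
 * sketch of what a get_sprn()-style helper does:
 */
#if 0
static inline int example_get_sprn(u32 inst)
{
	return ((inst >> 16) & 0x1f) | ((inst >> 6) & 0x3e0);
}
/* e.g. mfspr r3,SPRG0 (0x7c7042a6) decodes to sprn == 272 == SPRN_SPRG0. */
#endif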
	pr_debug("Emulating opcode %d / %d\n", get_op(inst), get_xop(inst));
	switch (get_op(inst)) {
	case OP_TRAP:
#ifdef CONFIG_PPC_BOOK3S
		kvmppc_core_queue_program(vcpu, SRR1_PROGTRAP);
#else
		kvmppc_core_queue_program(vcpu,
					  vcpu->arch.shared->esr | ESR_PTR);
#endif
		break;
	case 31:
		switch (get_xop(inst)) {
		case OP_31_XOP_TRAP:
#ifdef CONFIG_PPC_BOOK3S
			kvmppc_core_queue_program(vcpu, SRR1_PROGTRAP);
#else
			kvmppc_core_queue_program(vcpu,
					vcpu->arch.shared->esr | ESR_PTR);
#endif
			break;
		/*
		 * Indexed loads and stores: stores pass the value of rs to
		 * kvmppc_handle_store(), and the update forms (lbzux, stbux,
		 * lhzux, ...) additionally write the accessed effective
		 * address back into ra.
		 */
			kvmppc_set_gpr(vcpu, ra, vcpu->arch.vaddr_accessed);

						       kvmppc_get_gpr(vcpu, rs),
						       kvmppc_get_gpr(vcpu, rs),
						       kvmppc_get_gpr(vcpu, rs),

			kvmppc_set_gpr(vcpu, ra, vcpu->arch.vaddr_accessed);

			kvmppc_set_gpr(vcpu, ra, vcpu->arch.vaddr_accessed);
		case OP_31_XOP_MFSPR:
			switch (sprn) {
			case SPRN_SRR0:
				spr_val = vcpu->arch.shared->srr0;
				break;
			case SPRN_SRR1:
				spr_val = vcpu->arch.shared->srr1;
				break;
			case SPRN_PVR:
				spr_val = vcpu->arch.pvr;
				break;
			case SPRN_TBWL:
				spr_val = get_tb() >> 32;
				break;
			case SPRN_SPRG0:
				spr_val = vcpu->arch.shared->sprg0;
				break;
			case SPRN_SPRG1:
				spr_val = vcpu->arch.shared->sprg1;
				break;
			case SPRN_SPRG2:
				spr_val = vcpu->arch.shared->sprg2;
				break;
			case SPRN_SPRG3:
				spr_val = vcpu->arch.shared->sprg3;
				break;
			}

			/* the value read is deposited in rt */
			kvmppc_set_gpr(vcpu, rt, spr_val);
			break;
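/*
 * Example walk-through (illustration only): if the guest executes
 * "mfspr r5, SRR0" (op 31, xop 339 == OP_31_XOP_MFSPR), decode yields
 * rt == 5 and sprn == SPRN_SRR0; the switch above picks up
 * vcpu->arch.shared->srr0 into spr_val, kvmppc_set_gpr() deposits it in
 * guest r5, and the guest PC is later advanced past the instruction.
 */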
		/*
		 * sthx / sthux store a halfword taken from rs; the update
		 * form also writes the accessed effective address back to ra.
		 */
						       kvmppc_get_gpr(vcpu, rs),

						       kvmppc_get_gpr(vcpu, rs),
			kvmppc_set_gpr(vcpu, ra, vcpu->arch.vaddr_accessed);
		case OP_31_XOP_MTSPR:
			spr_val = kvmppc_get_gpr(vcpu, rs);
			switch (sprn) {
			case SPRN_SRR0:
				vcpu->arch.shared->srr0 = spr_val;
				break;
			case SPRN_SRR1:
				vcpu->arch.shared->srr1 = spr_val;
				break;

			/* writes to the timebase halves and to MSSSR0 are
			 * silently ignored */
			case SPRN_TBWL: break;
			case SPRN_TBWU: break;
			case SPRN_MSSSR0: break;

			case SPRN_DEC:
				vcpu->arch.dec = spr_val;
				kvmppc_emulate_dec(vcpu);
				break;

			case SPRN_SPRG0:
				vcpu->arch.shared->sprg0 = spr_val;
				break;
			case SPRN_SPRG1:
				vcpu->arch.shared->sprg1 = spr_val;
				break;
			case SPRN_SPRG2:
				vcpu->arch.shared->sprg2 = spr_val;
				break;
			case SPRN_SPRG3:
				vcpu->arch.shared->sprg3 = spr_val;
				break;
			}
			break;
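/*
 * Example walk-through (illustration only): a guest "mtspr DEC, r7"
 * (op 31, xop 467 == OP_31_XOP_MTSPR) arrives here with rs == 7 and
 * sprn == SPRN_DEC.  spr_val is read from guest r7, stored into
 * vcpu->arch.dec, and the decrementer emulation above re-arms the
 * hrtimer so the interrupt fires when the programmed interval expires.
 */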
	/*
	 * The remaining load and store forms all follow the same pattern:
	 * loads land in rt via kvmppc_handle_load() or kvmppc_handle_loads(),
	 * stores pass the value of rs to kvmppc_handle_store(), and every
	 * update form finishes by writing the accessed effective address
	 * back into ra.
	 */
					       kvmppc_get_gpr(vcpu, rs),
					       kvmppc_get_gpr(vcpu, rs),
		kvmppc_set_gpr(vcpu, ra, vcpu->arch.vaddr_accessed);
		kvmppc_set_gpr(vcpu, ra, vcpu->arch.vaddr_accessed);
					       kvmppc_get_gpr(vcpu, rs),
					       kvmppc_get_gpr(vcpu, rs),
					       kvmppc_get_gpr(vcpu, rs),
		kvmppc_set_gpr(vcpu, ra, vcpu->arch.vaddr_accessed);
					       kvmppc_get_gpr(vcpu, rs),
					       kvmppc_get_gpr(vcpu, rs),
		kvmppc_set_gpr(vcpu, ra, vcpu->arch.vaddr_accessed);
		kvmppc_set_gpr(vcpu, ra, vcpu->arch.vaddr_accessed);
		kvmppc_set_gpr(vcpu, ra, vcpu->arch.vaddr_accessed);
					       kvmppc_get_gpr(vcpu, rs),
					       kvmppc_get_gpr(vcpu, rs),
		kvmppc_set_gpr(vcpu, ra, vcpu->arch.vaddr_accessed);
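/*
 * Example (illustration only): for a guest "lwzu r4, 8(r5)" that ends up
 * here, rt == 4 and ra == 5.  The word load itself goes through
 * kvmppc_handle_load(), and because lwzu is an update form, ra (r5) is then
 * overwritten with the effective address r5 + 8 that was accessed, which is
 * what the writes of vcpu->arch.vaddr_accessed above implement.
 */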
493 "(op %d xop %d)\n", inst, get_op(inst), get_xop(inst));
498 trace_kvm_ppc_instr(inst, kvmppc_get_pc(vcpu), emulated);
502 kvmppc_set_pc(vcpu, kvmppc_get_pc(vcpu) + 4);
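/*
 * Note (illustration only): PowerPC instructions are a fixed 32 bits wide,
 * so stepping the guest past the instruction that was just emulated is
 * always a 4-byte adjustment of the guest PC, as done above.
 */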