24 #include <asm/switch_to.h>
/*
 * Extended-opcode (XOP) values for the instructions this file emulates.
 * The emulation path decodes the instruction word with get_op() (primary
 * opcode) and get_xop() (extended opcode) and switches on these values.
 *
 * NOTE(review): the stray decimal prefixes that preceded each #define
 * (an extraction/paste artifact) have been removed — they made every
 * line a syntax error. The constant values themselves are unchanged.
 */

/* Primary opcode 19: return-from-interrupt forms. */
#define OP_19_XOP_RFID		18
#define OP_19_XOP_RFI		50

/* Primary opcode 31: MSR access, segment/SLB/TLB management, cache ops. */
#define OP_31_XOP_MFMSR		83
#define OP_31_XOP_MTMSR		146
#define OP_31_XOP_MTMSRD	178
#define OP_31_XOP_MTSR		210
#define OP_31_XOP_MTSRIN	242
#define OP_31_XOP_TLBIEL	274
#define OP_31_XOP_TLBIE		306
#define OP_31_XOP_SLBMTE	402
#define OP_31_XOP_SLBIE		434
#define OP_31_XOP_SLBIA		498
#define OP_31_XOP_MFSR		595
#define OP_31_XOP_MFSRIN	659
#define OP_31_XOP_DCBA		758
#define OP_31_XOP_SLBMFEV	851
#define OP_31_XOP_EIOIO		854
#define OP_31_XOP_SLBMFEE	915
#define OP_31_XOP_DCBZ		1010
87 unsigned int inst,
int *advance)
90 int rt = get_rt(inst);
91 int rs = get_rs(inst);
92 int ra = get_ra(inst);
93 int rb = get_rb(inst);
95 switch (get_op(inst)) {
97 switch (get_xop(inst)) {
100 kvmppc_set_pc(vcpu, vcpu->
arch.shared->srr0);
111 switch (get_xop(inst)) {
113 kvmppc_set_gpr(vcpu, rt, vcpu->
arch.shared->msr);
117 ulong rs_val = kvmppc_get_gpr(vcpu, rs);
118 if (inst & 0x10000) {
122 vcpu->
arch.shared->msr = new_msr;
134 srnum = kvmppc_get_field(inst, 12 + 32, 15 + 32);
135 if (vcpu->
arch.mmu.mfsrin) {
137 sr = vcpu->
arch.mmu.mfsrin(vcpu, srnum);
138 kvmppc_set_gpr(vcpu, rt, sr);
146 srnum = (kvmppc_get_gpr(vcpu, rb) >> 28) & 0xf;
147 if (vcpu->
arch.mmu.mfsrin) {
149 sr = vcpu->
arch.mmu.mfsrin(vcpu, srnum);
150 kvmppc_set_gpr(vcpu, rt, sr);
155 vcpu->
arch.mmu.mtsrin(vcpu,
157 kvmppc_get_gpr(vcpu, rs));
160 vcpu->
arch.mmu.mtsrin(vcpu,
161 (kvmppc_get_gpr(vcpu, rb) >> 28) & 0xf,
162 kvmppc_get_gpr(vcpu, rs));
167 bool large = (inst & 0x00200000) ?
true :
false;
169 vcpu->
arch.mmu.tlbie(vcpu, addr, large);
175 if (!vcpu->
arch.mmu.slbmte)
178 vcpu->
arch.mmu.slbmte(vcpu,
179 kvmppc_get_gpr(vcpu, rs),
180 kvmppc_get_gpr(vcpu, rb));
183 if (!vcpu->
arch.mmu.slbie)
186 vcpu->
arch.mmu.slbie(vcpu,
187 kvmppc_get_gpr(vcpu, rb));
190 if (!vcpu->
arch.mmu.slbia)
193 vcpu->
arch.mmu.slbia(vcpu);
196 if (!vcpu->
arch.mmu.slbmfee) {
201 rb_val = kvmppc_get_gpr(vcpu, rb);
202 t = vcpu->
arch.mmu.slbmfee(vcpu, rb_val);
203 kvmppc_set_gpr(vcpu, rt, t);
207 if (!vcpu->
arch.mmu.slbmfev) {
212 rb_val = kvmppc_get_gpr(vcpu, rb);
213 t = vcpu->
arch.mmu.slbmfev(vcpu, rb_val);
214 kvmppc_set_gpr(vcpu, rt, t);
222 ulong rb_val = kvmppc_get_gpr(vcpu, rb);
225 u32 zeros[8] = { 0, 0, 0, 0, 0, 0, 0, 0 };
230 ra_val = kvmppc_get_gpr(vcpu, ra);
232 addr = (ra_val + rb_val) & ~31ULL;
233 if (!(vcpu->
arch.shared->msr & MSR_SF))
237 r =
kvmppc_st(vcpu, &addr, 32, zeros,
true);
241 svcpu = svcpu_get(vcpu);
246 dsisr = DSISR_ISSTORE;
248 dsisr |= DSISR_NOHPTE;
249 else if (r == -
EPERM)
250 dsisr |= DSISR_PROTFAULT;
252 vcpu->
arch.shared->dsisr = dsisr;
281 u32 bl = (val >> 2) & 0x7ff;
283 bat->
bepi = val & 0xfffe0000;
284 bat->
vs = (val & 2) ? 1 : 0;
285 bat->
vp = (val & 1) ? 1 : 0;
286 bat->
raw = (bat->
raw & 0xffffffff00000000ULL) | val;
289 bat->
brpn = val & 0xfffe0000;
290 bat->
wimg = (val >> 3) & 0xf;
292 bat->
raw = (bat->
raw & 0x00000000ffffffffULL) | ((
u64)val << 32);
302 case SPRN_IBAT0U ... SPRN_IBAT3L:
303 bat = &vcpu_book3s->
ibat[(sprn - SPRN_IBAT0U) / 2];
305 case SPRN_IBAT4U ... SPRN_IBAT7L:
306 bat = &vcpu_book3s->
ibat[4 + ((sprn - SPRN_IBAT4U) / 2)];
308 case SPRN_DBAT0U ... SPRN_DBAT3L:
309 bat = &vcpu_book3s->
dbat[(sprn - SPRN_DBAT0U) / 2];
311 case SPRN_DBAT4U ... SPRN_DBAT7L:
312 bat = &vcpu_book3s->
dbat[4 + ((sprn - SPRN_DBAT4U) / 2)];
329 to_book3s(vcpu)->sdr1 = spr_val;
332 vcpu->
arch.shared->dsisr = spr_val;
335 vcpu->
arch.shared->dar = spr_val;
338 to_book3s(vcpu)->hior = spr_val;
340 case SPRN_IBAT0U ... SPRN_IBAT3L:
341 case SPRN_IBAT4U ... SPRN_IBAT7L:
342 case SPRN_DBAT0U ... SPRN_DBAT3L:
343 case SPRN_DBAT4U ... SPRN_DBAT7L:
345 struct kvmppc_bat *bat = kvmppc_find_bat(vcpu, sprn);
355 to_book3s(vcpu)->hid[0] = spr_val;
358 to_book3s(vcpu)->hid[1] = spr_val;
361 to_book3s(vcpu)->hid[2] = spr_val;
363 case SPRN_HID2_GEKKO:
364 to_book3s(vcpu)->hid[2] = spr_val;
366 switch (vcpu->
arch.pvr) {
378 }
else if (spr_val & (1 << 29)) {
388 case SPRN_HID4_GEKKO:
389 to_book3s(vcpu)->hid[4] = spr_val;
392 to_book3s(vcpu)->hid[5] = spr_val;
394 if (vcpu->
arch.mmu.is_dcbz32(vcpu) &&
406 to_book3s(vcpu)->gqr[sprn -
SPRN_GQR0] = spr_val;
415 case SPRN_MMCR0_GEKKO:
416 case SPRN_MMCR1_GEKKO:
417 case SPRN_PMC1_GEKKO:
418 case SPRN_PMC2_GEKKO:
419 case SPRN_PMC3_GEKKO:
420 case SPRN_PMC4_GEKKO:
421 case SPRN_WPAR_GEKKO:
440 case SPRN_IBAT0U ... SPRN_IBAT3L:
441 case SPRN_IBAT4U ... SPRN_IBAT7L:
442 case SPRN_DBAT0U ... SPRN_DBAT3L:
443 case SPRN_DBAT4U ... SPRN_DBAT7L:
445 struct kvmppc_bat *bat = kvmppc_find_bat(vcpu, sprn);
448 *spr_val = bat->
raw >> 32;
457 *spr_val = to_book3s(vcpu)->sdr1;
460 *spr_val = vcpu->
arch.shared->dsisr;
463 *spr_val = vcpu->
arch.shared->dar;
466 *spr_val = to_book3s(vcpu)->hior;
469 *spr_val = to_book3s(vcpu)->hid[0];
472 *spr_val = to_book3s(vcpu)->hid[1];
475 case SPRN_HID2_GEKKO:
476 *spr_val = to_book3s(vcpu)->hid[2];
479 case SPRN_HID4_GEKKO:
480 *spr_val = to_book3s(vcpu)->hid[4];
483 *spr_val = to_book3s(vcpu)->hid[5];
497 *spr_val = to_book3s(vcpu)->gqr[sprn -
SPRN_GQR0];
505 case SPRN_MMCR0_GEKKO:
506 case SPRN_MMCR1_GEKKO:
507 case SPRN_PMC1_GEKKO:
508 case SPRN_PMC2_GEKKO:
509 case SPRN_PMC3_GEKKO:
510 case SPRN_PMC4_GEKKO:
511 case SPRN_WPAR_GEKKO:
543 switch (get_op(inst)) {
549 dsisr |= (inst >> 12) & 0x4000;
550 dsisr |= (inst >> 17) & 0x3c00;
554 dsisr |= (inst << 14) & 0x18000;
555 dsisr |= (inst << 8) & 0x04000;
556 dsisr |= (inst << 3) & 0x03c00;
563 dsisr |= (inst >> 16) & 0x03ff;
574 switch (get_op(inst)) {
580 dar = kvmppc_get_gpr(vcpu, ra);
585 dar = kvmppc_get_gpr(vcpu, ra);
586 dar += kvmppc_get_gpr(vcpu, rb);