KVM: PPC: Convert DAR to shared page.
pandora-kernel.git: arch/powerpc/kvm/book3s_paired_singles.c
/*
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License, version 2, as
 * published by the Free Software Foundation.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301, USA.
 *
 * Copyright Novell Inc 2010
 *
 * Authors: Alexander Graf <agraf@suse.de>
 */

#include <asm/kvm.h>
#include <asm/kvm_ppc.h>
#include <asm/disassemble.h>
#include <asm/kvm_book3s.h>
#include <asm/kvm_fpu.h>
#include <asm/reg.h>
#include <asm/cacheflush.h>
#include <linux/vmalloc.h>

/* #define DEBUG */

#ifdef DEBUG
#define dprintk printk
#else
#define dprintk(...) do { } while(0);
#endif

#define OP_LFS                  48
#define OP_LFSU                 49
#define OP_LFD                  50
#define OP_LFDU                 51
#define OP_STFS                 52
#define OP_STFSU                53
#define OP_STFD                 54
#define OP_STFDU                55
#define OP_PSQ_L                56
#define OP_PSQ_LU               57
#define OP_PSQ_ST               60
#define OP_PSQ_STU              61
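/*
 * Primary opcodes (instruction bits 0..5): 48..55 are the scalar FPU
 * load/store D forms (lfs/lfsu/lfd/lfdu/stfs/stfsu/stfd/stfdu), while
 * 56/57 and 60/61 are the Gekko/Broadway paired-single quantized
 * load/store forms (psq_l/psq_lu/psq_st/psq_stu).
 */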

#define OP_31_LFSX              535
#define OP_31_LFSUX             567
#define OP_31_LFDX              599
#define OP_31_LFDUX             631
#define OP_31_STFSX             663
#define OP_31_STFSUX            695
#define OP_31_STFX              727
#define OP_31_STFUX             759
#define OP_31_LWIZX             887
#define OP_31_STFIWX            983

#define OP_59_FADDS             21
#define OP_59_FSUBS             20
#define OP_59_FSQRTS            22
#define OP_59_FDIVS             18
#define OP_59_FRES              24
#define OP_59_FMULS             25
#define OP_59_FRSQRTES          26
#define OP_59_FMSUBS            28
#define OP_59_FMADDS            29
#define OP_59_FNMSUBS           30
#define OP_59_FNMADDS           31

#define OP_63_FCMPU             0
#define OP_63_FCPSGN            8
#define OP_63_FRSP              12
#define OP_63_FCTIW             14
#define OP_63_FCTIWZ            15
#define OP_63_FDIV              18
#define OP_63_FADD              21
#define OP_63_FSQRT             22
#define OP_63_FSEL              23
#define OP_63_FRE               24
#define OP_63_FMUL              25
#define OP_63_FRSQRTE           26
#define OP_63_FMSUB             28
#define OP_63_FMADD             29
#define OP_63_FNMSUB            30
#define OP_63_FNMADD            31
#define OP_63_FCMPO             32
#define OP_63_MTFSB1            38 // XXX
#define OP_63_FSUB              20
#define OP_63_FNEG              40
#define OP_63_MCRFS             64
#define OP_63_MTFSB0            70
#define OP_63_FMR               72
#define OP_63_MTFSFI            134
#define OP_63_FABS              264
#define OP_63_MFFS              583
#define OP_63_MTFSF             711

#define OP_4X_PS_CMPU0          0
#define OP_4X_PSQ_LX            6
#define OP_4XW_PSQ_STX          7
#define OP_4A_PS_SUM0           10
#define OP_4A_PS_SUM1           11
#define OP_4A_PS_MULS0          12
#define OP_4A_PS_MULS1          13
#define OP_4A_PS_MADDS0         14
#define OP_4A_PS_MADDS1         15
#define OP_4A_PS_DIV            18
#define OP_4A_PS_SUB            20
#define OP_4A_PS_ADD            21
#define OP_4A_PS_SEL            23
#define OP_4A_PS_RES            24
#define OP_4A_PS_MUL            25
#define OP_4A_PS_RSQRTE         26
#define OP_4A_PS_MSUB           28
#define OP_4A_PS_MADD           29
#define OP_4A_PS_NMSUB          30
#define OP_4A_PS_NMADD          31
#define OP_4X_PS_CMPO0          32
#define OP_4X_PSQ_LUX           38
#define OP_4XW_PSQ_STUX         39
#define OP_4X_PS_NEG            40
#define OP_4X_PS_CMPU1          64
#define OP_4X_PS_MR             72
#define OP_4X_PS_CMPO1          96
#define OP_4X_PS_NABS           136
#define OP_4X_PS_ABS            264
#define OP_4X_PS_MERGE00        528
#define OP_4X_PS_MERGE01        560
#define OP_4X_PS_MERGE10        592
#define OP_4X_PS_MERGE11        624
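/*
 * Primary opcode 4 carries the Gekko paired-single extension.  The extended
 * opcode sits in a different field depending on the form: bits 21..30 for
 * the X forms (compares, moves, merges, psq_lx/lux), bits 25..30 for the XW
 * store-indexed forms (psq_stx/stux) and bits 26..30 for the A (arithmetic)
 * forms, which is why the decoder below switches on three field widths.
 */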

#define SCALAR_NONE             0
#define SCALAR_HIGH             (1 << 0)
#define SCALAR_LOW              (1 << 1)
#define SCALAR_NO_PS0           (1 << 2)
#define SCALAR_NO_PS1           (1 << 3)
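/*
 * SCALAR_HIGH/SCALAR_LOW ask the ps helpers to replicate the second input
 * operand from its PS0 or PS1 half into both result computations (the
 * ps_muls0/1 and ps_sum0/1 style operations), while SCALAR_NO_PS0 and
 * SCALAR_NO_PS1 suppress the write-back of one result half (ps_sum0/1).
 */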

#define GQR_ST_TYPE_MASK        0x00000007
#define GQR_ST_TYPE_SHIFT       0
#define GQR_ST_SCALE_MASK       0x00003f00
#define GQR_ST_SCALE_SHIFT      8
#define GQR_LD_TYPE_MASK        0x00070000
#define GQR_LD_TYPE_SHIFT       16
#define GQR_LD_SCALE_MASK       0x3f000000
#define GQR_LD_SCALE_SHIFT      24

#define GQR_QUANTIZE_FLOAT      0
#define GQR_QUANTIZE_U8         4
#define GQR_QUANTIZE_U16        5
#define GQR_QUANTIZE_S8         6
#define GQR_QUANTIZE_S16        7
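/*
 * Layout of the Gekko graphics quantization registers (GQRs): the low half
 * describes psq stores (type in bits 0..2, scale in bits 8..13), the high
 * half describes psq loads (type in bits 16..18, scale in bits 24..29).
 * For example, a GQR value of 0x00040004 selects unsigned 8-bit
 * quantization with a scale of 0 for both loads and stores.
 */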

#define FPU_LS_SINGLE           0
#define FPU_LS_DOUBLE           1
#define FPU_LS_SINGLE_LOW       2

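/*
 * Mirror the (double-precision) FPR image of a register into its QPR slot.
 * The QPR array holds the PS1 halves as 32-bit singles; calling this after
 * a single-precision result keeps both halves consistent, roughly matching
 * how single-precision results show up in both paired-single slots on the
 * real hardware.
 */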
static inline void kvmppc_sync_qpr(struct kvm_vcpu *vcpu, int rt)
{
        kvm_cvt_df(&vcpu->arch.fpr[rt], &vcpu->arch.qpr[rt], &vcpu->arch.fpscr);
}

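/*
 * Raise a data storage interrupt for a faulting emulated access: clear the
 * relevant MSR bits, report the effective address in the shared-page DAR,
 * compute a page-fault DSISR (bit 33, plus bit 38 when the access was a
 * store) and queue the DSI so the guest sees it on the next delivery.
 */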
static void kvmppc_inject_pf(struct kvm_vcpu *vcpu, ulong eaddr, bool is_store)
{
        u64 dsisr;
        struct kvm_vcpu_arch_shared *shared = vcpu->arch.shared;

        shared->msr = kvmppc_set_field(shared->msr, 33, 36, 0);
        shared->msr = kvmppc_set_field(shared->msr, 42, 47, 0);
        shared->dar = eaddr;
        /* Page Fault */
        dsisr = kvmppc_set_field(0, 33, 33, 1);
        if (is_store)
                shared->dsisr = kvmppc_set_field(dsisr, 38, 38, 1);
        kvmppc_book3s_queue_irqprio(vcpu, BOOK3S_INTERRUPT_DATA_STORAGE);
}

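/*
 * Emulate a scalar FPU load: translate and read through kvmppc_ld(), inject
 * a DSI on translation failure, hand the access to the MMIO path when it
 * does not hit RAM, and otherwise place the value in the FPR (converting
 * singles to the double-precision register image) and mirror it into QPR.
 */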
static int kvmppc_emulate_fpr_load(struct kvm_run *run, struct kvm_vcpu *vcpu,
                                   int rs, ulong addr, int ls_type)
{
        int emulated = EMULATE_FAIL;
        int r;
        char tmp[8];
        int len = sizeof(u32);

        if (ls_type == FPU_LS_DOUBLE)
                len = sizeof(u64);

        /* read from memory */
        r = kvmppc_ld(vcpu, &addr, len, tmp, true);
        vcpu->arch.paddr_accessed = addr;

        if (r < 0) {
                kvmppc_inject_pf(vcpu, addr, false);
                goto done_load;
        } else if (r == EMULATE_DO_MMIO) {
                emulated = kvmppc_handle_load(run, vcpu, KVM_REG_FPR | rs, len, 1);
                goto done_load;
        }

        emulated = EMULATE_DONE;

        /* put in registers */
        switch (ls_type) {
        case FPU_LS_SINGLE:
                kvm_cvt_fd((u32*)tmp, &vcpu->arch.fpr[rs], &vcpu->arch.fpscr);
                vcpu->arch.qpr[rs] = *((u32*)tmp);
                break;
        case FPU_LS_DOUBLE:
                vcpu->arch.fpr[rs] = *((u64*)tmp);
                break;
        }

        dprintk(KERN_INFO "KVM: FPR_LD [0x%llx] at 0x%lx (%d)\n", *(u64*)tmp,
                          addr, len);

done_load:
        return emulated;
}

static int kvmppc_emulate_fpr_store(struct kvm_run *run, struct kvm_vcpu *vcpu,
                                    int rs, ulong addr, int ls_type)
{
        int emulated = EMULATE_FAIL;
        int r;
        char tmp[8];
        u64 val;
        int len;

        switch (ls_type) {
        case FPU_LS_SINGLE:
                kvm_cvt_df(&vcpu->arch.fpr[rs], (u32*)tmp, &vcpu->arch.fpscr);
                val = *((u32*)tmp);
                len = sizeof(u32);
                break;
        case FPU_LS_SINGLE_LOW:
                *((u32*)tmp) = vcpu->arch.fpr[rs];
                val = vcpu->arch.fpr[rs] & 0xffffffff;
                len = sizeof(u32);
                break;
        case FPU_LS_DOUBLE:
                *((u64*)tmp) = vcpu->arch.fpr[rs];
                val = vcpu->arch.fpr[rs];
                len = sizeof(u64);
                break;
        default:
                val = 0;
                len = 0;
        }

        r = kvmppc_st(vcpu, &addr, len, tmp, true);
        vcpu->arch.paddr_accessed = addr;
        if (r < 0) {
                kvmppc_inject_pf(vcpu, addr, true);
        } else if (r == EMULATE_DO_MMIO) {
                emulated = kvmppc_handle_store(run, vcpu, val, len, 1);
        } else {
                emulated = EMULATE_DONE;
        }

        dprintk(KERN_INFO "KVM: FPR_ST [0x%llx] at 0x%lx (%d)\n",
                          val, addr, len);

        return emulated;
}

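/*
 * Emulate psq_l and friends: load one 32-bit word (w set, PS1 forced to
 * 1.0f as the hardware does) or two consecutive words for PS0/PS1.  Only
 * the unquantized float case is handled here; the GQR index i is accepted
 * but not used for dequantization.
 */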
static int kvmppc_emulate_psq_load(struct kvm_run *run, struct kvm_vcpu *vcpu,
                                   int rs, ulong addr, bool w, int i)
{
        int emulated = EMULATE_FAIL;
        int r;
        float one = 1.0;
        u32 tmp[2];

        /* read from memory */
        if (w) {
                r = kvmppc_ld(vcpu, &addr, sizeof(u32), tmp, true);
                memcpy(&tmp[1], &one, sizeof(u32));
        } else {
                r = kvmppc_ld(vcpu, &addr, sizeof(u32) * 2, tmp, true);
        }
        vcpu->arch.paddr_accessed = addr;
        if (r < 0) {
                kvmppc_inject_pf(vcpu, addr, false);
                goto done_load;
        } else if ((r == EMULATE_DO_MMIO) && w) {
                emulated = kvmppc_handle_load(run, vcpu, KVM_REG_FPR | rs, 4, 1);
                vcpu->arch.qpr[rs] = tmp[1];
                goto done_load;
        } else if (r == EMULATE_DO_MMIO) {
                emulated = kvmppc_handle_load(run, vcpu, KVM_REG_FQPR | rs, 8, 1);
                goto done_load;
        }

        emulated = EMULATE_DONE;

        /* put in registers */
        kvm_cvt_fd(&tmp[0], &vcpu->arch.fpr[rs], &vcpu->arch.fpscr);
        vcpu->arch.qpr[rs] = tmp[1];

        dprintk(KERN_INFO "KVM: PSQ_LD [0x%x, 0x%x] at 0x%lx (%d)\n", tmp[0],
                          tmp[1], addr, w ? 4 : 8);

done_load:
        return emulated;
}

static int kvmppc_emulate_psq_store(struct kvm_run *run, struct kvm_vcpu *vcpu,
                                    int rs, ulong addr, bool w, int i)
{
        int emulated = EMULATE_FAIL;
        int r;
        u32 tmp[2];
        int len = w ? sizeof(u32) : sizeof(u64);

        kvm_cvt_df(&vcpu->arch.fpr[rs], &tmp[0], &vcpu->arch.fpscr);
        tmp[1] = vcpu->arch.qpr[rs];

        r = kvmppc_st(vcpu, &addr, len, tmp, true);
        vcpu->arch.paddr_accessed = addr;
        if (r < 0) {
                kvmppc_inject_pf(vcpu, addr, true);
        } else if ((r == EMULATE_DO_MMIO) && w) {
                emulated = kvmppc_handle_store(run, vcpu, tmp[0], 4, 1);
        } else if (r == EMULATE_DO_MMIO) {
                u64 val = ((u64)tmp[0] << 32) | tmp[1];
                emulated = kvmppc_handle_store(run, vcpu, val, 8, 1);
        } else {
                emulated = EMULATE_DONE;
        }

        dprintk(KERN_INFO "KVM: PSQ_ST [0x%x, 0x%x] at 0x%lx (%d)\n",
                          tmp[0], tmp[1], addr, len);

        return emulated;
}

/*
 * Cuts out inst bits with ordering according to spec.
 * That means the leftmost bit is zero. All given bits are included.
 */
static inline u32 inst_get_field(u32 inst, int msb, int lsb)
{
        return kvmppc_get_field(inst, msb + 32, lsb + 32);
}

/*
 * Replaces inst bits with ordering according to spec.
 */
static inline u32 inst_set_field(u32 inst, int msb, int lsb, int value)
{
        return kvmppc_set_field(inst, msb + 32, lsb + 32, value);
}
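/*
 * The msb/lsb arguments use IBM bit numbering within the 32-bit instruction
 * word (bit 0 is the most significant bit); the +32 maps them onto the
 * 64-bit numbering that kvmppc_get_field()/kvmppc_set_field() expect.  For
 * example, inst_get_field(inst, 21, 30) returns the 10-bit extended opcode
 * of an X-form instruction.
 */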

bool kvmppc_inst_is_paired_single(struct kvm_vcpu *vcpu, u32 inst)
{
        if (!(vcpu->arch.hflags & BOOK3S_HFLAG_PAIRED_SINGLE))
                return false;

        switch (get_op(inst)) {
        case OP_PSQ_L:
        case OP_PSQ_LU:
        case OP_PSQ_ST:
        case OP_PSQ_STU:
        case OP_LFS:
        case OP_LFSU:
        case OP_LFD:
        case OP_LFDU:
        case OP_STFS:
        case OP_STFSU:
        case OP_STFD:
        case OP_STFDU:
                return true;
        case 4:
                /* X form */
                switch (inst_get_field(inst, 21, 30)) {
                case OP_4X_PS_CMPU0:
                case OP_4X_PSQ_LX:
                case OP_4X_PS_CMPO0:
                case OP_4X_PSQ_LUX:
                case OP_4X_PS_NEG:
                case OP_4X_PS_CMPU1:
                case OP_4X_PS_MR:
                case OP_4X_PS_CMPO1:
                case OP_4X_PS_NABS:
                case OP_4X_PS_ABS:
                case OP_4X_PS_MERGE00:
                case OP_4X_PS_MERGE01:
                case OP_4X_PS_MERGE10:
                case OP_4X_PS_MERGE11:
                        return true;
                }
                /* XW form */
                switch (inst_get_field(inst, 25, 30)) {
                case OP_4XW_PSQ_STX:
                case OP_4XW_PSQ_STUX:
                        return true;
                }
                /* A form */
                switch (inst_get_field(inst, 26, 30)) {
                case OP_4A_PS_SUM1:
                case OP_4A_PS_SUM0:
                case OP_4A_PS_MULS0:
                case OP_4A_PS_MULS1:
                case OP_4A_PS_MADDS0:
                case OP_4A_PS_MADDS1:
                case OP_4A_PS_DIV:
                case OP_4A_PS_SUB:
                case OP_4A_PS_ADD:
                case OP_4A_PS_SEL:
                case OP_4A_PS_RES:
                case OP_4A_PS_MUL:
                case OP_4A_PS_RSQRTE:
                case OP_4A_PS_MSUB:
                case OP_4A_PS_MADD:
                case OP_4A_PS_NMSUB:
                case OP_4A_PS_NMADD:
                        return true;
                }
                break;
        case 59:
                switch (inst_get_field(inst, 21, 30)) {
                case OP_59_FADDS:
                case OP_59_FSUBS:
                case OP_59_FDIVS:
                case OP_59_FRES:
                case OP_59_FRSQRTES:
                        return true;
                }
                switch (inst_get_field(inst, 26, 30)) {
                case OP_59_FMULS:
                case OP_59_FMSUBS:
                case OP_59_FMADDS:
                case OP_59_FNMSUBS:
                case OP_59_FNMADDS:
                        return true;
                }
                break;
        case 63:
                switch (inst_get_field(inst, 21, 30)) {
                case OP_63_MTFSB0:
                case OP_63_MTFSB1:
                case OP_63_MTFSF:
                case OP_63_MTFSFI:
                case OP_63_MCRFS:
                case OP_63_MFFS:
                case OP_63_FCMPU:
                case OP_63_FCMPO:
                case OP_63_FNEG:
                case OP_63_FMR:
                case OP_63_FABS:
                case OP_63_FRSP:
                case OP_63_FDIV:
                case OP_63_FADD:
                case OP_63_FSUB:
                case OP_63_FCTIW:
                case OP_63_FCTIWZ:
                case OP_63_FRSQRTE:
                case OP_63_FCPSGN:
                        return true;
                }
                switch (inst_get_field(inst, 26, 30)) {
                case OP_63_FMUL:
                case OP_63_FSEL:
                case OP_63_FMSUB:
                case OP_63_FMADD:
                case OP_63_FNMSUB:
                case OP_63_FNMADD:
                        return true;
                }
                break;
        case 31:
                switch (inst_get_field(inst, 21, 30)) {
                case OP_31_LFSX:
                case OP_31_LFSUX:
                case OP_31_LFDX:
                case OP_31_LFDUX:
                case OP_31_STFSX:
                case OP_31_STFSUX:
                case OP_31_STFX:
                case OP_31_STFUX:
                case OP_31_STFIWX:
                        return true;
                }
                break;
        }

        return false;
}

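/* Extract the signed displacement field of the psq_l/psq_st D forms. */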
static int get_d_signext(u32 inst)
{
        int d = inst & 0x8ff;

        if (d & 0x800)
                return -(d & 0x7ff);

        return (d & 0x7ff);
}

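/*
 * The kvmppc_ps_{one,two,three}_in() helpers run one scalar single-precision
 * host-FPU callback per paired-single half: the PS0 inputs come from the
 * FPRs (converted double to single), the PS1 inputs from the QPR array, and
 * the scalar flags select operand replication and which result halves are
 * written back.  Record (Rc=1) forms are not handled, hence the WARN_ON(rc).
 */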
static int kvmppc_ps_three_in(struct kvm_vcpu *vcpu, bool rc,
                                      int reg_out, int reg_in1, int reg_in2,
                                      int reg_in3, int scalar,
                                      void (*func)(u64 *fpscr,
                                                 u32 *dst, u32 *src1,
                                                 u32 *src2, u32 *src3))
{
        u32 *qpr = vcpu->arch.qpr;
        u64 *fpr = vcpu->arch.fpr;
        u32 ps0_out;
        u32 ps0_in1, ps0_in2, ps0_in3;
        u32 ps1_in1, ps1_in2, ps1_in3;

        /* RC */
        WARN_ON(rc);

        /* PS0 */
        kvm_cvt_df(&fpr[reg_in1], &ps0_in1, &vcpu->arch.fpscr);
        kvm_cvt_df(&fpr[reg_in2], &ps0_in2, &vcpu->arch.fpscr);
        kvm_cvt_df(&fpr[reg_in3], &ps0_in3, &vcpu->arch.fpscr);

        if (scalar & SCALAR_LOW)
                ps0_in2 = qpr[reg_in2];

        func(&vcpu->arch.fpscr, &ps0_out, &ps0_in1, &ps0_in2, &ps0_in3);

        dprintk(KERN_INFO "PS3 ps0 -> f(0x%x, 0x%x, 0x%x) = 0x%x\n",
                          ps0_in1, ps0_in2, ps0_in3, ps0_out);

        if (!(scalar & SCALAR_NO_PS0))
                kvm_cvt_fd(&ps0_out, &fpr[reg_out], &vcpu->arch.fpscr);

        /* PS1 */
        ps1_in1 = qpr[reg_in1];
        ps1_in2 = qpr[reg_in2];
        ps1_in3 = qpr[reg_in3];

        if (scalar & SCALAR_HIGH)
                ps1_in2 = ps0_in2;

        if (!(scalar & SCALAR_NO_PS1))
                func(&vcpu->arch.fpscr, &qpr[reg_out], &ps1_in1, &ps1_in2, &ps1_in3);

        dprintk(KERN_INFO "PS3 ps1 -> f(0x%x, 0x%x, 0x%x) = 0x%x\n",
                          ps1_in1, ps1_in2, ps1_in3, qpr[reg_out]);

        return EMULATE_DONE;
}

static int kvmppc_ps_two_in(struct kvm_vcpu *vcpu, bool rc,
                                    int reg_out, int reg_in1, int reg_in2,
                                    int scalar,
                                    void (*func)(u64 *fpscr,
                                                 u32 *dst, u32 *src1,
                                                 u32 *src2))
{
        u32 *qpr = vcpu->arch.qpr;
        u64 *fpr = vcpu->arch.fpr;
        u32 ps0_out;
        u32 ps0_in1, ps0_in2;
        u32 ps1_out;
        u32 ps1_in1, ps1_in2;

        /* RC */
        WARN_ON(rc);

        /* PS0 */
        kvm_cvt_df(&fpr[reg_in1], &ps0_in1, &vcpu->arch.fpscr);

        if (scalar & SCALAR_LOW)
                ps0_in2 = qpr[reg_in2];
        else
                kvm_cvt_df(&fpr[reg_in2], &ps0_in2, &vcpu->arch.fpscr);

        func(&vcpu->arch.fpscr, &ps0_out, &ps0_in1, &ps0_in2);

        if (!(scalar & SCALAR_NO_PS0)) {
                dprintk(KERN_INFO "PS2 ps0 -> f(0x%x, 0x%x) = 0x%x\n",
                                  ps0_in1, ps0_in2, ps0_out);

                kvm_cvt_fd(&ps0_out, &fpr[reg_out], &vcpu->arch.fpscr);
        }

        /* PS1 */
        ps1_in1 = qpr[reg_in1];
        ps1_in2 = qpr[reg_in2];

        if (scalar & SCALAR_HIGH)
                ps1_in2 = ps0_in2;

        func(&vcpu->arch.fpscr, &ps1_out, &ps1_in1, &ps1_in2);

        if (!(scalar & SCALAR_NO_PS1)) {
                qpr[reg_out] = ps1_out;

                dprintk(KERN_INFO "PS2 ps1 -> f(0x%x, 0x%x) = 0x%x\n",
                                  ps1_in1, ps1_in2, qpr[reg_out]);
        }

        return EMULATE_DONE;
}

static int kvmppc_ps_one_in(struct kvm_vcpu *vcpu, bool rc,
                                    int reg_out, int reg_in,
                                    void (*func)(u64 *t,
                                                 u32 *dst, u32 *src1))
{
        u32 *qpr = vcpu->arch.qpr;
        u64 *fpr = vcpu->arch.fpr;
        u32 ps0_out, ps0_in;
        u32 ps1_in;

        /* RC */
        WARN_ON(rc);

        /* PS0 */
        kvm_cvt_df(&fpr[reg_in], &ps0_in, &vcpu->arch.fpscr);
        func(&vcpu->arch.fpscr, &ps0_out, &ps0_in);

        dprintk(KERN_INFO "PS1 ps0 -> f(0x%x) = 0x%x\n",
                          ps0_in, ps0_out);

        kvm_cvt_fd(&ps0_out, &fpr[reg_out], &vcpu->arch.fpscr);

        /* PS1 */
        ps1_in = qpr[reg_in];
        func(&vcpu->arch.fpscr, &qpr[reg_out], &ps1_in);

        dprintk(KERN_INFO "PS1 ps1 -> f(0x%x) = 0x%x\n",
                          ps1_in, qpr[reg_out]);

        return EMULATE_DONE;
}

int kvmppc_emulate_paired_single(struct kvm_run *run, struct kvm_vcpu *vcpu)
{
        u32 inst = kvmppc_get_last_inst(vcpu);
        enum emulation_result emulated = EMULATE_DONE;

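        /*
         * Decode the register fields once: ax_rd/ax_ra/ax_rb/ax_rc are the
         * RT/RA/RB/RC fields and full_d is the sign-extended 16-bit
         * displacement used by the D-form FPU loads and stores below.
         */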
        int ax_rd = inst_get_field(inst, 6, 10);
        int ax_ra = inst_get_field(inst, 11, 15);
        int ax_rb = inst_get_field(inst, 16, 20);
        int ax_rc = inst_get_field(inst, 21, 25);
        short full_d = inst_get_field(inst, 16, 31);

        u64 *fpr_d = &vcpu->arch.fpr[ax_rd];
        u64 *fpr_a = &vcpu->arch.fpr[ax_ra];
        u64 *fpr_b = &vcpu->arch.fpr[ax_rb];
        u64 *fpr_c = &vcpu->arch.fpr[ax_rc];

        bool rcomp = (inst & 1) ? true : false;
        u32 cr = kvmppc_get_cr(vcpu);
#ifdef DEBUG
        int i;
#endif

        if (!kvmppc_inst_is_paired_single(vcpu, inst))
                return EMULATE_FAIL;

        if (!(vcpu->arch.shared->msr & MSR_FP)) {
                kvmppc_book3s_queue_irqprio(vcpu, BOOK3S_INTERRUPT_FP_UNAVAIL);
                return EMULATE_AGAIN;
        }

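        /*
         * Save the guest's live FPU state back into the vcpu and enable
         * kernel FP so the fps_ and fpd_ helpers below can use the host
         * FPU; preemption stays disabled for the duration of the emulation.
         */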
        kvmppc_giveup_ext(vcpu, MSR_FP);
        preempt_disable();
        enable_kernel_fp();
        /* Do we need to clear FE0 / FE1 here? Don't think so. */

#ifdef DEBUG
        for (i = 0; i < ARRAY_SIZE(vcpu->arch.fpr); i++) {
                u32 f;
                kvm_cvt_df(&vcpu->arch.fpr[i], &f, &vcpu->arch.fpscr);
                dprintk(KERN_INFO "FPR[%d] = 0x%x / 0x%llx    QPR[%d] = 0x%x\n",
                        i, f, vcpu->arch.fpr[i], i, vcpu->arch.qpr[i]);
        }
#endif

        switch (get_op(inst)) {
        case OP_PSQ_L:
        {
                ulong addr = ax_ra ? kvmppc_get_gpr(vcpu, ax_ra) : 0;
                bool w = inst_get_field(inst, 16, 16) ? true : false;
                int i = inst_get_field(inst, 17, 19);

                addr += get_d_signext(inst);
                emulated = kvmppc_emulate_psq_load(run, vcpu, ax_rd, addr, w, i);
                break;
        }
        case OP_PSQ_LU:
        {
                ulong addr = kvmppc_get_gpr(vcpu, ax_ra);
                bool w = inst_get_field(inst, 16, 16) ? true : false;
                int i = inst_get_field(inst, 17, 19);

                addr += get_d_signext(inst);
                emulated = kvmppc_emulate_psq_load(run, vcpu, ax_rd, addr, w, i);

                if (emulated == EMULATE_DONE)
                        kvmppc_set_gpr(vcpu, ax_ra, addr);
                break;
        }
        case OP_PSQ_ST:
        {
                ulong addr = ax_ra ? kvmppc_get_gpr(vcpu, ax_ra) : 0;
                bool w = inst_get_field(inst, 16, 16) ? true : false;
                int i = inst_get_field(inst, 17, 19);

                addr += get_d_signext(inst);
                emulated = kvmppc_emulate_psq_store(run, vcpu, ax_rd, addr, w, i);
                break;
        }
        case OP_PSQ_STU:
        {
                ulong addr = kvmppc_get_gpr(vcpu, ax_ra);
                bool w = inst_get_field(inst, 16, 16) ? true : false;
                int i = inst_get_field(inst, 17, 19);

                addr += get_d_signext(inst);
                emulated = kvmppc_emulate_psq_store(run, vcpu, ax_rd, addr, w, i);

                if (emulated == EMULATE_DONE)
                        kvmppc_set_gpr(vcpu, ax_ra, addr);
                break;
        }
        case 4:
                /* X form */
                switch (inst_get_field(inst, 21, 30)) {
                case OP_4X_PS_CMPU0:
                        /* XXX */
                        emulated = EMULATE_FAIL;
                        break;
                case OP_4X_PSQ_LX:
                {
                        ulong addr = ax_ra ? kvmppc_get_gpr(vcpu, ax_ra) : 0;
                        bool w = inst_get_field(inst, 21, 21) ? true : false;
                        int i = inst_get_field(inst, 22, 24);

                        addr += kvmppc_get_gpr(vcpu, ax_rb);
                        emulated = kvmppc_emulate_psq_load(run, vcpu, ax_rd, addr, w, i);
                        break;
                }
                case OP_4X_PS_CMPO0:
                        /* XXX */
                        emulated = EMULATE_FAIL;
                        break;
                case OP_4X_PSQ_LUX:
                {
                        ulong addr = kvmppc_get_gpr(vcpu, ax_ra);
                        bool w = inst_get_field(inst, 21, 21) ? true : false;
                        int i = inst_get_field(inst, 22, 24);

                        addr += kvmppc_get_gpr(vcpu, ax_rb);
                        emulated = kvmppc_emulate_psq_load(run, vcpu, ax_rd, addr, w, i);

                        if (emulated == EMULATE_DONE)
                                kvmppc_set_gpr(vcpu, ax_ra, addr);
                        break;
                }
                case OP_4X_PS_NEG:
                        vcpu->arch.fpr[ax_rd] = vcpu->arch.fpr[ax_rb];
                        vcpu->arch.fpr[ax_rd] ^= 0x8000000000000000ULL;
                        vcpu->arch.qpr[ax_rd] = vcpu->arch.qpr[ax_rb];
                        vcpu->arch.qpr[ax_rd] ^= 0x80000000;
                        break;
                case OP_4X_PS_CMPU1:
                        /* XXX */
                        emulated = EMULATE_FAIL;
                        break;
                case OP_4X_PS_MR:
                        WARN_ON(rcomp);
                        vcpu->arch.fpr[ax_rd] = vcpu->arch.fpr[ax_rb];
                        vcpu->arch.qpr[ax_rd] = vcpu->arch.qpr[ax_rb];
                        break;
                case OP_4X_PS_CMPO1:
                        /* XXX */
                        emulated = EMULATE_FAIL;
                        break;
                case OP_4X_PS_NABS:
                        WARN_ON(rcomp);
                        vcpu->arch.fpr[ax_rd] = vcpu->arch.fpr[ax_rb];
                        vcpu->arch.fpr[ax_rd] |= 0x8000000000000000ULL;
                        vcpu->arch.qpr[ax_rd] = vcpu->arch.qpr[ax_rb];
                        vcpu->arch.qpr[ax_rd] |= 0x80000000;
                        break;
                case OP_4X_PS_ABS:
                        WARN_ON(rcomp);
                        vcpu->arch.fpr[ax_rd] = vcpu->arch.fpr[ax_rb];
                        vcpu->arch.fpr[ax_rd] &= ~0x8000000000000000ULL;
                        vcpu->arch.qpr[ax_rd] = vcpu->arch.qpr[ax_rb];
                        vcpu->arch.qpr[ax_rd] &= ~0x80000000;
                        break;
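                /*
                 * ps_mergeXY: PS0 of the result is taken from slot X of frA,
                 * PS1 from slot Y of frB; kvm_cvt_fd()/kvm_cvt_df() move
                 * values between the double-precision FPR image and the
                 * single-precision QPR image as needed.
                 */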
                case OP_4X_PS_MERGE00:
                        WARN_ON(rcomp);
                        vcpu->arch.fpr[ax_rd] = vcpu->arch.fpr[ax_ra];
                        /* vcpu->arch.qpr[ax_rd] = vcpu->arch.fpr[ax_rb]; */
                        kvm_cvt_df(&vcpu->arch.fpr[ax_rb],
                                   &vcpu->arch.qpr[ax_rd],
                                   &vcpu->arch.fpscr);
                        break;
                case OP_4X_PS_MERGE01:
                        WARN_ON(rcomp);
                        vcpu->arch.fpr[ax_rd] = vcpu->arch.fpr[ax_ra];
                        vcpu->arch.qpr[ax_rd] = vcpu->arch.qpr[ax_rb];
                        break;
                case OP_4X_PS_MERGE10:
                        WARN_ON(rcomp);
                        /* vcpu->arch.fpr[ax_rd] = vcpu->arch.qpr[ax_ra]; */
                        kvm_cvt_fd(&vcpu->arch.qpr[ax_ra],
                                   &vcpu->arch.fpr[ax_rd],
                                   &vcpu->arch.fpscr);
                        /* vcpu->arch.qpr[ax_rd] = vcpu->arch.fpr[ax_rb]; */
                        kvm_cvt_df(&vcpu->arch.fpr[ax_rb],
                                   &vcpu->arch.qpr[ax_rd],
                                   &vcpu->arch.fpscr);
                        break;
                case OP_4X_PS_MERGE11:
                        WARN_ON(rcomp);
                        /* vcpu->arch.fpr[ax_rd] = vcpu->arch.qpr[ax_ra]; */
                        kvm_cvt_fd(&vcpu->arch.qpr[ax_ra],
                                   &vcpu->arch.fpr[ax_rd],
                                   &vcpu->arch.fpscr);
                        vcpu->arch.qpr[ax_rd] = vcpu->arch.qpr[ax_rb];
                        break;
                }
                /* XW form */
                switch (inst_get_field(inst, 25, 30)) {
                case OP_4XW_PSQ_STX:
                {
                        ulong addr = ax_ra ? kvmppc_get_gpr(vcpu, ax_ra) : 0;
                        bool w = inst_get_field(inst, 21, 21) ? true : false;
                        int i = inst_get_field(inst, 22, 24);

                        addr += kvmppc_get_gpr(vcpu, ax_rb);
                        emulated = kvmppc_emulate_psq_store(run, vcpu, ax_rd, addr, w, i);
                        break;
                }
                case OP_4XW_PSQ_STUX:
                {
                        ulong addr = kvmppc_get_gpr(vcpu, ax_ra);
                        bool w = inst_get_field(inst, 21, 21) ? true : false;
                        int i = inst_get_field(inst, 22, 24);

                        addr += kvmppc_get_gpr(vcpu, ax_rb);
                        emulated = kvmppc_emulate_psq_store(run, vcpu, ax_rd, addr, w, i);

                        if (emulated == EMULATE_DONE)
                                kvmppc_set_gpr(vcpu, ax_ra, addr);
                        break;
                }
                }
                /* A form */
                switch (inst_get_field(inst, 26, 30)) {
                case OP_4A_PS_SUM1:
                        emulated = kvmppc_ps_two_in(vcpu, rcomp, ax_rd,
                                        ax_rb, ax_ra, SCALAR_NO_PS0 | SCALAR_HIGH, fps_fadds);
                        vcpu->arch.fpr[ax_rd] = vcpu->arch.fpr[ax_rc];
                        break;
                case OP_4A_PS_SUM0:
                        emulated = kvmppc_ps_two_in(vcpu, rcomp, ax_rd,
                                        ax_ra, ax_rb, SCALAR_NO_PS1 | SCALAR_LOW, fps_fadds);
                        vcpu->arch.qpr[ax_rd] = vcpu->arch.qpr[ax_rc];
                        break;
                case OP_4A_PS_MULS0:
                        emulated = kvmppc_ps_two_in(vcpu, rcomp, ax_rd,
                                        ax_ra, ax_rc, SCALAR_HIGH, fps_fmuls);
                        break;
                case OP_4A_PS_MULS1:
                        emulated = kvmppc_ps_two_in(vcpu, rcomp, ax_rd,
                                        ax_ra, ax_rc, SCALAR_LOW, fps_fmuls);
                        break;
                case OP_4A_PS_MADDS0:
                        emulated = kvmppc_ps_three_in(vcpu, rcomp, ax_rd,
                                        ax_ra, ax_rc, ax_rb, SCALAR_HIGH, fps_fmadds);
                        break;
                case OP_4A_PS_MADDS1:
                        emulated = kvmppc_ps_three_in(vcpu, rcomp, ax_rd,
                                        ax_ra, ax_rc, ax_rb, SCALAR_LOW, fps_fmadds);
                        break;
                case OP_4A_PS_DIV:
                        emulated = kvmppc_ps_two_in(vcpu, rcomp, ax_rd,
                                        ax_ra, ax_rb, SCALAR_NONE, fps_fdivs);
                        break;
                case OP_4A_PS_SUB:
                        emulated = kvmppc_ps_two_in(vcpu, rcomp, ax_rd,
                                        ax_ra, ax_rb, SCALAR_NONE, fps_fsubs);
                        break;
                case OP_4A_PS_ADD:
                        emulated = kvmppc_ps_two_in(vcpu, rcomp, ax_rd,
                                        ax_ra, ax_rb, SCALAR_NONE, fps_fadds);
                        break;
                case OP_4A_PS_SEL:
                        emulated = kvmppc_ps_three_in(vcpu, rcomp, ax_rd,
                                        ax_ra, ax_rc, ax_rb, SCALAR_NONE, fps_fsel);
                        break;
                case OP_4A_PS_RES:
                        emulated = kvmppc_ps_one_in(vcpu, rcomp, ax_rd,
                                        ax_rb, fps_fres);
                        break;
                case OP_4A_PS_MUL:
                        emulated = kvmppc_ps_two_in(vcpu, rcomp, ax_rd,
                                        ax_ra, ax_rc, SCALAR_NONE, fps_fmuls);
                        break;
                case OP_4A_PS_RSQRTE:
                        emulated = kvmppc_ps_one_in(vcpu, rcomp, ax_rd,
                                        ax_rb, fps_frsqrte);
                        break;
                case OP_4A_PS_MSUB:
                        emulated = kvmppc_ps_three_in(vcpu, rcomp, ax_rd,
                                        ax_ra, ax_rc, ax_rb, SCALAR_NONE, fps_fmsubs);
                        break;
                case OP_4A_PS_MADD:
                        emulated = kvmppc_ps_three_in(vcpu, rcomp, ax_rd,
                                        ax_ra, ax_rc, ax_rb, SCALAR_NONE, fps_fmadds);
                        break;
                case OP_4A_PS_NMSUB:
                        emulated = kvmppc_ps_three_in(vcpu, rcomp, ax_rd,
                                        ax_ra, ax_rc, ax_rb, SCALAR_NONE, fps_fnmsubs);
                        break;
                case OP_4A_PS_NMADD:
                        emulated = kvmppc_ps_three_in(vcpu, rcomp, ax_rd,
                                        ax_ra, ax_rc, ax_rb, SCALAR_NONE, fps_fnmadds);
                        break;
                }
                break;

        /* Real FPU operations */

        case OP_LFS:
        {
                ulong addr = (ax_ra ? kvmppc_get_gpr(vcpu, ax_ra) : 0) + full_d;

                emulated = kvmppc_emulate_fpr_load(run, vcpu, ax_rd, addr,
                                                   FPU_LS_SINGLE);
                break;
        }
        case OP_LFSU:
        {
                ulong addr = kvmppc_get_gpr(vcpu, ax_ra) + full_d;

                emulated = kvmppc_emulate_fpr_load(run, vcpu, ax_rd, addr,
                                                   FPU_LS_SINGLE);

                if (emulated == EMULATE_DONE)
                        kvmppc_set_gpr(vcpu, ax_ra, addr);
                break;
        }
        case OP_LFD:
        {
                ulong addr = (ax_ra ? kvmppc_get_gpr(vcpu, ax_ra) : 0) + full_d;

                emulated = kvmppc_emulate_fpr_load(run, vcpu, ax_rd, addr,
                                                   FPU_LS_DOUBLE);
                break;
        }
        case OP_LFDU:
        {
                ulong addr = kvmppc_get_gpr(vcpu, ax_ra) + full_d;

                emulated = kvmppc_emulate_fpr_load(run, vcpu, ax_rd, addr,
                                                   FPU_LS_DOUBLE);

                if (emulated == EMULATE_DONE)
                        kvmppc_set_gpr(vcpu, ax_ra, addr);
                break;
        }
        case OP_STFS:
        {
                ulong addr = (ax_ra ? kvmppc_get_gpr(vcpu, ax_ra) : 0) + full_d;

                emulated = kvmppc_emulate_fpr_store(run, vcpu, ax_rd, addr,
                                                    FPU_LS_SINGLE);
                break;
        }
        case OP_STFSU:
        {
                ulong addr = kvmppc_get_gpr(vcpu, ax_ra) + full_d;

                emulated = kvmppc_emulate_fpr_store(run, vcpu, ax_rd, addr,
                                                    FPU_LS_SINGLE);

                if (emulated == EMULATE_DONE)
                        kvmppc_set_gpr(vcpu, ax_ra, addr);
                break;
        }
        case OP_STFD:
        {
                ulong addr = (ax_ra ? kvmppc_get_gpr(vcpu, ax_ra) : 0) + full_d;

                emulated = kvmppc_emulate_fpr_store(run, vcpu, ax_rd, addr,
                                                    FPU_LS_DOUBLE);
                break;
        }
        case OP_STFDU:
        {
                ulong addr = kvmppc_get_gpr(vcpu, ax_ra) + full_d;

                emulated = kvmppc_emulate_fpr_store(run, vcpu, ax_rd, addr,
                                                    FPU_LS_DOUBLE);

                if (emulated == EMULATE_DONE)
                        kvmppc_set_gpr(vcpu, ax_ra, addr);
                break;
        }
        case 31:
                switch (inst_get_field(inst, 21, 30)) {
                case OP_31_LFSX:
                {
                        ulong addr = ax_ra ? kvmppc_get_gpr(vcpu, ax_ra) : 0;

                        addr += kvmppc_get_gpr(vcpu, ax_rb);
                        emulated = kvmppc_emulate_fpr_load(run, vcpu, ax_rd,
                                                           addr, FPU_LS_SINGLE);
                        break;
                }
                case OP_31_LFSUX:
                {
                        ulong addr = kvmppc_get_gpr(vcpu, ax_ra) +
                                     kvmppc_get_gpr(vcpu, ax_rb);

                        emulated = kvmppc_emulate_fpr_load(run, vcpu, ax_rd,
                                                           addr, FPU_LS_SINGLE);

                        if (emulated == EMULATE_DONE)
                                kvmppc_set_gpr(vcpu, ax_ra, addr);
                        break;
                }
                case OP_31_LFDX:
                {
                        ulong addr = (ax_ra ? kvmppc_get_gpr(vcpu, ax_ra) : 0) +
                                     kvmppc_get_gpr(vcpu, ax_rb);

                        emulated = kvmppc_emulate_fpr_load(run, vcpu, ax_rd,
                                                           addr, FPU_LS_DOUBLE);
                        break;
                }
                case OP_31_LFDUX:
                {
                        ulong addr = kvmppc_get_gpr(vcpu, ax_ra) +
                                     kvmppc_get_gpr(vcpu, ax_rb);

                        emulated = kvmppc_emulate_fpr_load(run, vcpu, ax_rd,
                                                           addr, FPU_LS_DOUBLE);

                        if (emulated == EMULATE_DONE)
                                kvmppc_set_gpr(vcpu, ax_ra, addr);
                        break;
                }
                case OP_31_STFSX:
                {
                        ulong addr = (ax_ra ? kvmppc_get_gpr(vcpu, ax_ra) : 0) +
                                     kvmppc_get_gpr(vcpu, ax_rb);

                        emulated = kvmppc_emulate_fpr_store(run, vcpu, ax_rd,
                                                            addr, FPU_LS_SINGLE);
                        break;
                }
                case OP_31_STFSUX:
                {
                        ulong addr = kvmppc_get_gpr(vcpu, ax_ra) +
                                     kvmppc_get_gpr(vcpu, ax_rb);

                        emulated = kvmppc_emulate_fpr_store(run, vcpu, ax_rd,
                                                            addr, FPU_LS_SINGLE);

                        if (emulated == EMULATE_DONE)
                                kvmppc_set_gpr(vcpu, ax_ra, addr);
                        break;
                }
                case OP_31_STFX:
                {
                        ulong addr = (ax_ra ? kvmppc_get_gpr(vcpu, ax_ra) : 0) +
                                     kvmppc_get_gpr(vcpu, ax_rb);

                        emulated = kvmppc_emulate_fpr_store(run, vcpu, ax_rd,
                                                            addr, FPU_LS_DOUBLE);
                        break;
                }
                case OP_31_STFUX:
                {
                        ulong addr = kvmppc_get_gpr(vcpu, ax_ra) +
                                     kvmppc_get_gpr(vcpu, ax_rb);

                        emulated = kvmppc_emulate_fpr_store(run, vcpu, ax_rd,
                                                            addr, FPU_LS_DOUBLE);

                        if (emulated == EMULATE_DONE)
                                kvmppc_set_gpr(vcpu, ax_ra, addr);
                        break;
                }
                case OP_31_STFIWX:
                {
                        ulong addr = (ax_ra ? kvmppc_get_gpr(vcpu, ax_ra) : 0) +
                                     kvmppc_get_gpr(vcpu, ax_rb);

                        emulated = kvmppc_emulate_fpr_store(run, vcpu, ax_rd,
                                                            addr,
                                                            FPU_LS_SINGLE_LOW);
                        break;
                }
                        break;
                }
                break;
        case 59:
                switch (inst_get_field(inst, 21, 30)) {
                case OP_59_FADDS:
                        fpd_fadds(&vcpu->arch.fpscr, &cr, fpr_d, fpr_a, fpr_b);
                        kvmppc_sync_qpr(vcpu, ax_rd);
                        break;
                case OP_59_FSUBS:
                        fpd_fsubs(&vcpu->arch.fpscr, &cr, fpr_d, fpr_a, fpr_b);
                        kvmppc_sync_qpr(vcpu, ax_rd);
                        break;
                case OP_59_FDIVS:
                        fpd_fdivs(&vcpu->arch.fpscr, &cr, fpr_d, fpr_a, fpr_b);
                        kvmppc_sync_qpr(vcpu, ax_rd);
                        break;
                case OP_59_FRES:
                        fpd_fres(&vcpu->arch.fpscr, &cr, fpr_d, fpr_b);
                        kvmppc_sync_qpr(vcpu, ax_rd);
                        break;
                case OP_59_FRSQRTES:
                        fpd_frsqrtes(&vcpu->arch.fpscr, &cr, fpr_d, fpr_b);
                        kvmppc_sync_qpr(vcpu, ax_rd);
                        break;
                }
                switch (inst_get_field(inst, 26, 30)) {
                case OP_59_FMULS:
                        fpd_fmuls(&vcpu->arch.fpscr, &cr, fpr_d, fpr_a, fpr_c);
                        kvmppc_sync_qpr(vcpu, ax_rd);
                        break;
                case OP_59_FMSUBS:
                        fpd_fmsubs(&vcpu->arch.fpscr, &cr, fpr_d, fpr_a, fpr_c, fpr_b);
                        kvmppc_sync_qpr(vcpu, ax_rd);
                        break;
                case OP_59_FMADDS:
                        fpd_fmadds(&vcpu->arch.fpscr, &cr, fpr_d, fpr_a, fpr_c, fpr_b);
                        kvmppc_sync_qpr(vcpu, ax_rd);
                        break;
                case OP_59_FNMSUBS:
                        fpd_fnmsubs(&vcpu->arch.fpscr, &cr, fpr_d, fpr_a, fpr_c, fpr_b);
                        kvmppc_sync_qpr(vcpu, ax_rd);
                        break;
                case OP_59_FNMADDS:
                        fpd_fnmadds(&vcpu->arch.fpscr, &cr, fpr_d, fpr_a, fpr_c, fpr_b);
                        kvmppc_sync_qpr(vcpu, ax_rd);
                        break;
                }
                break;
        case 63:
                switch (inst_get_field(inst, 21, 30)) {
                case OP_63_MTFSB0:
                case OP_63_MTFSB1:
                case OP_63_MCRFS:
                case OP_63_MTFSFI:
                        /* XXX need to implement */
                        break;
                case OP_63_MFFS:
                        /* XXX missing CR */
                        *fpr_d = vcpu->arch.fpscr;
                        break;
                case OP_63_MTFSF:
                        /* XXX missing fm bits */
                        /* XXX missing CR */
                        vcpu->arch.fpscr = *fpr_b;
                        break;
                case OP_63_FCMPU:
                {
                        u32 tmp_cr;
                        u32 cr0_mask = 0xf0000000;
                        u32 cr_shift = inst_get_field(inst, 6, 8) * 4;

                        fpd_fcmpu(&vcpu->arch.fpscr, &tmp_cr, fpr_a, fpr_b);
                        cr &= ~(cr0_mask >> cr_shift);
                        cr |= (cr & cr0_mask) >> cr_shift;
                        break;
                }
                case OP_63_FCMPO:
                {
                        u32 tmp_cr;
                        u32 cr0_mask = 0xf0000000;
                        u32 cr_shift = inst_get_field(inst, 6, 8) * 4;

                        fpd_fcmpo(&vcpu->arch.fpscr, &tmp_cr, fpr_a, fpr_b);
                        cr &= ~(cr0_mask >> cr_shift);
                        cr |= (cr & cr0_mask) >> cr_shift;
                        break;
                }
                case OP_63_FNEG:
                        fpd_fneg(&vcpu->arch.fpscr, &cr, fpr_d, fpr_b);
                        break;
                case OP_63_FMR:
                        *fpr_d = *fpr_b;
                        break;
                case OP_63_FABS:
                        fpd_fabs(&vcpu->arch.fpscr, &cr, fpr_d, fpr_b);
                        break;
                case OP_63_FCPSGN:
                        fpd_fcpsgn(&vcpu->arch.fpscr, &cr, fpr_d, fpr_a, fpr_b);
                        break;
                case OP_63_FDIV:
                        fpd_fdiv(&vcpu->arch.fpscr, &cr, fpr_d, fpr_a, fpr_b);
                        break;
                case OP_63_FADD:
                        fpd_fadd(&vcpu->arch.fpscr, &cr, fpr_d, fpr_a, fpr_b);
                        break;
                case OP_63_FSUB:
                        fpd_fsub(&vcpu->arch.fpscr, &cr, fpr_d, fpr_a, fpr_b);
                        break;
                case OP_63_FCTIW:
                        fpd_fctiw(&vcpu->arch.fpscr, &cr, fpr_d, fpr_b);
                        break;
                case OP_63_FCTIWZ:
                        fpd_fctiwz(&vcpu->arch.fpscr, &cr, fpr_d, fpr_b);
                        break;
                case OP_63_FRSP:
                        fpd_frsp(&vcpu->arch.fpscr, &cr, fpr_d, fpr_b);
                        kvmppc_sync_qpr(vcpu, ax_rd);
                        break;
                case OP_63_FRSQRTE:
                {
                        double one = 1.0f;

                        /* fD = sqrt(fB) */
                        fpd_fsqrt(&vcpu->arch.fpscr, &cr, fpr_d, fpr_b);
                        /* fD = 1.0f / fD */
                        fpd_fdiv(&vcpu->arch.fpscr, &cr, fpr_d, (u64*)&one, fpr_d);
                        break;
                }
                }
                switch (inst_get_field(inst, 26, 30)) {
                case OP_63_FMUL:
                        fpd_fmul(&vcpu->arch.fpscr, &cr, fpr_d, fpr_a, fpr_c);
                        break;
                case OP_63_FSEL:
                        fpd_fsel(&vcpu->arch.fpscr, &cr, fpr_d, fpr_a, fpr_c, fpr_b);
                        break;
                case OP_63_FMSUB:
                        fpd_fmsub(&vcpu->arch.fpscr, &cr, fpr_d, fpr_a, fpr_c, fpr_b);
                        break;
                case OP_63_FMADD:
                        fpd_fmadd(&vcpu->arch.fpscr, &cr, fpr_d, fpr_a, fpr_c, fpr_b);
                        break;
                case OP_63_FNMSUB:
                        fpd_fnmsub(&vcpu->arch.fpscr, &cr, fpr_d, fpr_a, fpr_c, fpr_b);
                        break;
                case OP_63_FNMADD:
                        fpd_fnmadd(&vcpu->arch.fpscr, &cr, fpr_d, fpr_a, fpr_c, fpr_b);
                        break;
                }
                break;
        }

#ifdef DEBUG
        for (i = 0; i < ARRAY_SIZE(vcpu->arch.fpr); i++) {
                u32 f;
                kvm_cvt_df(&vcpu->arch.fpr[i], &f, &vcpu->arch.fpscr);
                dprintk(KERN_INFO "FPR[%d] = 0x%x\n", i, f);
        }
#endif

        if (rcomp)
                kvmppc_set_cr(vcpu, cr);

        preempt_enable();

        return emulated;
}